source: inundation/pyvolution/data_manager.py @ 2267

Last change on this file since 2267 was 2262, checked in by ole, 19 years ago

Fixed missing geo reference in set_quantity and added tests

File size: 114.7 KB
Line 
1"""datamanager.py - input output for AnuGA
2
3
This module takes care of reading and writing datafiles such as topographies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
.geo: Houdini's ASCII geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
NC -> SWW: Conversion of MOST boundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
TSH + Boundary SWW -> SWW: Simulation using pyvolution
48
49"""
50import os
51
52from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
53     searchsorted, zeros, allclose, around
54
55
56from coordinate_transforms.geo_reference import Geo_reference
57
def make_filename(s):
    """Return a version of string s suitable for use as a filename.

    Leading/trailing whitespace is trimmed, spaces become underscores,
    parentheses are dropped and double underscores are collapsed once.
    """

    replacements = ((' ', '_'),
                    ('(', ''),
                    (')', ''),
                    ('__', '_'))

    result = s.strip()
    for old, new in replacements:
        result = result.replace(old, new)

    return result
69
70
def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       checkdir(path, verbose):

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname

    # NOTE: The old test used os.access(path, os.R_OK and os.W_OK), which
    # due to 'and' short-circuiting only ever checked os.W_OK. The flags
    # must be OR'ed together to test read AND write permission.
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print('MESSAGE: Directory %s created.' % path)

        except OSError:
            # Creation failed - fall back to a well known writable location
            print('WARNING: Directory %s could not be created.' % path)
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print('Using directory %s instead' % path)

    return path
123
124
125
def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if not os.path.isdir(path):
        # Nothing to do for non-directories or missing paths
        return

    for name in os.listdir(path):
        full = os.path.join(path, name)

        # Recurse into real subdirectories; unlink everything else
        # (plain files and symbolic links)
        if os.path.isdir(full) and not os.path.islink(full):
            del_dir(full)
        else:
            try:
                os.remove(full)
            except:
                print("Could not remove file %s" % full)

    os.rmdir(path)
146
147
148
def create_filename(datadir, filename, format, size=None, time=None):
    """Build a full file name of the form
    <datadir>/<filename>[_size<size>][_time<time>].<format>

    The data directory is verified (and created if needed) via check_dir,
    which returns it with a trailing separator.
    """

    import os
    #from config import data_dir

    parts = [check_dir(datadir), filename]

    if size is not None:
        parts.append('_size%d' % size)

    if time is not None:
        parts.append('_time%.2f' % time)

    parts.append('.' + format)

    return ''.join(parts)
164
165
def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob
    import os

    directory = check_dir(datadir)

    pattern = directory + os.sep + filename + '_size=%d*.%s' % (size, format)
    return glob.glob(pattern)
179
180
181
#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Common base for the concrete data format writers.

    Derives the output filename from the domain's data directory, name
    and the given extension, and keeps a timestep counter, the triangle
    count and a reference to the domain.
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], '''Mode %s must be either:''' %mode +\
                                        '''   'w' (write)'''+\
                                        '''   'r' (read)''' +\
                                        '''   'a' (append)'''

        # Filename is derived from the domain's data directory and name
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(),
                                        extension)

        self.timestep = 0                     # Number of timesteps stored so far
        self.number_of_volumes = len(domain)  # Triangle count
        self.domain = domain


        #FIXME: Should we have a general set_precision function?
209
210
211
#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        """Create (mode 'w') or open the sww file for the given domain.

        domain    -- simulation domain providing mesh and quantity data
        mode      -- 'w' (write), 'r' (read) or 'a' (append)
        max_size  -- approximate file size limit in bytes; when exceeded
                     a follow-on file is started (see store_timestep)
        recursion -- used internally when a follow-on file is created
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision
        # A max_size attribute on the domain overrides the argument
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        # Base class derives self.filename, self.number_of_volumes etc.
        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            # With smoothing, vertices shared between triangles are stored
            # once; otherwise every triangle contributes its own 3 points
            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for net CDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility
        # 'z' duplicates the 'elevation' variable for older readers
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file

        names -- quantity name or list/tuple of quantity names
                 ('stage', 'xmomentum', 'ymomentum')

        If appending the new timestep would push the file past max_size,
        a fresh sww file (with '_time_<t>' appended to the name) is
        created and installed as domain.writer; otherwise the values are
        appended to this file.
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        #minimum_allowed_depth = 0.001
        minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
        from Numeric import choose

        #Get NetCDF. Retry for up to ~10s in case a reader has the file open.
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            # NOTE(review): string exception - not supported in later
            # Python versions
            msg = 'File %s could not be opened for append' %self.filename
            raise msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        # Estimate the size of one more timestep as the current average
        file_size = stat(self.filename)[6]
        file_size_increase =  file_size/i
        # NOTE(review): recursion is False (0) initially and an integer
        # thereafter, so the threshold doubles per recursion level
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #in order to get the file name and start time correct,
            #I change the domian.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #write a filename addon that won't break swollens reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            # Accept a single name as well as a list/tuple of names
            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    # Clamp stage to the bed where depth is below the
                    # minimum allowed depth
                    z = fid.variables['elevation']
                    #print z[:]
                    #print A-z[:]
                    A = choose( A-z[:] >= minimum_allowed_depth, (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
                #eval( name + '[i,:] = A.astype(self.precision)' )
                #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()
482
483
484
#Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt) for checkpoint data

    Stores the mesh (x, y, connectivity) once and each quantity once per
    timestep at full (double) precision.
    """


    def __init__(self, domain, mode = 'w'):
        """Create (mode 'w') or open the checkpoint file for the given domain."""
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        # NOTE: This used to pass 'sww', making checkpoint files collide
        # with the model output file. Checkpoints get their own extension.
        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write the mesh (x, y coordinates and connectivity) to file."""

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)

        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        # NOTE: The old code also assigned Z to an undefined variable 'z'
        # (a NameError); this format allocates no static elevation
        # variable, so Z is not stored here.

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and the named quantity to file

        name -- key into domain.quantities; must be one of the variables
                allocated in __init__.
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF. Retry for up to ~10s in case a reader has the file open.
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                          %self.filename
                msg += ' - trying again'
                print(msg)
                retries += 1
                sleep(1)
            else:
                file_open = True

        # NOTE: This check used to sit mis-indented inside the retry loop,
        # raising unconditionally after the first iteration.
        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise IOError(msg)


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        # NOTE: The old code always wrote into 'stage' regardless of name,
        # although __init__ allocates one variable per quantity.
        target = fid.variables[name]
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                                  precision = self.precision)

        target[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()
630
631
632
633
634
#Function for storing xya output
#FIXME Not done yet for this version
class Data_format_xya(Data_format):
    """Interface to the ASCII xya format for storing vertex data

    Writes one line per vertex containing x, y, bed elevation, stage
    and friction (see store_all).
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        # Base class derives self.filename, self.number_of_volumes etc.
        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        # With smoothing, vertices shared between triangles are stored
        # once; otherwise every triangle contributes its own 3 points
        if domain.smooth is True:
            number_of_points =  len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        # Header: point count, attribute count and a comment
        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        # NOTE: map(None, ...) is the Python 2 idiom for zipping with
        # None-padding when sequences differ in length
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file

        Not implemented for the xya format.
        """
        pass
706
707
#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)

       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number elements.
       Triangles are assumed to be disconnected.

       The three numbers in each vector correspond to three vertices,
       e.g. the x coordinate of vertex 1 of element i is in x[i,1]
    """

    import os.path

    # Append .obj extension unless it is already present
    if os.path.splitext(filename)[1] == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    num_elements, num_vertices = x.shape
    assert num_vertices==3  #Assuming three vertices per element

    # Vertex records, one per triangle corner
    for i in range(num_elements):
        for j in range(num_vertices):
            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

    # Face records referencing the (1-based) vertex indices above
    for i in range(num_elements):
        first = i*num_vertices
        outfile.write("f %d %d %d\n" % (first+1,first+2,first+3))

    outfile.close()
746
747
748#########################################################
749#Conversion routines
750########################################################
751
752def sww2obj(basefilename, size):
753    """Convert netcdf based data output to obj
754    """
755    from Scientific.IO.NetCDF import NetCDFFile
756
757    from Numeric import Float, zeros
758
759    #Get NetCDF
760    FN = create_filename('.', basefilename, 'sww', size)
761    print 'Reading from ', FN
762    fid = NetCDFFile(FN, 'r')  #Open existing file for read
763
764
765    # Get the variables
766    x = fid.variables['x']
767    y = fid.variables['y']
768    z = fid.variables['elevation']
769    time = fid.variables['time']
770    stage = fid.variables['stage']
771
772    M = size  #Number of lines
773    xx = zeros((M,3), Float)
774    yy = zeros((M,3), Float)
775    zz = zeros((M,3), Float)
776
777    for i in range(M):
778        for j in range(3):
779            xx[i,j] = x[i+j*M]
780            yy[i,j] = y[i+j*M]
781            zz[i,j] = z[i+j*M]
782
783    #Write obj for bathymetry
784    FN = create_filename('.', basefilename, 'obj', size)
785    write_obj(FN,xx,yy,zz)
786
787
788    #Now read all the data with variable information, combine with
789    #x,y info and store as obj
790
791    for k in range(len(time)):
792        t = time[k]
793        print 'Processing timestep %f' %t
794
795        for i in range(M):
796            for j in range(3):
797                zz[i,j] = stage[k,i+j*M]
798
799
800        #Write obj for variable data
801        #FN = create_filename(basefilename, 'obj', size, time=t)
802        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
803        write_obj(FN,xx,yy,zz)
804
805
def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    # Each line holds three (x, y, z) triplets - one per vertex
    for i, line in enumerate(lines):
        values = [float(token) for token in line.split()]

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info
    #and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print('Processing %s' % filename)

        # NOTE: glob already returns the full path; the old code prefixed
        # data_dir a second time, so the open could never succeed.
        lines = open(filename, 'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            # NOTE: replaces the old string exception ('Hmmmm'), which
            # later Python versions reject
            raise ValueError('Could not extract time from filename %s' % filename)


        # One value per vertex at this timestep
        for i, line in enumerate(lines):
            values = [float(token) for token in line.split()]

            for j in range(3):
                z[i,j] = values[j]

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)
883
884
def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Open input for read and output for write
    infile = NetCDFFile(filename1, 'r')
    outfile = NetCDFFile(filename2, 'w')

    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    #Mirror all variable definitions
    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)

    #Copy the static variables (everything except time and stage)
    for name in infile.variables:
        if name not in ('time', 'stage'):
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    for i, j in enumerate(range(first, last, step)):
        print('Copying timestep %d of %d (%f)' %(j, last-first, time[j]))
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()
932
933
#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of the data format class matching domain.format

    E.g. domain.format == 'sww' yields a Data_format_sww instance opened
    in the given mode.
    """

    # Look the class up by name instead of using eval - same semantics
    # for the simple identifiers involved, without eval's risks
    cls = globals()['Data_format_%s' % domain.format]
    return cls(domain, mode)
941
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Get NetCDF
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print('Reading xya points from %s' %(root + ext))

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        # Warn about malformed lines but still attempt to parse them
        # (the old code used assert inside try/except, which is stripped
        # under python -O)
        if len(fields) != 3:
            print('WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line))

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except (ValueError, IndexError):
            #Skip lines without three leading numeric values
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)

    infile.close()  #NOTE: the old code leaked this file handle


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print('Store to NetCDF file %s' %ptsname)
    write_ptsfile(ptsname, points, attribute, attribute_name)
1017
1018
1019
1020######Obsoleted by export_points in load_mesh
1021def write_ptsfile(ptsname, points, attribute, attribute_name = None,
1022                  zone=None, xllcorner=None, yllcorner=None):
1023    """Write points and associated attribute to pts (NetCDF) format
1024    """
1025
1026    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'
1027
1028    from Numeric import Float
1029
1030    if attribute_name is None:
1031        attribute_name = 'attribute'
1032
1033
1034    from Scientific.IO.NetCDF import NetCDFFile
1035
1036    # NetCDF file definition
1037    outfile = NetCDFFile(ptsname, 'w')
1038
1039
1040    #Create new file
1041    outfile.institution = 'Geoscience Australia'
1042    outfile.description = 'NetCDF pts format for compact and '\
1043                          'portable storage of spatial point data'
1044
1045
1046    #Georeferencing
1047    from coordinate_transforms.geo_reference import Geo_reference
1048    if zone is None:
1049        assert xllcorner is None, 'xllcorner must be None'
1050        assert yllcorner is None, 'yllcorner must be None'       
1051        Geo_reference().write_NetCDF(outfile)
1052    else:
1053        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)       
1054       
1055
1056
1057    outfile.createDimension('number_of_points', len(points))
1058    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1059
1060    # variable definitions
1061    outfile.createVariable('points', Float, ('number_of_points',
1062                                             'number_of_dimensions'))
1063    outfile.createVariable(attribute_name, Float, ('number_of_points',))
1064
1065    # Get handles to the variables
1066    nc_points = outfile.variables['points']
1067    nc_attribute = outfile.variables[attribute_name]
1068
1069    #Store data
1070    nc_points[:, :] = points
1071    nc_attribute[:] = attribute
1072
1073    outfile.close()
1074
1075
1076def dem2pts(basename_in, basename_out=None, verbose=False,
1077            easting_min=None, easting_max=None,
1078            northing_min=None, northing_max=None):
1079    """Read Digitial Elevation model from the following NetCDF format (.dem)
1080
1081    Example:
1082
1083    ncols         3121
1084    nrows         1800
1085    xllcorner     722000
1086    yllcorner     5893000
1087    cellsize      25
1088    NODATA_value  -9999
1089    138.3698 137.4194 136.5062 135.5558 ..........
1090
1091    Convert to NetCDF pts format which is
1092
1093    points:  (Nx2) Float array
1094    elevation: N Float array
1095    """
1096
1097    #FIXME: Can this be written feasibly using write_pts?
1098
1099    import os
1100    from Scientific.IO.NetCDF import NetCDFFile
1101    from Numeric import Float, zeros, reshape
1102
1103    root = basename_in
1104
1105    #Get NetCDF
1106    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read
1107
1108    if verbose: print 'Reading DEM from %s' %(root + '.dem')
1109
1110    ncols = infile.ncols[0]
1111    nrows = infile.nrows[0]
1112    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
1113    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
1114    cellsize = infile.cellsize[0]
1115    NODATA_value = infile.NODATA_value[0]
1116    dem_elevation = infile.variables['elevation']
1117
1118    zone = infile.zone[0]
1119    false_easting = infile.false_easting[0]
1120    false_northing = infile.false_northing[0]
1121
1122    #Text strings
1123    projection = infile.projection
1124    datum = infile.datum
1125    units = infile.units
1126
1127
1128    #Get output file
1129    if basename_out == None:
1130        ptsname = root + '.pts'
1131    else:
1132        ptsname = basename_out + '.pts'
1133
1134    if verbose: print 'Store to NetCDF file %s' %ptsname
1135    # NetCDF file definition
1136    outfile = NetCDFFile(ptsname, 'w')
1137
1138    #Create new file
1139    outfile.institution = 'Geoscience Australia'
1140    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
1141                      'of spatial point data'
1142    #assign default values
1143    if easting_min is None: easting_min = xllcorner
1144    if easting_max is None: easting_max = xllcorner + ncols*cellsize
1145    if northing_min is None: northing_min = yllcorner
1146    if northing_max is None: northing_max = yllcorner + nrows*cellsize
1147
1148    #compute offsets to update georeferencing
1149    easting_offset = xllcorner - easting_min
1150    northing_offset = yllcorner - northing_min
1151
1152    #Georeferencing
1153    outfile.zone = zone
1154    outfile.xllcorner = easting_min #Easting of lower left corner
1155    outfile.yllcorner = northing_min #Northing of lower left corner
1156    outfile.false_easting = false_easting
1157    outfile.false_northing = false_northing
1158
1159    outfile.projection = projection
1160    outfile.datum = datum
1161    outfile.units = units
1162
1163
1164    #Grid info (FIXME: probably not going to be used, but heck)
1165    outfile.ncols = ncols
1166    outfile.nrows = nrows
1167
1168
1169    # dimension definitions
1170    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
1171    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))
1172    outfile.createDimension('number_of_points', nrows_in_bounding_box*ncols_in_bounding_box)
1173    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1174
1175    # variable definitions
1176    outfile.createVariable('points', Float, ('number_of_points',
1177                                             'number_of_dimensions'))
1178    outfile.createVariable('elevation', Float, ('number_of_points',))
1179
1180    # Get handles to the variables
1181    points = outfile.variables['points']
1182    elevation = outfile.variables['elevation']
1183
1184    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
1185
1186    #Store data
1187    #FIXME: Could perhaps be faster using array operations (Fixed 27/7/05)
1188    global_index = 0
1189    for i in range(nrows):
1190        if verbose and i%((nrows+10)/10)==0:
1191            print 'Processing row %d of %d' %(i, nrows)
1192
1193        lower_index = global_index
1194        tpoints = zeros((ncols_in_bounding_box, 2), Float)
1195        telev =  zeros(ncols_in_bounding_box, Float)
1196        local_index = 0
1197
1198        y = (nrows-i)*cellsize + yllcorner
1199        for j in range(ncols):
1200
1201            x = j*cellsize + xllcorner
1202            if easting_min <= x <= easting_max and \
1203               northing_min <= y <= northing_max:
1204                tpoints[local_index, :] = [x-easting_min,y-northing_min]
1205                telev[local_index] = dem_elevation_r[i, j]
1206                global_index += 1
1207                local_index += 1
1208
1209        upper_index = global_index
1210
1211        if upper_index == lower_index + ncols_in_bounding_box:
1212            points[lower_index:upper_index, :] = tpoints
1213            elevation[lower_index:upper_index] = telev
1214
1215    assert global_index == nrows_in_bounding_box*ncols_in_bounding_box, 'index not equal to number of points'
1216
1217    infile.close()
1218    outfile.close()
1219
1220
1221
1222def _read_hecras_cross_sections(lines):
1223    """Return block of surface lines for each cross section
1224    Starts with SURFACE LINE,
1225    Ends with END CROSS-SECTION
1226    """
1227
1228    points = []
1229
1230    reading_surface = False
1231    for i, line in enumerate(lines):
1232
1233        if len(line.strip()) == 0:    #Ignore blanks
1234            continue
1235
1236        if lines[i].strip().startswith('SURFACE LINE'):
1237            reading_surface = True
1238            continue
1239
1240        if lines[i].strip().startswith('END') and reading_surface:
1241            yield points
1242            reading_surface = False
1243            points = []
1244
1245        if reading_surface:
1246            fields = line.strip().split(',')
1247            easting = float(fields[0])
1248            northing = float(fields[1])
1249            elevation = float(fields[2])
1250            points.append([easting, northing, elevation])
1251
1252
1253
1254
1255def hecras_cross_sections2pts(basename_in,
1256                              basename_out=None,
1257                              verbose=False):
1258    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1259
1260    Example:
1261
1262
1263# RAS export file created on Mon 15Aug2005 11:42
1264# by HEC-RAS Version 3.1.1
1265
1266BEGIN HEADER:
1267  UNITS: METRIC
1268  DTM TYPE: TIN
1269  DTM: v:\1\cit\perth_topo\river_tin
1270  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1271  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1272  MAP PROJECTION: UTM
1273  PROJECTION ZONE: 50
1274  DATUM: AGD66
1275  VERTICAL DATUM:
1276  NUMBER OF REACHES:  19
1277  NUMBER OF CROSS-SECTIONS:  14206
1278END HEADER:
1279
1280
1281Only the SURFACE LINE data of the following form will be utilised
1282
1283  CROSS-SECTION:
1284    STREAM ID:Southern-Wungong
1285    REACH ID:Southern-Wungong
1286    STATION:19040.*
1287    CUT LINE:
1288      405548.671603161 , 6438142.7594925
1289      405734.536092045 , 6438326.10404912
1290      405745.130459356 , 6438331.48627354
1291      405813.89633823 , 6438368.6272789
1292    SURFACE LINE:
1293     405548.67,   6438142.76,   35.37
1294     405552.24,   6438146.28,   35.41
1295     405554.78,   6438148.78,   35.44
1296     405555.80,   6438149.79,   35.44
1297     405559.37,   6438153.31,   35.45
1298     405560.88,   6438154.81,   35.44
1299     405562.93,   6438156.83,   35.42
1300     405566.50,   6438160.35,   35.38
1301     405566.99,   6438160.83,   35.37
1302     ...
1303   END CROSS-SECTION
1304
1305    Convert to NetCDF pts format which is
1306
1307    points:  (Nx2) Float array
1308    elevation: N Float array
1309    """
1310
1311    #FIXME: Can this be written feasibly using write_pts?
1312
1313    import os
1314    from Scientific.IO.NetCDF import NetCDFFile
1315    from Numeric import Float, zeros, reshape
1316
1317    root = basename_in
1318
1319    #Get ASCII file
1320    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1321
1322    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1323
1324    lines = infile.readlines()
1325    infile.close()
1326
1327    if verbose: print 'Converting to pts format'
1328
1329    i = 0
1330    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1331        i += 1
1332
1333    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1334    i += 1
1335
1336    assert lines[i].strip().upper().startswith('UNITS:')
1337    units = lines[i].strip().split()[1]
1338    i += 1
1339
1340    assert lines[i].strip().upper().startswith('DTM TYPE:')
1341    i += 1
1342
1343    assert lines[i].strip().upper().startswith('DTM:')
1344    i += 1
1345
1346    assert lines[i].strip().upper().startswith('STREAM')
1347    i += 1
1348
1349    assert lines[i].strip().upper().startswith('CROSS')
1350    i += 1
1351
1352    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1353    projection = lines[i].strip().split(':')[1]
1354    i += 1
1355
1356    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1357    zone = int(lines[i].strip().split(':')[1])
1358    i += 1
1359
1360    assert lines[i].strip().upper().startswith('DATUM:')
1361    datum = lines[i].strip().split(':')[1]
1362    i += 1
1363
1364    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1365    i += 1
1366
1367    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1368    i += 1
1369
1370    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1371    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1372    i += 1
1373
1374
1375    #Now read all points
1376    points = []
1377    elevation = []
1378    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1379        for k, entry in enumerate(entries):
1380            points.append(entry[:2])
1381            elevation.append(entry[2])
1382
1383
1384    msg = 'Actual #number_of_cross_sections == %d, Reported as %d'\
1385          %(j+1, number_of_cross_sections)
1386    assert j+1 == number_of_cross_sections, msg
1387
1388    #Get output file
1389    if basename_out == None:
1390        ptsname = root + '.pts'
1391    else:
1392        ptsname = basename_out + '.pts'
1393
1394    #FIXME (DSG-ON): use loadASCII export_points_file
1395    if verbose: print 'Store to NetCDF file %s' %ptsname
1396    # NetCDF file definition
1397    outfile = NetCDFFile(ptsname, 'w')
1398
1399    #Create new file
1400    outfile.institution = 'Geoscience Australia'
1401    outfile.description = 'NetCDF pts format for compact and portable ' +\
1402                          'storage of spatial point data derived from HEC-RAS'
1403
1404    #Georeferencing
1405    outfile.zone = zone
1406    outfile.xllcorner = 0.0
1407    outfile.yllcorner = 0.0
1408    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
1409    outfile.false_northing = 1000000
1410
1411    outfile.projection = projection
1412    outfile.datum = datum
1413    outfile.units = units
1414
1415
1416    outfile.createDimension('number_of_points', len(points))
1417    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1418
1419    # variable definitions
1420    outfile.createVariable('points', Float, ('number_of_points',
1421                                             'number_of_dimensions'))
1422    outfile.createVariable('elevation', Float, ('number_of_points',))
1423
1424    # Get handles to the variables
1425    outfile.variables['points'][:] = points
1426    outfile.variables['elevation'][:] = elevation
1427
1428    outfile.close()
1429
1430
1431
def sww2dem(basename_in, basename_out = None,
            quantity = None,
            timestep = None,
            reduction = None,
            cellsize = 10,
            NODATA_value = -9999,
            easting_min = None,
            easting_max = None,
            northing_min = None,
            northing_max = None,
            expand_search = False, #To avoid intractable situations (This will be fixed when least_squares gets redesigned)
            verbose = False,
            origin = None,
            datum = 'WGS84',
            format = 'ers'):

    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)

    Example (ASC):

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Also write accompanying file with same basename_in but extension .prj
    used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters


    The parameter quantity must be the name of an existing quantity or
    an expression involving existing quantities. The default is
    'elevation'.

    if timestep (an index) is given, output quantity at that timestep

    if reduction is given use that to reduce quantity over all timesteps.

    datum

    format can be either 'asc' or 'ers'

    NOTE(review): the timestep argument is accepted but currently not
    referenced anywhere in the function body - output is always reduced
    over all timesteps via `reduction`. Confirm whether per-timestep
    output was intended.
    """

    import sys
    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
    from Numeric import array2string

    from utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
    from util import apply_expression_to_dictionary

    msg = 'Format must be either asc or ers'
    assert format.lower() in ['asc', 'ers'], msg


    #Hardwired UTM false origin written to the .prj file for asc output
    false_easting = 500000
    false_northing = 10000000

    #Default quantity and temporal reduction
    if quantity is None:
        quantity = 'elevation'

    if reduction is None:
        reduction = max

    if basename_out is None:
        basename_out = basename_in + '_%s' %quantity

    swwfile = basename_in + '.sww'
    demfile = basename_out + '.' + format
    # Note the use of a .ers extension is optional (write_ermapper_grid will
    # deal with either option

    #Read sww file
    if verbose: print 'Reading from %s' %swwfile
    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile(swwfile)

    #Get extent and reference
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    volumes = fid.variables['volumes'][:]

    number_of_timesteps = fid.dimensions['number_of_timesteps']
    number_of_points = fid.dimensions['number_of_points']
    if origin is None:

        #Get geo_reference
        #sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError, e:
            geo_reference = Geo_reference() #Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()
    else:
        #Caller-supplied origin overrides the file's georeference
        zone = origin[0]
        xllcorner = origin[1]
        yllcorner = origin[2]



    #FIXME: Refactor using code from file_function.statistics
    #Something like print swwstats(swwname)
    if verbose:
        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        times = fid.variables['time'][:]
        print '------------------------------------------------'
        print 'Statistics of SWW file:'
        print '  Name: %s' %swwfile
        print '  Reference:'
        print '    Lower left corner: [%f, %f]'\
              %(xllcorner, yllcorner)
        print '    Start time: %f' %fid.starttime[0]
        print '  Extent:'
        print '    x [m] in [%f, %f], len(x) == %d'\
              %(min(x.flat), max(x.flat), len(x.flat))
        print '    y [m] in [%f, %f], len(y) == %d'\
              %(min(y.flat), max(y.flat), len(y.flat))
        print '    t [s] in [%f, %f], len(t) == %d'\
              %(min(times), max(times), len(times))
        print '  Quantities [SI units]:'
        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
            q = fid.variables[name][:].flat
            print '    %s in [%f, %f]' %(name, min(q), max(q))





    #Get quantity and reduce if applicable
    if verbose: print 'Processing quantity %s' %quantity

    #Turn NetCDF objects into Numeric arrays
    quantity_dict = {}
    for name in fid.variables.keys():
        quantity_dict[name] = fid.variables[name][:]


    #Evaluate the quantity name/expression against the file's variables
    q = apply_expression_to_dictionary(quantity, quantity_dict)



    if len(q.shape) == 2:
        #q has a time component and needs to be reduced along
        #the temporal dimension
        if verbose: print 'Reducing quantity %s' %quantity
        q_reduced = zeros( number_of_points, Float )

        for k in range(number_of_points):
            q_reduced[k] = reduction( q[:,k] )

        q = q_reduced

    #Post condition: Now q has dimension: number_of_points
    assert len(q.shape) == 1
    assert q.shape[0] == number_of_points


    if verbose:
        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))


    #Create grid and update xll/yll corner and x,y

    #Relative extent: convert any absolute bounds to the file's local frame
    if easting_min is None:
        xmin = min(x)
    else:
        xmin = easting_min - xllcorner

    if easting_max is None:
        xmax = max(x)
    else:
        xmax = easting_max - xllcorner

    if northing_min is None:
        ymin = min(y)
    else:
        ymin = northing_min - yllcorner

    if northing_max is None:
        ymax = max(y)
    else:
        ymax = northing_max - yllcorner



    if verbose: print 'Creating grid'
    ncols = int((xmax-xmin)/cellsize)+1
    nrows = int((ymax-ymin)/cellsize)+1


    #New absolute reference and coordinates
    newxllcorner = xmin+xllcorner
    newyllcorner = ymin+yllcorner

    #Shift mesh coordinates so they are relative to the new lower left corner
    x = x+xllcorner-newxllcorner
    y = y+yllcorner-newyllcorner

    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
    assert len(vertex_points.shape) == 2



    grid_points = zeros ( (ncols*nrows, 2), Float )


    for i in xrange(nrows):
        if format.lower() == 'asc':
            yg = i*cellsize
        else:
            #this will flip the order of the y values for ers
            yg = (nrows-i)*cellsize

        for j in xrange(ncols):
            xg = j*cellsize
            k = i*ncols + j

            grid_points[k,0] = xg
            grid_points[k,1] = yg

    #Interpolate
    from least_squares import Interpolation


    #FIXME: This should be done with precrop = True (?), otherwise it'll
    #take forever. With expand_search  set to False, some grid points might
    #miss out.... This will be addressed though Duncan's refactoring of least_squares

    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
                           precrop = False,
                           expand_search = expand_search,
                           verbose = verbose)



    #Interpolate using quantity values
    if verbose: print 'Interpolating'
    #Flat (1d) view of the interpolated grid; indexed linearly below
    grid_values = interp.interpolate(q).flat


    if verbose:
        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
                                                      max(grid_values))

    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
    P = interp.mesh.get_boundary_polygon()
    outside_indices = outside_polygon(grid_points, P, closed=True)

    for i in outside_indices:
        grid_values[i] = NODATA_value




    if format.lower() == 'ers':
        # setup ERS header information
        grid_values = reshape(grid_values,(nrows, ncols))
        header = {}
        header['datum'] = '"' + datum + '"'
        # FIXME The use of hardwired UTM and zone number needs to be made optional
        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
        header['projection'] = '"UTM-' + str(zone) + '"'
        header['coordinatetype'] = 'EN'
        if header['coordinatetype'] == 'LL':
            header['longitude'] = str(newxllcorner)
            header['latitude'] = str(newyllcorner)
        elif header['coordinatetype'] == 'EN':
            header['eastings'] = str(newxllcorner)
            header['northings'] = str(newyllcorner)
        header['nullcellvalue'] = str(NODATA_value)
        header['xdimension'] = str(cellsize)
        header['ydimension'] = str(cellsize)
        header['value'] = '"' + quantity + '"'
        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test


        #Write
        if verbose: print 'Writing %s' %demfile
        import ermapper_grids
        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)

        fid.close()
    else:
        #Write to Ascii format

        #Write prj file
        prjfile = basename_out + '.prj'

        if verbose: print 'Writing %s' %prjfile
        prjid = open(prjfile, 'w')
        prjid.write('Projection    %s\n' %'UTM')
        prjid.write('Zone          %d\n' %zone)
        prjid.write('Datum         %s\n' %datum)
        prjid.write('Zunits        NO\n')
        prjid.write('Units         METERS\n')
        prjid.write('Spheroid      %s\n' %datum)
        prjid.write('Xshift        %d\n' %false_easting)
        prjid.write('Yshift        %d\n' %false_northing)
        prjid.write('Parameters\n')
        prjid.close()



        if verbose: print 'Writing %s' %demfile

        ascid = open(demfile, 'w')

        ascid.write('ncols         %d\n' %ncols)
        ascid.write('nrows         %d\n' %nrows)
        ascid.write('xllcorner     %d\n' %newxllcorner)
        ascid.write('yllcorner     %d\n' %newyllcorner)
        ascid.write('cellsize      %f\n' %cellsize)
        ascid.write('NODATA_value  %d\n' %NODATA_value)


        #Get bounding polygon from mesh
        #P = interp.mesh.get_boundary_polygon()
        #inside_indices = inside_polygon(grid_points, P)

        #ASC data is written from the top row down; for 'asc' the grid
        #was built with row 0 at the bottom (yg = i*cellsize above),
        #hence the reversed base_index
        for i in range(nrows):
            if verbose and i%((nrows+10)/10)==0:
                print 'Doing row %d of %d' %(i, nrows)

            base_index = (nrows-i-1)*ncols

            #NOTE: local name 'slice' shadows the builtin of the same name
            slice = grid_values[base_index:base_index+ncols]
            s = array2string(slice, max_line_width=sys.maxint)
            ascid.write(s[1:-1] + '\n')


            #print
            #for j in range(ncols):
            #    index = base_index+j#
            #    print grid_values[index],
            #    ascid.write('%f ' %grid_values[index])
            #ascid.write('\n')

        #Close
        ascid.close()
        fid.close()
1789
1790#Backwards compatibility
1791def sww2asc(basename_in, basename_out = None,
1792            quantity = None,
1793            timestep = None,
1794            reduction = None,
1795            cellsize = 10,
1796            verbose = False,
1797            origin = None):
1798    print 'sww2asc will soon be obsoleted - please use sww2dem'
1799    sww2dem(basename_in,
1800            basename_out = basename_out,
1801            quantity = quantity,
1802            timestep = timestep,
1803            reduction = reduction,
1804            cellsize = cellsize,
1805            verbose = verbose,
1806            origin = origin,
1807            datum = 'WGS84',
1808            format = 'asc')
1809
1810def sww2ers(basename_in, basename_out = None,
1811            quantity = None,
1812            timestep = None,
1813            reduction = None,
1814            cellsize = 10,
1815            verbose = False,
1816            origin = None,
1817            datum = 'WGS84'):
1818    print 'sww2ers will soon be obsoleted - please use sww2dem'
1819    sww2dem(basename_in,
1820            basename_out = basename_out,
1821            quantity = quantity,
1822            timestep = timestep,
1823            reduction = reduction,
1824            cellsize = cellsize,
1825            verbose = verbose,
1826            origin = origin,
1827            datum = datum,
1828            format = 'ers')
1829################################# END COMPATIBILITY ##############
1830
1831
1832
1833
1834def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
1835                                  verbose=False):
1836    """Read Digitial Elevation model from the following ASCII format (.asc)
1837
1838    Example:
1839
1840    ncols         3121
1841    nrows         1800
1842    xllcorner     722000
1843    yllcorner     5893000
1844    cellsize      25
1845    NODATA_value  -9999
1846    138.3698 137.4194 136.5062 135.5558 ..........
1847
1848    Convert basename_in + '.asc' to NetCDF format (.dem)
1849    mimicking the ASCII format closely.
1850
1851
1852    An accompanying file with same basename_in but extension .prj must exist
1853    and is used to fix the UTM zone, datum, false northings and eastings.
1854
1855    The prj format is assumed to be as
1856
1857    Projection    UTM
1858    Zone          56
1859    Datum         WGS84
1860    Zunits        NO
1861    Units         METERS
1862    Spheroid      WGS84
1863    Xshift        0.0000000000
1864    Yshift        10000000.0000000000
1865    Parameters
1866    """
1867
1868    import os
1869    from Scientific.IO.NetCDF import NetCDFFile
1870    from Numeric import Float, array
1871
1872    #root, ext = os.path.splitext(basename_in)
1873    root = basename_in
1874
1875    ###########################################
1876    # Read Meta data
1877    if verbose: print 'Reading METADATA from %s' %root + '.prj'
1878    metadatafile = open(root + '.prj')
1879    metalines = metadatafile.readlines()
1880    metadatafile.close()
1881
1882    L = metalines[0].strip().split()
1883    assert L[0].strip().lower() == 'projection'
1884    projection = L[1].strip()                   #TEXT
1885
1886    L = metalines[1].strip().split()
1887    assert L[0].strip().lower() == 'zone'
1888    zone = int(L[1].strip())
1889
1890    L = metalines[2].strip().split()
1891    assert L[0].strip().lower() == 'datum'
1892    datum = L[1].strip()                        #TEXT
1893
1894    L = metalines[3].strip().split()
1895    assert L[0].strip().lower() == 'zunits'     #IGNORE
1896    zunits = L[1].strip()                       #TEXT
1897
1898    L = metalines[4].strip().split()
1899    assert L[0].strip().lower() == 'units'
1900    units = L[1].strip()                        #TEXT
1901
1902    L = metalines[5].strip().split()
1903    assert L[0].strip().lower() == 'spheroid'   #IGNORE
1904    spheroid = L[1].strip()                     #TEXT
1905
1906    L = metalines[6].strip().split()
1907    assert L[0].strip().lower() == 'xshift'
1908    false_easting = float(L[1].strip())
1909
1910    L = metalines[7].strip().split()
1911    assert L[0].strip().lower() == 'yshift'
1912    false_northing = float(L[1].strip())
1913
1914    #print false_easting, false_northing, zone, datum
1915
1916
1917    ###########################################
1918    #Read DEM data
1919
1920    datafile = open(basename_in + '.asc')
1921
1922    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
1923    lines = datafile.readlines()
1924    datafile.close()
1925
1926    if verbose: print 'Got', len(lines), ' lines'
1927
1928    ncols = int(lines[0].split()[1].strip())
1929    nrows = int(lines[1].split()[1].strip())
1930    xllcorner = float(lines[2].split()[1].strip())
1931    yllcorner = float(lines[3].split()[1].strip())
1932    cellsize = float(lines[4].split()[1].strip())
1933    NODATA_value = int(lines[5].split()[1].strip())
1934
1935    assert len(lines) == nrows + 6
1936
1937
1938    ##########################################
1939
1940
1941    if basename_out == None:
1942        netcdfname = root + '.dem'
1943    else:
1944        netcdfname = basename_out + '.dem'
1945
1946    if verbose: print 'Store to NetCDF file %s' %netcdfname
1947    # NetCDF file definition
1948    fid = NetCDFFile(netcdfname, 'w')
1949
1950    #Create new file
1951    fid.institution = 'Geoscience Australia'
1952    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
1953                      'of spatial point data'
1954
1955    fid.ncols = ncols
1956    fid.nrows = nrows
1957    fid.xllcorner = xllcorner
1958    fid.yllcorner = yllcorner
1959    fid.cellsize = cellsize
1960    fid.NODATA_value = NODATA_value
1961
1962    fid.zone = zone
1963    fid.false_easting = false_easting
1964    fid.false_northing = false_northing
1965    fid.projection = projection
1966    fid.datum = datum
1967    fid.units = units
1968
1969
1970    # dimension definitions
1971    fid.createDimension('number_of_rows', nrows)
1972    fid.createDimension('number_of_columns', ncols)
1973
1974    # variable definitions
1975    fid.createVariable('elevation', Float, ('number_of_rows',
1976                                            'number_of_columns'))
1977
1978    # Get handles to the variables
1979    elevation = fid.variables['elevation']
1980
1981    #Store data
1982    n = len(lines[6:])
1983    for i, line in enumerate(lines[6:]):
1984        fields = line.split()
1985        if verbose and i%((n+10)/10)==0:
1986            print 'Processing row %d of %d' %(i, nrows)
1987
1988        elevation[i, :] = array([float(x) for x in fields])
1989
1990    fid.close()
1991
1992
1993
1994
1995
1996def ferret2sww(basename_in, basename_out = None,
1997               verbose = False,
1998               minlat = None, maxlat = None,
1999               minlon = None, maxlon = None,
2000               mint = None, maxt = None, mean_stage = 0,
2001               origin = None, zscale = 1,
2002               fail_on_NaN = True,
2003               NaN_filler = 0,
2004               elevation = None,
2005               inverted_bathymetry = False
2006               ): #FIXME: Bathymetry should be obtained
2007                                  #from MOST somehow.
2008                                  #Alternatively from elsewhere
2009                                  #or, as a last resort,
2010                                  #specified here.
2011                                  #The value of -100 will work
2012                                  #for the Wollongong tsunami
2013                                  #scenario but is very hacky
2014    """Convert 'Ferret' NetCDF format for wave propagation to
2015    sww format native to pyvolution.
2016
2017    Specify only basename_in and read files of the form
2018    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2019    relative height, x-velocity and y-velocity, respectively.
2020
2021    Also convert latitude and longitude to UTM. All coordinates are
2022    assumed to be given in the GDA94 datum.
2023
2024    min's and max's: If omitted - full extend is used.
2025    To include a value min may equal it, while max must exceed it.
2026    Lat and lon are assuemd to be in decimal degrees
2027
2028    origin is a 3-tuple with geo referenced
2029    UTM coordinates (zone, easting, northing)
2030
2031    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2032    which means that longitude is the fastest
2033    varying dimension (row major order, so to speak)
2034
2035    ferret2sww uses grid points as vertices in a triangular grid
2036    counting vertices from lower left corner upwards, then right
2037    """
2038
2039    import os
2040    from Scientific.IO.NetCDF import NetCDFFile
2041    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2042    from Numeric import allclose, around
2043
2044    precision = Float
2045
2046
2047    #Get NetCDF data
2048    if verbose: print 'Reading files %s_*.nc' %basename_in
2049    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2050    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2051    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2052    file_e = NetCDFFile(basename_in + '_e.nc', 'r') #Elevation (z) (m)
2053
2054    if basename_out is None:
2055        swwname = basename_in + '.sww'
2056    else:
2057        swwname = basename_out + '.sww'
2058
2059    times = file_h.variables['TIME']
2060    latitudes = file_h.variables['LAT']
2061    longitudes = file_h.variables['LON']
2062
2063
2064
2065    #Precision used by most for lat/lon is 4 or 5 decimals
2066    e_lat = around(file_e.variables['LAT'][:], 5)
2067    e_lon = around(file_e.variables['LON'][:], 5)
2068
2069    #Check that files are compatible
2070    assert allclose(latitudes, file_u.variables['LAT'])
2071    assert allclose(latitudes, file_v.variables['LAT'])
2072    assert allclose(latitudes, e_lat)
2073
2074    assert allclose(longitudes, file_u.variables['LON'])
2075    assert allclose(longitudes, file_v.variables['LON'])
2076    assert allclose(longitudes, e_lon)
2077
2078
2079
2080    if mint == None:
2081        jmin = 0
2082    else:
2083        jmin = searchsorted(times, mint)
2084
2085    if maxt == None:
2086        jmax=len(times)
2087    else:
2088        jmax = searchsorted(times, maxt)
2089
2090    if minlat == None:
2091        kmin=0
2092    else:
2093        kmin = searchsorted(latitudes, minlat)
2094
2095    if maxlat == None:
2096        kmax = len(latitudes)
2097    else:
2098        kmax = searchsorted(latitudes, maxlat)
2099
2100    if minlon == None:
2101        lmin=0
2102    else:
2103        lmin = searchsorted(longitudes, minlon)
2104
2105    if maxlon == None:
2106        lmax = len(longitudes)
2107    else:
2108        lmax = searchsorted(longitudes, maxlon)
2109
2110
2111
2112    times = times[jmin:jmax]
2113    latitudes = latitudes[kmin:kmax]
2114    longitudes = longitudes[lmin:lmax]
2115
2116
2117    if verbose: print 'cropping'
2118    zname = 'ELEVATION'
2119
2120
2121    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2122    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2123    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2124    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2125
2126    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2127    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2128    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2129    #        from Numeric import asarray
2130    #        elevations=elevations.tolist()
2131    #        elevations.reverse()
2132    #        elevations=asarray(elevations)
2133    #    else:
2134    #        from Numeric import asarray
2135    #        elevations=elevations.tolist()
2136    #        elevations.reverse()
2137    #        elevations=asarray(elevations)
2138    #        'print hmmm'
2139
2140
2141
2142    #Get missing values
2143    nan_ha = file_h.variables['HA'].missing_value[0]
2144    nan_ua = file_u.variables['UA'].missing_value[0]
2145    nan_va = file_v.variables['VA'].missing_value[0]
2146    if hasattr(file_e.variables[zname],'missing_value'):
2147        nan_e  = file_e.variables[zname].missing_value[0]
2148    else:
2149        nan_e = None
2150
2151    #Cleanup
2152    from Numeric import sometrue
2153
2154    missing = (amplitudes == nan_ha)
2155    if sometrue (missing):
2156        if fail_on_NaN:
2157            msg = 'NetCDFFile %s contains missing values'\
2158                  %(basename_in+'_ha.nc')
2159            raise msg
2160        else:
2161            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2162
2163    missing = (uspeed == nan_ua)
2164    if sometrue (missing):
2165        if fail_on_NaN:
2166            msg = 'NetCDFFile %s contains missing values'\
2167                  %(basename_in+'_ua.nc')
2168            raise msg
2169        else:
2170            uspeed = uspeed*(missing==0) + missing*NaN_filler
2171
2172    missing = (vspeed == nan_va)
2173    if sometrue (missing):
2174        if fail_on_NaN:
2175            msg = 'NetCDFFile %s contains missing values'\
2176                  %(basename_in+'_va.nc')
2177            raise msg
2178        else:
2179            vspeed = vspeed*(missing==0) + missing*NaN_filler
2180
2181
2182    missing = (elevations == nan_e)
2183    if sometrue (missing):
2184        if fail_on_NaN:
2185            msg = 'NetCDFFile %s contains missing values'\
2186                  %(basename_in+'_e.nc')
2187            raise msg
2188        else:
2189            elevations = elevations*(missing==0) + missing*NaN_filler
2190
2191    #######
2192
2193
2194
2195    number_of_times = times.shape[0]
2196    number_of_latitudes = latitudes.shape[0]
2197    number_of_longitudes = longitudes.shape[0]
2198
2199    assert amplitudes.shape[0] == number_of_times
2200    assert amplitudes.shape[1] == number_of_latitudes
2201    assert amplitudes.shape[2] == number_of_longitudes
2202
2203    if verbose:
2204        print '------------------------------------------------'
2205        print 'Statistics:'
2206        print '  Extent (lat/lon):'
2207        print '    lat in [%f, %f], len(lat) == %d'\
2208              %(min(latitudes.flat), max(latitudes.flat),
2209                len(latitudes.flat))
2210        print '    lon in [%f, %f], len(lon) == %d'\
2211              %(min(longitudes.flat), max(longitudes.flat),
2212                len(longitudes.flat))
2213        print '    t in [%f, %f], len(t) == %d'\
2214              %(min(times.flat), max(times.flat), len(times.flat))
2215
2216        q = amplitudes.flat
2217        name = 'Amplitudes (ha) [cm]'
2218        print %s in [%f, %f]' %(name, min(q), max(q))
2219
2220        q = uspeed.flat
2221        name = 'Speeds (ua) [cm/s]'
2222        print %s in [%f, %f]' %(name, min(q), max(q))
2223
2224        q = vspeed.flat
2225        name = 'Speeds (va) [cm/s]'
2226        print %s in [%f, %f]' %(name, min(q), max(q))
2227
2228        q = elevations.flat
2229        name = 'Elevations (e) [m]'
2230        print %s in [%f, %f]' %(name, min(q), max(q))
2231
2232
2233    #print number_of_latitudes, number_of_longitudes
2234    number_of_points = number_of_latitudes*number_of_longitudes
2235    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2236
2237
2238    file_h.close()
2239    file_u.close()
2240    file_v.close()
2241    file_e.close()
2242
2243
2244    # NetCDF file definition
2245    outfile = NetCDFFile(swwname, 'w')
2246
2247    #Create new file
2248    outfile.institution = 'Geoscience Australia'
2249    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2250                          %(basename_in + '_ha.nc',
2251                            basename_in + '_ua.nc',
2252                            basename_in + '_va.nc',
2253                            basename_in + '_e.nc')
2254
2255
2256    #For sww compatibility
2257    outfile.smoothing = 'Yes'
2258    outfile.order = 1
2259
2260    #Start time in seconds since the epoch (midnight 1/1/1970)
2261    outfile.starttime = starttime = times[0]
2262    times = times - starttime  #Store relative times
2263
2264    # dimension definitions
2265    outfile.createDimension('number_of_volumes', number_of_volumes)
2266
2267    outfile.createDimension('number_of_vertices', 3)
2268    outfile.createDimension('number_of_points', number_of_points)
2269
2270
2271    #outfile.createDimension('number_of_timesteps', len(times))
2272    outfile.createDimension('number_of_timesteps', len(times))
2273
2274    # variable definitions
2275    outfile.createVariable('x', precision, ('number_of_points',))
2276    outfile.createVariable('y', precision, ('number_of_points',))
2277    outfile.createVariable('elevation', precision, ('number_of_points',))
2278
2279    #FIXME: Backwards compatibility
2280    outfile.createVariable('z', precision, ('number_of_points',))
2281    #################################
2282
2283    outfile.createVariable('volumes', Int, ('number_of_volumes',
2284                                            'number_of_vertices'))
2285
2286    outfile.createVariable('time', precision,
2287                           ('number_of_timesteps',))
2288
2289    outfile.createVariable('stage', precision,
2290                           ('number_of_timesteps',
2291                            'number_of_points'))
2292
2293    outfile.createVariable('xmomentum', precision,
2294                           ('number_of_timesteps',
2295                            'number_of_points'))
2296
2297    outfile.createVariable('ymomentum', precision,
2298                           ('number_of_timesteps',
2299                            'number_of_points'))
2300
2301
2302    #Store
2303    from coordinate_transforms.redfearn import redfearn
2304    x = zeros(number_of_points, Float)  #Easting
2305    y = zeros(number_of_points, Float)  #Northing
2306
2307
2308    if verbose: print 'Making triangular grid'
2309    #Check zone boundaries
2310    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2311
2312    vertices = {}
2313    i = 0
2314    for k, lat in enumerate(latitudes):       #Y direction
2315        for l, lon in enumerate(longitudes):  #X direction
2316
2317            vertices[l,k] = i
2318
2319            zone, easting, northing = redfearn(lat,lon)
2320
2321            msg = 'Zone boundary crossed at longitude =', lon
2322            #assert zone == refzone, msg
2323            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2324            x[i] = easting
2325            y[i] = northing
2326            i += 1
2327
2328
2329    #Construct 2 triangles per 'rectangular' element
2330    volumes = []
2331    for l in range(number_of_longitudes-1):    #X direction
2332        for k in range(number_of_latitudes-1): #Y direction
2333            v1 = vertices[l,k+1]
2334            v2 = vertices[l,k]
2335            v3 = vertices[l+1,k+1]
2336            v4 = vertices[l+1,k]
2337
2338            volumes.append([v1,v2,v3]) #Upper element
2339            volumes.append([v4,v3,v2]) #Lower element
2340
2341    volumes = array(volumes)
2342
2343    if origin == None:
2344        zone = refzone
2345        xllcorner = min(x)
2346        yllcorner = min(y)
2347    else:
2348        zone = origin[0]
2349        xllcorner = origin[1]
2350        yllcorner = origin[2]
2351
2352
2353    outfile.xllcorner = xllcorner
2354    outfile.yllcorner = yllcorner
2355    outfile.zone = zone
2356
2357
2358    if elevation is not None:
2359        z = elevation
2360    else:
2361        if inverted_bathymetry:
2362            z = -1*elevations
2363        else:
2364            z = elevations
2365        #FIXME: z should be obtained from MOST and passed in here
2366
2367    from Numeric import resize
2368    z = resize(z,outfile.variables['z'][:].shape)
2369    outfile.variables['x'][:] = x - xllcorner
2370    outfile.variables['y'][:] = y - yllcorner
2371    outfile.variables['z'][:] = z             #FIXME HACK
2372    outfile.variables['elevation'][:] = z
2373    outfile.variables['time'][:] = times   #Store time relative
2374    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2375
2376
2377
2378    #Time stepping
2379    stage = outfile.variables['stage']
2380    xmomentum = outfile.variables['xmomentum']
2381    ymomentum = outfile.variables['ymomentum']
2382
2383    if verbose: print 'Converting quantities'
2384    n = len(times)
2385    for j in range(n):
2386        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2387        i = 0
2388        for k in range(number_of_latitudes):      #Y direction
2389            for l in range(number_of_longitudes): #X direction
2390                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2391                stage[j,i] = w
2392                h = w - z[i]
2393                xmomentum[j,i] = uspeed[j,k,l]/100*h
2394                ymomentum[j,i] = vspeed[j,k,l]/100*h
2395                i += 1
2396
2397    #outfile.close()
2398
2399    #FIXME: Refactor using code from file_function.statistics
2400    #Something like print swwstats(swwname)
2401    if verbose:
2402        x = outfile.variables['x'][:]
2403        y = outfile.variables['y'][:]
2404        times = outfile.variables['time'][:]
2405        print '------------------------------------------------'
2406        print 'Statistics of output file:'
2407        print '  Name: %s' %swwname
2408        print '  Reference:'
2409        print '    Lower left corner: [%f, %f]'\
2410              %(xllcorner, yllcorner)
2411        print '    Start time: %f' %starttime
2412        print '  Extent:'
2413        print '    x [m] in [%f, %f], len(x) == %d'\
2414              %(min(x.flat), max(x.flat), len(x.flat))
2415        print '    y [m] in [%f, %f], len(y) == %d'\
2416              %(min(y.flat), max(y.flat), len(y.flat))
2417        print '    t [s] in [%f, %f], len(t) == %d'\
2418              %(min(times), max(times), len(times))
2419        print '  Quantities [SI units]:'
2420        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2421            q = outfile.variables[name][:].flat
2422            print '    %s in [%f, %f]' %(name, min(q), max(q))
2423
2424
2425
2426    outfile.close()
2427
2428
2429
2430
2431
def timefile2netcdf(filename, quantity_names = None):
    """Template for converting typical text files with time series to
    NetCDF tms file.


    The file format is assumed to be either two fields separated by a comma:

        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...

    E.g

      31/08/04 00:00:00, 1.328223 0 0
      31/08/04 00:15:00, 1.292912 0 0

    will provide a time dependent function f(t) with three attributes

    filename is assumed to be the rootname with extensions .txt and .tms

    quantity_names, if given, supplies the NetCDF variable name for each
    column of values; columns without a name get 'Attribute<i>'.
    """

    import time, calendar
    from Numeric import array, zeros, Float, alltrue
    from config import time_format
    from util import ensure_numeric


    #Peek at the first line to determine the number of values per timestep
    fid = open(filename + '.txt')
    line = fid.readline()
    fid.close()

    fields = line.split(',')
    msg = 'File %s must have the format date, value0 value1 value2 ...'\
          %filename
    assert len(fields) == 2, msg

    try:
        starttime = calendar.timegm(time.strptime(fields[0], time_format))
    except ValueError:
        msg = 'First field in file %s must be' %filename
        msg += ' date-time with format %s.\n' %time_format
        msg += 'I got %s instead.' %fields[0]
        raise msg


    #Split values
    values = []
    for value in fields[1].split():
        values.append(float(value))

    q = ensure_numeric(values)

    msg = 'ERROR: File must contain at least one independent value'
    assert len(q.shape) == 1, msg



    #Read times proper
    fid = open(filename + '.txt')
    lines = fid.readlines()
    fid.close()

    N = len(lines)
    d = len(q)

    T = zeros(N, Float)       #Time
    Q = zeros((N, d), Float)  #Values

    for i, line in enumerate(lines):
        fields = line.split(',')
        realtime = calendar.timegm(time.strptime(fields[0], time_format))

        #Store times relative to the time of the first record
        T[i] = realtime - starttime

        for j, value in enumerate(fields[1].split()):
            Q[i, j] = float(value)

    msg = 'File %s must list time as a monotonuosly ' %filename
    msg += 'increasing sequence'
    assert alltrue( T[1:] - T[:-1] > 0 ), msg


    #Create NetCDF file
    from Scientific.IO.NetCDF import NetCDFFile

    fid = NetCDFFile(filename + '.tms', 'w')


    fid.institution = 'Geoscience Australia'
    fid.description = 'Time series'


    #Reference point
    #Start time in seconds since the epoch (midnight 1/1/1970)
    #FIXME: Use Georef
    fid.starttime = starttime


    # dimension definitions
    #fid.createDimension('number_of_volumes', self.number_of_volumes)
    #fid.createDimension('number_of_vertices', 3)


    fid.createDimension('number_of_timesteps', len(T))

    fid.createVariable('time', Float, ('number_of_timesteps',))

    fid.variables['time'][:] = T

    for i in range(Q.shape[1]):
        try:
            name = quantity_names[i]
        except (TypeError, IndexError):
            #quantity_names is None or has fewer entries than columns
            name = 'Attribute%d'%i

        fid.createVariable(name, Float, ('number_of_timesteps',))
        fid.variables[name][:] = Q[:,i]

    fid.close()
2552
2553
def extent_sww(file_name):
    """Return the spatial and stage extent of an sww file.

    Input;
    file_name - the sww file

    Output;
    A list [xmin, xmax, ymin, ymax, stage_min, stage_max] where x and y
    are vertex coordinates and stage is taken over all timesteps.
    """

    from Scientific.IO.NetCDF import NetCDFFile

    #Open NetCDF file and pull out the relevant variables
    fid = NetCDFFile(file_name, 'r')

    xs = fid.variables['x'][:]
    ys = fid.variables['y'][:]
    stages = fid.variables['stage'][:]

    fid.close()

    return [min(xs), max(xs),
            min(ys), max(ys),
            min(stages.flat), max(stages.flat)]
2588
2589
def sww2domain(filename,boundary=None,t=None,\
               fail_if_NaN=True,NaN_filler=0\
               ,verbose = False,very_verbose = False):
    """
    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatable with the coordinates, and will
    give a different final boundary, or crash.
    """
    #Default no_data marker; overridden below by each variable's
    #missing_value attribute when present.
    NaN=9.969209968386869e+036
    #initialise NaN.

    from Scientific.IO.NetCDF import NetCDFFile
    from shallow_water import Domain
    from Numeric import asarray, transpose, resize

    if verbose: print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    #Open existing file for read
    time = fid.variables['time']       #Timesteps
    if t is None:
        t = time[-1]
    #Locate t in the stored time vector as an (index, ratio) pair
    time_interp = get_time_interp(time,t)

    # Get the variables as Numeric arrays
    x = fid.variables['x'][:]             #x-coordinates of vertices
    y = fid.variables['y'][:]             #y-coordinates of vertices
    elevation = fid.variables['elevation']     #Elevation
    stage = fid.variables['stage']     #Water level
    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:] #Connectivity
    coordinates=transpose(asarray([x.tolist(),y.tolist()]))

    conserved_quantities = []
    interpolated_quantities = {}
    other_quantities = []

    # get geo_reference
    #sww files don't have to have a geo_ref
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: #AttributeError, e:
        geo_reference = None

    if verbose: print '    getting quantities'
    #Classify variables: time-dependent ones are conserved quantities and
    #get interpolated to time t; the rest are static (other) quantities.
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity]=\
                  interpolated_quantity(fid.variables[quantity][:],time_interp)
        else: other_quantities.append(quantity)

    #Coordinates, connectivity and the backwards-compatible 'z' variable
    #are handled separately, not as quantities.
    other_quantities.remove('x')
    other_quantities.remove('y')
    other_quantities.remove('z')
    other_quantities.remove('volumes')

    conserved_quantities.remove('time')

    if verbose: print '    building domain'
    #    From domain.Domain:
    #    domain = Domain(coordinates, volumes,\
    #                    conserved_quantities = conserved_quantities,\
    #                    other_quantities = other_quantities,zone=zone,\
    #                    xllcorner=xllcorner, yllcorner=yllcorner)

    #   From shallow_water.Domain:
    coordinates=coordinates.tolist()
    volumes=volumes.tolist()
    #FIXME:should this be in mesh?(peter row)
    #Smoothed (vertex-averaged) files store one value per unique point;
    #unsmoothed files store three values per triangle, so duplicated
    #points must be weeded out before building the mesh.
    if fid.smoothing == 'Yes': unique = False
    else: unique = True
    if unique:
        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)


    domain = Domain(coordinates, volumes, boundary)

    if not boundary is None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    #Model restarts at the interpolated time; time itself is reset to zero
    domain.starttime=float(starttime)+float(t)
    domain.time=0.0

    for quantity in other_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                #Replace missing entries by NaN_filler, keep the rest
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            #Reshape flat per-vertex values to one row of 3 per triangle
            X = resize(X,(len(X)/3,3))
        domain.set_quantity(quantity,X)
    #
    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass #quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            print '       ',quantity
            print '        NaN =',NaN
            print '        max(X)'
            print '       ',max(X)
            print '        max(X)==NaN'
            print '       ',max(X)==NaN
            print ''
        if (max(X)==NaN) or (min(X)==NaN):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry'%quantity
                raise msg
            else:
                #Replace missing entries by NaN_filler, keep the rest
                data = (X<>NaN)
                X = (X*data)+(data==0)*NaN_filler
        if unique:
            #Reshape flat per-vertex values to one row of 3 per triangle
            X = resize(X,(X.shape[0]/3,3))
        domain.set_quantity(quantity,X)
    fid.close()
    return domain
2732
def interpolated_quantity(saved_quantity,time_interp):
    """Interpolate a stored quantity in time.

    time_interp is an (index, ratio) pair as returned by get_time_interp.
    If ratio is positive, linearly blend the values at timesteps index
    and index+1; otherwise return the value at timestep index unchanged.
    """
    index, ratio = time_interp
    if ratio > 0:
        #Linear blend between the two bracketing timesteps
        result = (1 - ratio)*saved_quantity[index] \
                 + ratio*saved_quantity[index + 1]
    else:
        #Exact match - no interpolation needed
        result = saved_quantity[index]

    #Return vector of interpolated values
    return result
2744
def get_time_interp(time,t=None):
    """Locate model time t within the stored time vector.

    Returns an (index, ratio) pair suitable for interpolated_quantity:
    time[index] <= t <= time[index+1] with ratio the fractional position
    between the two, or ratio == 0 when t coincides with a stored time.
    If t is None the last timestep (-1, 0.) is returned.
    Borrowed from previous pyvolution code.
    """
    if t is None:
        #Default to the final stored timestep
        return (-1, 0.)

    T = time
    tau = t
    index = 0
    msg = 'Time interval derived from file %s [%s:%s]'\
        %('FIXMEfilename', T[0], T[-1])
    msg += ' does not match model time: %s' %tau
    if tau < time[0] or tau > time[-1]:
        #Requested time lies outside the stored interval
        raise msg

    #Walk forwards past tau, then back to the bracketing index
    while tau > time[index]:
        index += 1
    while tau < time[index]:
        index -= 1

    if tau == time[index]:
        #Protect against case where tau == time[-1] (last time)
        # - also works in general when tau == time[i]
        ratio = 0
    else:
        #t is now between index and index+1
        ratio = (tau - time[index])/(time[index+1] - time[index])

    return (index,ratio)
2771
2772
def weed(coordinates,volumes,boundary = None):
    """Remove duplicated points from coordinates, remapping volume and
    boundary indices accordingly.

    coordinates - list (or Numeric array) of [x, y] pairs
    volumes     - list (or Numeric array) of vertex index triplets
    boundary    - optional dict mapping (index0, index1) segments to labels

    Returns (coordinates, volumes, boundary) with each unique point
    appearing once and all indices rewritten to the new numbering.
    """
    #Accept Numeric arrays as well as lists.
    #NOTE: the original code compared type(...) to the string 'array',
    #which is never true; detect arrays via their tolist method instead.
    if hasattr(coordinates, 'tolist'):
        coordinates = coordinates.tolist()
    if hasattr(volumes, 'tolist'):
        volumes = volumes.tolist()

    #Map each point to its first occurrence and remember, for every
    #original index, which point it referred to.
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        if point not in point_dict:
            point_dict[point] = i
        same_point[i] = point

    #Rebuild the coordinate list with one entry per unique point and
    #renumber point_dict to the new, compact indices.
    coordinates = []
    i = 0
    for point in point_dict.keys():
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1

    #Rewrite volume indices in terms of the new numbering.
    #Negative indices are sentinels and are left untouched.
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    if boundary is not None:
        new_boundary = {}
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')(peter row)

            if (point0, point1) in new_boundary:
                #Keep the existing label
                new_boundary[(point0,point1)] = new_boundary[(point0,point1)]#\
                                              #+','+label
            elif (point1, point0) in new_boundary:
                #Keep the existing label (reversed segment)
                new_boundary[(point1,point0)] = new_boundary[(point1,point0)]#\
                                              #+','+label
            else:
                new_boundary[(point0,point1)] = label

        boundary = new_boundary

    return coordinates,volumes,boundary
2828
2829
2830
2831
2832
2833
2834
2835
2836
2837
2838###############################################
2839#OBSOLETE STUFF
2840#Native checkpoint format.
2841#Information needed to recreate a state is preserved
2842#FIXME: Rethink and maybe use netcdf format
def cpt_variable_writer(filename, t, v0, v1, v2):
    """Store all conserved quantities to file

    Writes one line per triangle: the N vertex values of v0, then v1,
    then v2, each as '%.16e ' separated fields.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'cpt', M, t)

    fid = open(FN, 'w')
    for i in range(M):
        fields = []
        #Emit the three quantities in order, N values each
        for v in (v0, v1, v2):
            for j in range(N):
                fields.append('%.16e ' %v[i,j])

        fid.write(''.join(fields))
        fid.write('\n')
    fid.close()
2863
2864
def cpt_variable_reader(filename, t, v0, v1, v2):
    """Read all conserved quantities from a checkpoint file.

    Fills the pre-allocated (M, N) arrays v0, v1, v2 in place from the
    file written by cpt_variable_writer, which stores the N values of
    each quantity consecutively on one line per triangle.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'cpt', M, t)
    #print 'Reading from %s' %FN

    fid = open(FN)


    for i in range(M):
        values = fid.readline().split() #Get one line

        for j in range(N):
            v0[i,j] = float(values[j])
            #Offsets were hard-coded as 3+j and 6+j, which is only
            #correct when N == 3; use N-based offsets to match the writer.
            v1[i,j] = float(values[N+j])
            v2[i,j] = float(values[2*N+j])

    fid.close()
2886
2887def cpt_constant_writer(filename, X0, X1, X2, v0, v1, v2):
2888    """Writes x,y,z,z,z coordinates of triangles constituting the bed
2889    elevation.
2890    FIXME: Not in use pt
2891    """
2892
2893    M, N = v0.shape
2894
2895
2896    print X0
2897    import sys; sys.exit()
2898    FN = create_filename(filename, 'cpt', M)
2899    print 'Writing to %s' %FN
2900
2901    fid = open(FN, 'w')
2902    for i in range(M):
2903        for j in range(2):
2904            fid.write('%.16e ' %X0[i,j])   #x, y
2905        for j in range(N):
2906            fid.write('%.16e ' %v0[i,j])       #z,z,z,
2907
2908        for j in range(2):
2909            fid.write('%.16e ' %X1[i,j])   #x, y
2910        for j in range(N):
2911            fid.write('%.16e ' %v1[i,j])
2912
2913        for j in range(2):
2914            fid.write('%.16e ' %X2[i,j])   #x, y
2915        for j in range(N):
2916            fid.write('%.16e ' %v2[i,j])
2917
2918        fid.write('\n')
2919    fid.close()
2920
2921
2922
2923#Function for storing out to e.g. visualisation
2924#FIXME: Do we want this?
2925#FIXME: Not done yet for this version
def dat_constant_writer(filename, X0, X1, X2, v0, v1, v2):
    """Writes x,y,z coordinates of triangles constituting the bed elevation.

    One line per triangle: x,y,z for each of the three vertices, using the
    first column of the corresponding v array as the z value.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'dat', M)

    fid = open(FN, 'w')
    for i in range(M):
        parts = []
        for X, v in ((X0, v0), (X1, v1), (X2, v2)):
            parts.append('%f ' % X[i, 0])   #x
            parts.append('%f ' % X[i, 1])   #y
            parts.append('%f ' % v[i, 0])   #z
        parts.append('\n')
        fid.write(''.join(parts))
    fid.close()
2951
2952
2953
def dat_variable_writer(filename, t, v0, v1, v2):
    """Store water height to file.

    Writes one line per row i containing v0[i,0], v1[i,0] and v2[i,0]
    formatted to four decimals.
    """

    M, N = v0.shape

    FN = create_filename(filename, 'dat', M, t)

    fid = open(FN, 'w')
    for i in range(M):
        fid.write('%.4f %.4f %.4f \n' % (v0[i, 0], v1[i, 0], v2[i, 0]))
    fid.close()
2971
2972
2973def read_sww(filename):
2974    """Read sww Net CDF file containing Shallow Water Wave simulation
2975
2976    The integer array volumes is of shape Nx3 where N is the number of
2977    triangles in the mesh.
2978
2979    Each entry in volumes is an index into the x,y arrays (the location).
2980
2981    Quantities stage, elevation, xmomentum and ymomentum are all in arrays of dimensions
2982    number_of_timesteps, number_of_points.
2983
2984    The momentum is not always stored.
2985
2986    """
2987    from Scientific.IO.NetCDF import NetCDFFile
2988    print 'Reading from ', filename
2989    fid = NetCDFFile(filename, 'r')    #Open existing file for read
2990#latitude, longitude
2991    # Get the variables as Numeric arrays
2992    x = fid.variables['x']             #x-coordinates of vertices
2993    y = fid.variables['y']             #y-coordinates of vertices
2994    z = fid.variables['elevation']     #Elevation
2995    time = fid.variables['time']       #Timesteps
2996    stage = fid.variables['stage']     #Water level
2997    #xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
2998    #ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
2999
3000    volumes = fid.variables['volumes'] #Connectivity
3001
3002
def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
                 verbose=False):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Decimate data to cellsize_new using stencil and write to NetCDF dem format.

    basename_in: root of the input file name; '.dem' is appended here.
    stencil: 2D weight array applied to each window of the input grid
        (presumably weights summing to 1 -- TODO confirm with callers).
    cellsize_new: cell size of the decimated grid; assumed to be an
        integer multiple of the input cellsize (the ratio is truncated
        by int() below).
    basename_out: root of output name; defaults to
        basename_in + '_' + repr(cellsize_new).
    verbose: print progress messages.
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, sum, reshape, equal

    root = basename_in
    inname = root + '.dem'

    #Open existing netcdf file to read
    infile = NetCDFFile(inname, 'r')
    if verbose: print 'Reading DEM from %s' %inname

    #Read metadata (stored as length-1 arrays, hence the [0] indexing)
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]
    yllcorner = infile.yllcorner[0]
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    dem_elevation = infile.variables['elevation']

    #Get output file name
    if basename_out == None:
        outname = root + '_' + repr(cellsize_new) + '.dem'
    else:
        outname = basename_out + '.dem'

    if verbose: print 'Write decimated NetCDF file to %s' %outname

    #Determine some dimensions for decimated grid
    #NOTE: Python 2 integer division -- offsets are the (floored) centre
    #of the stencil window, used to shift the new lower-left corner.
    (nrows_stencil, ncols_stencil) = stencil.shape
    x_offset = ncols_stencil / 2
    y_offset = nrows_stencil / 2
    cellsize_ratio = int(cellsize_new / cellsize)
    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio

    #Open netcdf file for output
    outfile = NetCDFFile(outname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
                           'of spatial point data'
    #Georeferencing (copied from input, corners shifted by stencil offset)
    outfile.zone = zone
    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    outfile.cellsize = cellsize_new
    outfile.NODATA_value = NODATA_value
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.xllcorner = xllcorner + (x_offset * cellsize)
    outfile.yllcorner = yllcorner + (y_offset * cellsize)
    outfile.ncols = ncols_new
    outfile.nrows = nrows_new

    # dimension definition
    outfile.createDimension('number_of_points', nrows_new*ncols_new)

    # variable definition
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handle to the variable
    elevation = outfile.variables['elevation']

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data, one output row at a time
    global_index = 0
    for i in range(nrows_new):
        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
        lower_index = global_index
        telev =  zeros(ncols_new, Float)
        local_index = 0
        trow = i * cellsize_ratio

        for j in range(ncols_new):
            tcol = j * cellsize_ratio
            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]

            #if dem contains 1 or more NODATA_values set value in
            #decimated dem to NODATA_value, else compute decimated
            #value using stencil
            if sum(sum(equal(tmp, NODATA_value))) > 0:
                telev[local_index] = NODATA_value
            else:
                #Weighted sum over the stencil window
                telev[local_index] = sum(sum(tmp * stencil))

            global_index += 1
            local_index += 1

        upper_index = global_index

        elevation[lower_index:upper_index] = telev

    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'

    infile.close()
    outfile.close()
3129
3130
3131
def sww2asc_obsolete(basename_in, basename_out = None,
            quantity = None,
            timestep = None,
            reduction = None,
            cellsize = 10,
            verbose = False,
            origin = None):
    """Read SWW file and convert to Digitial Elevation model format (.asc)

    OBSOLETE -- kept for reference only (see the obsolete-functions
    banner below this block).

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Also write accompanying file with same basename_in but extension .prj
    used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters


    if quantity is given, out values from quantity otherwise default to
    elevation

    if timestep (an index) is given, output quantity at that timestep

    if reduction is given use that to reduce quantity over all timesteps.

    NOTE(review): the timestep parameter is accepted but never used in
    this body; reduction defaults to the builtin max over timesteps.
    """
    from Numeric import array, Float, concatenate, NewAxis, zeros,\
         sometrue
    from utilities.polygon import inside_polygon

    #FIXME: Should be variable
    datum = 'WGS84'
    false_easting = 500000
    false_northing = 10000000

    if quantity is None:
        quantity = 'elevation'

    if reduction is None:
        reduction = max

    if basename_out is None:
        basename_out = basename_in + '_%s' %quantity

    swwfile = basename_in + '.sww'
    ascfile = basename_out + '.asc'
    prjfile = basename_out + '.prj'


    if verbose: print 'Reading from %s' %swwfile
    #Read sww file
    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile(swwfile)

    #Get extent and reference
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    volumes = fid.variables['volumes'][:]

    ymin = min(y); ymax = max(y)
    xmin = min(x); xmax = max(x)

    number_of_timesteps = fid.dimensions['number_of_timesteps']
    number_of_points = fid.dimensions['number_of_points']
    if origin is None:

        #Get geo_reference
        #sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError, e:
            geo_reference = Geo_reference() #Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()
    else:
        #Caller-supplied georeference: (zone, xllcorner, yllcorner)
        zone = origin[0]
        xllcorner = origin[1]
        yllcorner = origin[2]


    #Get quantity and reduce if applicable
    if verbose: print 'Reading quantity %s' %quantity

    if quantity.lower() == 'depth':
        #Depth is derived, not stored: stage minus elevation
        q = fid.variables['stage'][:] - fid.variables['elevation'][:]
    else:
        q = fid.variables[quantity][:]


    if len(q.shape) == 2:
        #Time-varying quantity: collapse the time axis per point
        if verbose: print 'Reducing quantity %s' %quantity
        q_reduced = zeros( number_of_points, Float )

        for k in range(number_of_points):
            q_reduced[k] = reduction( q[:,k] )

        q = q_reduced

    #Now q has dimension: number_of_points

    #Create grid and update xll/yll corner and x,y
    if verbose: print 'Creating grid'
    ncols = int((xmax-xmin)/cellsize)+1
    nrows = int((ymax-ymin)/cellsize)+1

    newxllcorner = xmin+xllcorner
    newyllcorner = ymin+yllcorner

    #Shift coordinates so they are relative to the new lower-left corner
    x = x+xllcorner-newxllcorner
    y = y+yllcorner-newyllcorner

    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
    assert len(vertex_points.shape) == 2


    from Numeric import zeros, Float
    grid_points = zeros ( (ncols*nrows, 2), Float )


    #Grid points in row-major order, row i at height i*cellsize
    for i in xrange(nrows):
        yg = i*cellsize
        for j in xrange(ncols):
            xg = j*cellsize
            k = i*ncols + j

            grid_points[k,0] = xg
            grid_points[k,1] = yg

    #Interpolate
    from least_squares import Interpolation


    #FIXME: This should be done with precrop = True, otherwise it'll
    #take forever. With expand_search  set to False, some grid points might
    #miss out....

    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
                           precrop = False, expand_search = True,
                           verbose = verbose)

    #Interpolate using quantity values
    if verbose: print 'Interpolating'
    grid_values = interp.interpolate(q).flat

    #Write
    #Write prj file
    if verbose: print 'Writing %s' %prjfile
    prjid = open(prjfile, 'w')
    prjid.write('Projection    %s\n' %'UTM')
    prjid.write('Zone          %d\n' %zone)
    prjid.write('Datum         %s\n' %datum)
    prjid.write('Zunits        NO\n')
    prjid.write('Units         METERS\n')
    prjid.write('Spheroid      %s\n' %datum)
    prjid.write('Xshift        %d\n' %false_easting)
    prjid.write('Yshift        %d\n' %false_northing)
    prjid.write('Parameters\n')
    prjid.close()



    if verbose: print 'Writing %s' %ascfile
    NODATA_value = -9999

    ascid = open(ascfile, 'w')

    ascid.write('ncols         %d\n' %ncols)
    ascid.write('nrows         %d\n' %nrows)
    ascid.write('xllcorner     %d\n' %newxllcorner)
    ascid.write('yllcorner     %d\n' %newyllcorner)
    ascid.write('cellsize      %f\n' %cellsize)
    ascid.write('NODATA_value  %d\n' %NODATA_value)


    #Get bounding polygon from mesh; grid points outside it get NODATA
    P = interp.mesh.get_boundary_polygon()
    inside_indices = inside_polygon(grid_points, P)

    for i in range(nrows):
        if verbose and i%((nrows+10)/10)==0:
            print 'Doing row %d of %d' %(i, nrows)

        for j in range(ncols):
            #asc rows run north to south, hence the (nrows-i-1) flip
            index = (nrows-i-1)*ncols+j

            if sometrue(inside_indices == index):
                ascid.write('%f ' %grid_values[index])
            else:
                ascid.write('%d ' %NODATA_value)

        ascid.write('\n')

    #Close
    ascid.close()
    fid.close()
3346
3347#********************
3348#*** END OF OBSOLETE FUNCTIONS
3349#***************
3350
3351
3352def tsh2sww(filename, verbose=False): #test_tsh2sww
3353    """
3354    to check if a tsh/msh file 'looks' good.
3355    """
3356
3357    #FIXME Move to data_manager
3358    from shallow_water import Domain
3359    from pmesh2domain import pmesh_to_domain_instance
3360    import time, os
3361    from data_manager import get_dataobject
3362    from os import sep, path
3363    from util import mean
3364
3365    if verbose == True:print 'Creating domain from', filename
3366    domain = pmesh_to_domain_instance(filename, Domain)
3367    if verbose == True:print "Number of triangles = ", len(domain)
3368
3369    domain.smooth = True
3370    domain.format = 'sww'   #Native netcdf visualisation format
3371    file_path, filename = path.split(filename)
3372    filename, ext = path.splitext(filename)
3373    domain.filename = filename
3374    domain.reduction = mean
3375    if verbose == True:print "file_path",file_path
3376    if file_path == "":file_path = "."
3377    domain.set_datadir(file_path)
3378
3379    if verbose == True:
3380        print "Output written to " + domain.get_datadir() + sep + \
3381              domain.filename + "." + domain.format
3382    sww = get_dataobject(domain)
3383    sww.store_connectivity()
3384    sww.store_timestep('stage')
3385
3386
def asc_csiro2sww(bath_dir,
                  elevation_dir,
                  ucur_dir,
                  vcur_dir,
                  sww_file,
                  minlat = None, maxlat = None,
                  minlon = None, maxlon = None,
                  zscale=1,
                  mean_stage = 0,
                  fail_on_NaN = True,
                  elevation_NaN_filler = 0,
                  bath_prefix='ba',
                  elevation_prefix='el',
                  verbose=False):
    """
    Produce an sww boundary file, from esri ascii data from CSIRO.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    assume:
    All files are in esri ascii format

    4 types of information
    bathymetry
    elevation
    u velocity
    v velocity

    Assumptions
    The metadata of all the files is the same
    Each type is in a seperate directory
    One bath file with extention .000
    The time period is less than 24hrs and uniform.

    Parameters (beyond the directories and output file):
    minlat/maxlat/minlon/maxlon: optional crop window in degrees
    zscale: multiplier applied to elevation when computing stage
    mean_stage: constant added to scaled elevation to give stage
    fail_on_NaN: if True, raise on NODATA values in elevation files;
        otherwise fill them with elevation_NaN_filler
    bath_prefix/elevation_prefix: file name prefixes per data type
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from coordinate_transforms.redfearn import redfearn

    precision = Float # So if we want to change the precision its done here

    # go in to the bath dir and load the only file,
    bath_files = os.listdir(bath_dir)
    #print "bath_files",bath_files

    #fixme: make more general?
    bath_file = bath_files[0]
    bath_dir_file =  bath_dir + os.sep + bath_file
    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
    #print "bath_metadata",bath_metadata
    #print "bath_grid",bath_grid

    #Use the date.time of the bath file as a basis for
    #the start time for other files
    base_start = bath_file[-12:]

    #go into the elevation dir and load the 000 file
    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
                         + base_start
    #elevation_metadata,elevation_grid =  _read_asc(elevation_dir_file)
    #print "elevation_dir_file",elevation_dir_file
    #print "elevation_grid", elevation_grid

    elevation_files = os.listdir(elevation_dir)
    ucur_files = os.listdir(ucur_dir)
    vcur_files = os.listdir(vcur_dir)

    # the first elevation file should be the
    # file with the same base name as the bath data
    #print "elevation_files[0]",elevation_files[0]
    #print "'el' + base_start",'el' + base_start
    assert elevation_files[0] == 'el' + base_start

    #assert bath_metadata == elevation_metadata



    number_of_latitudes = bath_grid.shape[0]
    number_of_longitudes = bath_grid.shape[1]
    #number_of_times = len(os.listdir(elevation_dir))
    #number_of_points = number_of_latitudes*number_of_longitudes
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2

    #Grid axes derived from the asc header (lower-left corner + cellsize)
    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
                  for x in range(number_of_longitudes)]
    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
                 for y in range(number_of_latitudes)]

     # reverse order of lat, so the fist lat represents the first grid row
    latitudes.reverse()

    #print "latitudes - before _get_min_max_indexes",latitudes
    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes,longitudes,
                                                 minlat=minlat, maxlat=maxlat,
                                                 minlon=minlon, maxlon=maxlon)


    #Crop everything to the requested window
    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
    #print "bath_grid",bath_grid
    latitudes = latitudes[kmin:kmax]
    longitudes = longitudes[lmin:lmax]
    number_of_latitudes = len(latitudes)
    number_of_longitudes = len(longitudes)
    number_of_times = len(os.listdir(elevation_dir))
    number_of_points = number_of_latitudes*number_of_longitudes
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
    #print "number_of_points",number_of_points

    #Work out the times
    if len(elevation_files) > 1:
        # Assume: The time period is less than 24hrs.
        # Hours encoded in the last 3 chars of the file name, converted
        # to seconds.
        time_period = (int(elevation_files[1][-3:]) - \
                      int(elevation_files[0][-3:]))*60*60
        times = [x*time_period for x in range(len(elevation_files))]
    else:
        times = [0.0]
    #print "times", times
    #print "number_of_latitudes", number_of_latitudes
    #print "number_of_longitudes", number_of_longitudes
    #print "number_of_times", number_of_times
    #print "latitudes", latitudes
    #print "longitudes", longitudes


    if verbose:
        print '------------------------------------------------'
        print 'Statistics:'
        print '  Extent (lat/lon):'
        print '    lat in [%f, %f], len(lat) == %d'\
              %(min(latitudes), max(latitudes),
                len(latitudes))
        print '    lon in [%f, %f], len(lon) == %d'\
              %(min(longitudes), max(longitudes),
                len(longitudes))
        print '    t in [%f, %f], len(t) == %d'\
              %(min(times), max(times), len(times))

    ######### WRITE THE SWW FILE #############
    # NetCDF file definition
    outfile = NetCDFFile(sww_file, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'Converted from XXX'


    #For sww compatibility
    outfile.smoothing = 'Yes'
    outfile.order = 1

    #Start time in seconds since the epoch (midnight 1/1/1970)
    outfile.starttime = starttime = times[0]


    # dimension definitions
    outfile.createDimension('number_of_volumes', number_of_volumes)

    outfile.createDimension('number_of_vertices', 3)
    outfile.createDimension('number_of_points', number_of_points)
    outfile.createDimension('number_of_timesteps', number_of_times)

    # variable definitions
    outfile.createVariable('x', precision, ('number_of_points',))
    outfile.createVariable('y', precision, ('number_of_points',))
    outfile.createVariable('elevation', precision, ('number_of_points',))

    #FIXME: Backwards compatibility
    outfile.createVariable('z', precision, ('number_of_points',))
    #################################

    outfile.createVariable('volumes', Int, ('number_of_volumes',
                                            'number_of_vertices'))

    outfile.createVariable('time', precision,
                           ('number_of_timesteps',))

    outfile.createVariable('stage', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('xmomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('ymomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    #Store
    from coordinate_transforms.redfearn import redfearn
    x = zeros(number_of_points, Float)  #Easting
    y = zeros(number_of_points, Float)  #Northing

    if verbose: print 'Making triangular grid'
    #Get zone of 1st point.
    refzone, _, _ = redfearn(latitudes[0],longitudes[0])

    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):
        for l, lon in enumerate(longitudes):

            vertices[l,k] = i

            zone, easting, northing = redfearn(lat,lon)

            #NOTE(review): the zone check below is disabled; msg is unused
            msg = 'Zone boundary crossed at longitude =', lon
            #assert zone == refzone, msg
            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
            x[i] = easting
            y[i] = northing
            i += 1


    #Construct 2 triangles per 'rectangular' element
    volumes = []
    for l in range(number_of_longitudes-1):    #X direction
        for k in range(number_of_latitudes-1): #Y direction
            v1 = vertices[l,k+1]
            v2 = vertices[l,k]
            v3 = vertices[l+1,k+1]
            v4 = vertices[l+1,k]

            #Note, this is different to the ferrit2sww code
            #since the order of the lats is reversed.
            volumes.append([v1,v3,v2]) #Upper element
            volumes.append([v4,v2,v3]) #Lower element

    volumes = array(volumes)

    #Georeference anchored at the minimum easting/northing
    geo_ref = Geo_reference(refzone,min(x),min(y))
    geo_ref.write_NetCDF(outfile)

    # This will put the geo ref in the middle
    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)


    if verbose:
        print '------------------------------------------------'
        print 'More Statistics:'
        print '  Extent (/lon):'
        print '    x in [%f, %f], len(lat) == %d'\
              %(min(x), max(x),
                len(x))
        print '    y in [%f, %f], len(lon) == %d'\
              %(min(y), max(y),
                len(y))
        print 'geo_ref: ',geo_ref

    z = resize(bath_grid,outfile.variables['z'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['z'][:] = z
    outfile.variables['elevation'][:] = z  #FIXME HACK
    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64

    # do this to create an ok sww file.
    #outfile.variables['time'][:] = [0]   #Store time relative
    #outfile.variables['stage'] = z
    # put the next line up in the code after outfile.order = 1
    #number_of_times = 1

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    outfile.variables['time'][:] = times   #Store time relative

    if verbose: print 'Converting quantities'
    n = number_of_times
    for j in range(number_of_times):
        # load in files
        elevation_meta, elevation_grid = \
           _read_asc(elevation_dir + os.sep + elevation_files[j])

        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])

        #print "elevation_grid",elevation_grid
        #cut matrix to desired size
        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
        #print "elevation_grid",elevation_grid
        # handle missing values
        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if sometrue (missing):
            if fail_on_NaN:
                msg = 'File %s contains missing values'\
                      %(elevation_files[j])
                #NOTE(review): raising a string is Python 2 only
                raise msg
            else:
                elevation_grid = elevation_grid*(missing==0) + \
                                 missing*elevation_NaN_filler


        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
        i = 0
        for k in range(number_of_latitudes):      #Y direction
            for l in range(number_of_longitudes): #X direction
                #stage = scaled elevation + mean stage;
                #momentum = velocity * water depth (stage - bed)
                w = zscale*elevation_grid[k,l] + mean_stage
                stage[j,i] = w
                h = w - z[i]
                xmomentum[j,i] = u_momentum_grid[k,l]*h
                ymomentum[j,i] = v_momentum_grid[k,l]*h
                i += 1
    outfile.close()
3694
3695def _get_min_max_indexes(latitudes,longitudes,
3696                        minlat=None, maxlat=None,
3697                        minlon=None, maxlon=None):
3698    """
3699    return max, min indexes (for slicing) of the lat and long arrays to cover the area
3700    specified with min/max lat/long
3701
3702    Think of the latitudes and longitudes describing a 2d surface.
3703    The area returned is, if possible, just big enough to cover the
3704    inputed max/min area. (This will not be possible if the max/min area
3705    has a section outside of the latitudes/longitudes area.)
3706
3707    assume latitudess & longitudes are sorted,
3708    long - from low to high (west to east, eg 148 - 151)
3709    lat - from high to low (north to south, eg -35 - -38)
3710    """
3711
3712    # reverse order of lat, so it's in ascending order
3713    latitudes.reverse()
3714    largest_lat_index = len(latitudes)-1
3715    #Cut out a smaller extent.
3716    if minlat == None:
3717        lat_min_index = 0
3718    else:
3719        lat_min_index = searchsorted(latitudes, minlat)-1
3720        if lat_min_index <0:
3721            lat_min_index = 0
3722
3723
3724    if maxlat == None:
3725        lat_max_index = largest_lat_index #len(latitudes)
3726    else:
3727        lat_max_index = searchsorted(latitudes, maxlat)
3728        if lat_max_index > largest_lat_index:
3729            lat_max_index = largest_lat_index
3730
3731    if minlon == None:
3732        lon_min_index = 0
3733    else:
3734        lon_min_index = searchsorted(longitudes, minlon)-1
3735        if lon_min_index <0:
3736            lon_min_index = 0
3737
3738    if maxlon == None:
3739        lon_max_index = len(longitudes)
3740    else:
3741        lon_max_index = searchsorted(longitudes, maxlon)
3742
3743    #Take into account that the latitude list was reversed
3744    latitudes.reverse() # Python passes by reference, need to swap back
3745    lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
3746                                   largest_lat_index - lat_min_index
3747    lat_max_index = lat_max_index + 1 # taking into account how slicing works
3748    lon_max_index = lon_max_index + 1 # taking into account how slicing works
3749
3750    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
3751
3752
3753def _read_asc(filename, verbose=False):
3754    """Read esri file from the following ASCII format (.asc)
3755
3756    Example:
3757
3758    ncols         3121
3759    nrows         1800
3760    xllcorner     722000
3761    yllcorner     5893000
3762    cellsize      25
3763    NODATA_value  -9999
3764    138.3698 137.4194 136.5062 135.5558 ..........
3765
3766    """
3767
3768    datafile = open(filename)
3769
3770    if verbose: print 'Reading DEM from %s' %(filename)
3771    lines = datafile.readlines()
3772    datafile.close()
3773
3774    if verbose: print 'Got', len(lines), ' lines'
3775
3776    ncols = int(lines.pop(0).split()[1].strip())
3777    nrows = int(lines.pop(0).split()[1].strip())
3778    xllcorner = float(lines.pop(0).split()[1].strip())
3779    yllcorner = float(lines.pop(0).split()[1].strip())
3780    cellsize = float(lines.pop(0).split()[1].strip())
3781    NODATA_value = float(lines.pop(0).split()[1].strip())
3782
3783    assert len(lines) == nrows
3784
3785    #Store data
3786    grid = []
3787
3788    n = len(lines)
3789    for i, line in enumerate(lines):
3790        cells = line.split()
3791        assert len(cells) == ncols
3792        grid.append(array([float(x) for x in cells]))
3793    grid = array(grid)
3794
3795    return {'xllcorner':xllcorner,
3796            'yllcorner':yllcorner,
3797            'cellsize':cellsize,
3798            'NODATA_value':NODATA_value}, grid
Note: See TracBrowser for help on using the repository browser.