# source: inundation/pyvolution/data_manager.py @ 2334

[2105]1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topograhies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST bundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
47TSH + Boundary SWW -> SWW: Simluation using pyvolution
48
49"""
import os

from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
     searchsorted, zeros, allclose, around


from coordinate_transforms.geo_reference import Geo_reference

def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s
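
# Worked example (illustrative):
#   make_filename(' Model output (case 1) ') returns 'Model_output_case_1'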


def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode = os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print 'Using directory %s instead' %path

    return path



def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" %X

        os.rmdir(path)



def create_filename(datadir, filename, format, size=None, time=None):

    import os
    #from config import data_dir

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' %size

    if time is not None:
        FN += '_time%.2f' %time

    FN += '.' + format
    return FN
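
# Worked example (illustrative, assuming directory 'data' exists and a unix
# path separator):
#   create_filename('data', 'run', 'sww', size=4, time=1.0)
#   returns 'data/run_size4_time1.00.sww'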


def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob

    import os
    #from config import data_dir

    dir = check_dir(datadir)

    #Note: check_dir returns the path with a trailing separator and
    #create_filename writes names of the form <filename>_size<size>...
    pattern = dir + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)



#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], \
               "Mode %s must be either 'w' (write), 'r' (read) or 'a' (append)" %mode

        #Create filename
        #self.filename = create_filename(domain.get_datadir(),
        #domain.get_name(), extension, len(domain))


        self.filename = create_filename(domain.get_datadir(),
                        domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0
        self.number_of_volumes = len(domain)
        self.domain = domain


        #FIXME: Should we have a general set_precision function?



#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #Maximal file size (2GB default)
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from pyvolution suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)


        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        #minimum_allowed_depth = 0.001
        minimum_allowed_depth = 0.0  #FIXME pass in or read from domain
        from Numeric import choose

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise IOError, msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #In order to get the file name and start time correct,
            #I change the domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #Write a filename addon that won't break Swollen's reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #Remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.filename
            if not self.recursion:
                self.domain.filename = self.domain.filename+filename_ext

            #Change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #Build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #Set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.filename = old_domain_filename
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    A = choose( A-z[:] >= minimum_allowed_depth, (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
                #eval( name + '[i,:] = A.astype(self.precision)' )
                #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()
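
# Typical use of Data_format_sww (illustrative sketch; `domain` is a
# pyvolution domain object):
#
#   writer = Data_format_sww(domain)
#   writer.store_connectivity()   #Mesh and bed elevation, stored once
#   writer.store_timestep(['stage', 'xmomentum', 'ymomentum'])  #Per timestep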



#Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt) for checkpoint data
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y coordinates of triangles and their connectivity.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision = self.precision)


        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        #Note: the checkpoint file defines no static elevation variable,
        #so Z is not stored here.

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                          %self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise IOError, msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        stage = fid.variables['stage']
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                                  precision = self.precision)

        stage[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()





#Class for storing xya output
#FIXME Not done yet for this version
class Data_format_xya(Data_format):
    """Interface to ASCII format (.xya) for storing vertex values
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store_all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        if domain.smooth is True:
            number_of_points = len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file
        """
        pass


#Auxiliary
def write_obj(filename, x, y, z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N==3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))

    outfile.close()
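
# Illustrative output of write_obj for a single triangle (M=1) with
# hypothetical coordinates:
#
#   # Triangulation as an obj file
#   v 0.000000 0.000000 0.000000
#   v 1.000000 0.000000 0.000000
#   v 0.000000 1.000000 0.000000
#   f 1 2 3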


#########################################################
#Conversion routines
########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)


def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float,tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        #Note: glob returns full paths, so don't prepend data_dir again
        lines = open(filename, 'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise ValueError, 'Could not extract time from filename %s' %filename


        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float,tokens)

            for j in range(3):
                z[i,j] = values[j]


        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)


def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()
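
# Usage sketch (illustrative filenames): keep every 5th timestep to obtain a
# smaller file for visualisation:
#
#   filter_netcdf('run.sww', 'run_coarse.sww', first=0, step=5)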


#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename
    """

    cls = eval('Data_format_%s' %domain.format)
    return cls(domain, mode)
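
# The class is looked up by name, so a domain with domain.format == 'sww'
# yields a Data_format_sww instance, e.g. (illustrative):
#
#   writer = get_dataobject(domain)  #Equivalent to Data_format_sww(domain, 'w')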

def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Get NetCDF
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print 'Reading xya points from %s' %(root + ext)

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        if len(fields) != 3:
            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line)

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except:
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    write_ptsfile(ptsname, points, attribute, attribute_name)
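
# Usage sketch (illustrative): convert points.xya to points.pts, keeping every
# second line and applying a hypothetical datum shift to the third field:
#
#   xya2pts('points', stride=2, z_func=lambda z: z - 0.5)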


######Obsoleted by export_points in load_mesh
def write_ptsfile(ptsname, points, attribute, attribute_name = None,
                  zone=None, xllcorner=None, yllcorner=None):
    """Write points and associated attribute to pts (NetCDF) format
    """

    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'

    from Numeric import Float

    if attribute_name is None:
        attribute_name = 'attribute'


    from Scientific.IO.NetCDF import NetCDFFile

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')


    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'


    #Georeferencing
    from coordinate_transforms.geo_reference import Geo_reference
    if zone is None:
        assert xllcorner is None, 'xllcorner must be None'
        assert yllcorner is None, 'yllcorner must be None'
        Geo_reference().write_NetCDF(outfile)
    else:
        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)



    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    # Get handles to the variables
    nc_points = outfile.variables['points']
    nc_attribute = outfile.variables[attribute_name]

    #Store data
    nc_points[:, :] = points
    nc_attribute[:] = attribute

    outfile.close()


def dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                          'of spatial point data'

    #Assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #Compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows


    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))
    outfile.createDimension('number_of_points', nrows_in_bounding_box*ncols_in_bounding_box)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data
    #FIXME: Could perhaps be faster using array operations (Fixed 27/7/05)
    global_index = 0
    for i in range(nrows):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index
        tpoints = zeros((ncols_in_bounding_box, 2), Float)
        telev = zeros(ncols_in_bounding_box, Float)
        local_index = 0

        y = (nrows-i)*cellsize + yllcorner
        for j in range(ncols):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + ncols_in_bounding_box:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nrows_in_bounding_box*ncols_in_bounding_box, 'index not equal to number of points'

    infile.close()
    outfile.close()
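
# Usage sketch (illustrative): convert topo.dem to topo.pts, clipped to a
# hypothetical bounding box in UTM coordinates:
#
#   dem2pts('topo', easting_min=722000, easting_max=725000,
#           northing_min=5893000, northing_max=5895000, verbose=True)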



def _read_hecras_cross_sections(lines):
    """Return block of surface lines for each cross section
    Starts with SURFACE LINE,
    Ends with END CROSS-SECTION
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if lines[i].strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if lines[i].strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])




def hecras_cross_sections2pts(basename_in,
                              basename_out=None,
                              verbose=False):
    """Read HEC-RAS Elevation data from the following ASCII format (.sdf)

    Example:


# RAS export file created on Mon 15Aug2005 11:42
# by HEC-RAS Version 3.1.1

BEGIN HEADER:
  UNITS: METRIC
  DTM TYPE: TIN
  DTM: v:\1\cit\perth_topo\river_tin
  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
  MAP PROJECTION: UTM
  PROJECTION ZONE: 50
  DATUM: AGD66
  VERTICAL DATUM:
  NUMBER OF REACHES:  19
  NUMBER OF CROSS-SECTIONS:  14206
END HEADER:


Only the SURFACE LINE data of the following form will be utilised

  CROSS-SECTION:
    STREAM ID:Southern-Wungong
    REACH ID:Southern-Wungong
    STATION:19040.*
    CUT LINE:
      405548.671603161 , 6438142.7594925
      405734.536092045 , 6438326.10404912
      405745.130459356 , 6438331.48627354
      405813.89633823 , 6438368.6272789
    SURFACE LINE:
     405548.67,   6438142.76,   35.37
     405552.24,   6438146.28,   35.41
     405554.78,   6438148.78,   35.44
     405555.80,   6438149.79,   35.44
     405559.37,   6438153.31,   35.45
     405560.88,   6438154.81,   35.44
     405562.93,   6438156.83,   35.42
     405566.50,   6438160.35,   35.38
     405566.99,   6438160.83,   35.37
     ...
   END CROSS-SECTION

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape

    root = basename_in

    #Get ASCII file
    infile = open(root + '.sdf', 'r')  #Open SDF file for read

    if verbose: print 'Reading DEM from %s' %(root + '.sdf')

    lines = infile.readlines()
    infile.close()

    if verbose: print 'Converting to pts format'

    i = 0
    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
        i += 1

    assert lines[i].strip().upper() == 'BEGIN HEADER:'
    i += 1

    assert lines[i].strip().upper().startswith('UNITS:')
    units = lines[i].strip().split()[1]
    i += 1

    assert lines[i].strip().upper().startswith('DTM TYPE:')
    i += 1

    assert lines[i].strip().upper().startswith('DTM:')
    i += 1

    assert lines[i].strip().upper().startswith('STREAM')
    i += 1

    assert lines[i].strip().upper().startswith('CROSS')
    i += 1

    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
    projection = lines[i].strip().split(':')[1]
    i += 1

    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
    zone = int(lines[i].strip().split(':')[1])
    i += 1

    assert lines[i].strip().upper().startswith('DATUM:')
    datum = lines[i].strip().split(':')[1]
    i += 1

    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
    i += 1

    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
    i += 1

    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
    number_of_cross_sections = int(lines[i].strip().split(':')[1])
    i += 1


    #Now read all points
    points = []
    elevation = []
    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
        for k, entry in enumerate(entries):
            points.append(entry[:2])
            elevation.append(entry[2])


    msg = 'Actual number of cross sections (%d) does not match reported number (%d)'\
          %(j+1, number_of_cross_sections)
    assert j+1 == number_of_cross_sections, msg

    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    #FIXME (DSG-ON): use loadASCII export_points_file
    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable ' +\
                          'storage of spatial point data derived from HEC-RAS'

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = 0.0
    outfile.yllcorner = 0.0
    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
    outfile.false_northing = 1000000

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    outfile.variables['points'][:] = points
    outfile.variables['elevation'][:] = elevation

    outfile.close()
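
# Usage sketch (illustrative): convert a HEC-RAS export river.sdf to river.pts:
#
#   hecras_cross_sections2pts('river', verbose=True)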
1429
1430
1431
1432def sww2dem(basename_in, basename_out = None,
1433            quantity = None,
1434            timestep = None,
1435            reduction = None,
1436            cellsize = 10,
1437            NODATA_value = -9999,
1438            easting_min = None,
1439            easting_max = None,
1440            northing_min = None,
1441            northing_max = None,
1442            expand_search = False, #To avoid intractable situations (This will be fixed when least_squares gets redesigned)
1443            verbose = False,
1444            origin = None,
1445            datum = 'WGS84',
1446            format = 'ers'):
1447
1448    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)
1449
1450    Example (ASC):
1451
1452    ncols         3121
1453    nrows         1800
1454    xllcorner     722000
1455    yllcorner     5893000
1456    cellsize      25
1457    NODATA_value  -9999
1458    138.3698 137.4194 136.5062 135.5558 ..........
1459
1460    Also write accompanying file with same basename_in but extension .prj
1461    used to fix the UTM zone, datum, false northings and eastings.
1462
1463    The prj format is assumed to be as
1464
1465    Projection    UTM
1466    Zone          56
1467    Datum         WGS84
1468    Zunits        NO
1469    Units         METERS
1470    Spheroid      WGS84
1471    Xshift        0.0000000000
1472    Yshift        10000000.0000000000
1473    Parameters
1474
1475
1476    The parameter quantity must be the name of an existing quantity or
1477    an expression involving existing quantities. The default is
1478    'elevation'.
1479
1480    if timestep (an index) is given, output quantity at that timestep
1481
1482    if reduction is given use that to reduce quantity over all timesteps.
1483
1484    datum
1485
1486    format can be either 'asc' or 'ers'
1487    """
1488
1489    import sys
1490    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1491    from Numeric import array2string
1492
1493    from utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
1494    from util import apply_expression_to_dictionary
1495
1496    msg = 'Format must be either asc or ers'
1497    assert format.lower() in ['asc', 'ers'], msg
1498
1499
1500    false_easting = 500000
1501    false_northing = 10000000
1502
1503    if quantity is None:
1504        quantity = 'elevation'
1505
1506    if reduction is None:
1507        reduction = max
1508
1509    if basename_out is None:
1510        basename_out = basename_in + '_%s' %quantity
1511
1512    swwfile = basename_in + '.sww'
1513    demfile = basename_out + '.' + format
1514    # Note the use of a .ers extension is optional (write_ermapper_grid will
1515    # deal with either option
1516
1517    #Read sww file
1518    if verbose: print 'Reading from %s' %swwfile
1519    from Scientific.IO.NetCDF import NetCDFFile
1520    fid = NetCDFFile(swwfile)
1521
1522    #Get extent and reference
1523    x = fid.variables['x'][:]
1524    y = fid.variables['y'][:]
1525    volumes = fid.variables['volumes'][:]
1526
1527    number_of_timesteps = fid.dimensions['number_of_timesteps']
1528    number_of_points = fid.dimensions['number_of_points']
1529    if origin is None:
1530
1531        #Get geo_reference
1532        #sww files don't have to have a geo_ref
1533        try:
1534            geo_reference = Geo_reference(NetCDFObject=fid)
1535        except AttributeError, e:
1536            geo_reference = Geo_reference() #Default georef object
1537
1538        xllcorner = geo_reference.get_xllcorner()
1539        yllcorner = geo_reference.get_yllcorner()
1540        zone = geo_reference.get_zone()
1541    else:
1542        zone = origin[0]
1543        xllcorner = origin[1]
1544        yllcorner = origin[2]
1545
1546
1547
1548    #FIXME: Refactor using code from file_function.statistics
1549    #Something like print swwstats(swwname)
1550    if verbose:
1551        x = fid.variables['x'][:]
1552        y = fid.variables['y'][:]
1553        times = fid.variables['time'][:]
1554        print '------------------------------------------------'
1555        print 'Statistics of SWW file:'
1556        print '  Name: %s' %swwfile
1557        print '  Reference:'
1558        print '    Lower left corner: [%f, %f]'\
1559              %(xllcorner, yllcorner)
1560        print '    Start time: %f' %fid.starttime[0]
1561        print '  Extent:'
1562        print '    x [m] in [%f, %f], len(x) == %d'\
1563              %(min(x.flat), max(x.flat), len(x.flat))
1564        print '    y [m] in [%f, %f], len(y) == %d'\
1565              %(min(y.flat), max(y.flat), len(y.flat))
1566        print '    t [s] in [%f, %f], len(t) == %d'\
1567              %(min(times), max(times), len(times))
1568        print '  Quantities [SI units]:'
1569        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
1570            q = fid.variables[name][:].flat
1571            print '    %s in [%f, %f]' %(name, min(q), max(q))
1572
1573
1574
1575
1576
1577    #Get quantity and reduce if applicable
1578    if verbose: print 'Processing quantity %s' %quantity
1579
1580    #Turn NetCDF objects into Numeric arrays
1581    quantity_dict = {}
1582    for name in fid.variables.keys():
1583        quantity_dict[name] = fid.variables[name][:]
1584
1585
1586    q = apply_expression_to_dictionary(quantity, quantity_dict)
1587
1588
1589
1590    if len(q.shape) == 2:
1591        #q has a time component and needs to be reduced along
1592        #the temporal dimension
1593        if verbose: print 'Reducing quantity %s' %quantity
1594        q_reduced = zeros( number_of_points, Float )
1595
1596        for k in range(number_of_points):
1597            q_reduced[k] = reduction( q[:,k] )
1598
1599        q = q_reduced
1600
1601    #Post condition: Now q has dimension: number_of_points
1602    assert len(q.shape) == 1
1603    assert q.shape[0] == number_of_points
1604
1605
1606    if verbose:
1607        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1608
1609
1610    #Create grid and update xll/yll corner and x,y
1611
1612    #Relative extent
1613    if easting_min is None:
1614        xmin = min(x)
1615    else:
1616        xmin = easting_min - xllcorner
1617
1618    if easting_max is None:
1619        xmax = max(x)
1620    else:
1621        xmax = easting_max - xllcorner
1622
1623    if northing_min is None:
1624        ymin = min(y)
1625    else:
1626        ymin = northing_min - yllcorner
1627
1628    if northing_max is None:
1629        ymax = max(y)
1630    else:
1631        ymax = northing_max - yllcorner
1632
1633
1634
1635    if verbose: print 'Creating grid'
1636    ncols = int((xmax-xmin)/cellsize)+1
1637    nrows = int((ymax-ymin)/cellsize)+1
1638
1639
1640    #New absolute reference and coordinates
1641    newxllcorner = xmin+xllcorner
1642    newyllcorner = ymin+yllcorner
1643
1644    x = x+xllcorner-newxllcorner
1645    y = y+yllcorner-newyllcorner
1646
1647    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
1648    assert len(vertex_points.shape) == 2
1649
1650
1651
1652    grid_points = zeros ( (ncols*nrows, 2), Float )
1653
1654
1655    for i in xrange(nrows):
1656        if format.lower() == 'asc':
1657            yg = i*cellsize
1658        else:
1659            #this will flip the order of the y values for ers
1660            yg = (nrows-i)*cellsize
1661
1662        for j in xrange(ncols):
1663            xg = j*cellsize
1664            k = i*ncols + j
1665
1666            grid_points[k,0] = xg
1667            grid_points[k,1] = yg
1668
1669    #Interpolate
1670    from least_squares import Interpolation
1671
1672
1673    #FIXME: This should be done with precrop = True (?), otherwise it'll
1674    #take forever. With expand_search  set to False, some grid points might
1675    #miss out.... This will be addressed though Duncan's refactoring of least_squares
1676
1677    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
1678                           precrop = False,
1679                           expand_search = expand_search,
1680                           verbose = verbose)
1681
1682
1683
1684    #Interpolate using quantity values
1685    if verbose: print 'Interpolating'
1686    grid_values = interp.interpolate(q).flat
1687
1688
1689    if verbose:
1690        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
1691                                                      max(grid_values))
1692
1693    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
1694    P = interp.mesh.get_boundary_polygon()
1695    outside_indices = outside_polygon(grid_points, P, closed=True)
1696
1697    for i in outside_indices:
1698        grid_values[i] = NODATA_value
1699
1700
1701
1702
1703    if format.lower() == 'ers':
1704        # setup ERS header information
1705        grid_values = reshape(grid_values,(nrows, ncols))
1706        header = {}
1707        header['datum'] = '"' + datum + '"'
1708        # FIXME The use of hardwired UTM and zone number needs to be made optional
1709        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
1710        header['projection'] = '"UTM-' + str(zone) + '"'
1711        header['coordinatetype'] = 'EN'
1712        if header['coordinatetype'] == 'LL':
1713            header['longitude'] = str(newxllcorner)
1714            header['latitude'] = str(newyllcorner)
1715        elif header['coordinatetype'] == 'EN':
1716            header['eastings'] = str(newxllcorner)
1717            header['northings'] = str(newyllcorner)
1718        header['nullcellvalue'] = str(NODATA_value)
1719        header['xdimension'] = str(cellsize)
1720        header['ydimension'] = str(cellsize)
1721        header['value'] = '"' + quantity + '"'
1722        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
1723
1724
1725        #Write
1726        if verbose: print 'Writing %s' %demfile
1727        import ermapper_grids
1728        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
1729
1730        fid.close()
1731    else:
1732        #Write to Ascii format
1733
1734        #Write prj file
1735        prjfile = basename_out + '.prj'
1736
1737        if verbose: print 'Writing %s' %prjfile
1738        prjid = open(prjfile, 'w')
1739        prjid.write('Projection    %s\n' %'UTM')
1740        prjid.write('Zone          %d\n' %zone)
1741        prjid.write('Datum         %s\n' %datum)
1742        prjid.write('Zunits        NO\n')
1743        prjid.write('Units         METERS\n')
1744        prjid.write('Spheroid      %s\n' %datum)
1745        prjid.write('Xshift        %d\n' %false_easting)
1746        prjid.write('Yshift        %d\n' %false_northing)
1747        prjid.write('Parameters\n')
1748        prjid.close()
1749
1750
1751
1752        if verbose: print 'Writing %s' %demfile
1753
1754        ascid = open(demfile, 'w')
1755
1756        ascid.write('ncols         %d\n' %ncols)
1757        ascid.write('nrows         %d\n' %nrows)
1758        ascid.write('xllcorner     %d\n' %newxllcorner)
1759        ascid.write('yllcorner     %d\n' %newyllcorner)
1760        ascid.write('cellsize      %f\n' %cellsize)
1761        ascid.write('NODATA_value  %d\n' %NODATA_value)
1762
1763
1764        #Get bounding polygon from mesh
1765        #P = interp.mesh.get_boundary_polygon()
1766        #inside_indices = inside_polygon(grid_points, P)
1767
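        #Note: .asc rows are written from north to south, while
        #grid_values are ordered from south to north; hence the
        #flipped base_index below.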
1768        for i in range(nrows):
1769            if verbose and i%((nrows+10)/10)==0:
1770                print 'Doing row %d of %d' %(i, nrows)
1771
1772            base_index = (nrows-i-1)*ncols
1773
1774            slice = grid_values[base_index:base_index+ncols]
1775            s = array2string(slice, max_line_width=sys.maxint)
1776            ascid.write(s[1:-1] + '\n')
1777
1778
1779            #print
1780            #for j in range(ncols):
1781            #    index = base_index+j#
1782            #    print grid_values[index],
1783            #    ascid.write('%f ' %grid_values[index])
1784            #ascid.write('\n')
1785
1786        #Close
1787        ascid.close()
1788        fid.close()
1789
1790#Backwards compatibility
1791def sww2asc(basename_in, basename_out = None,
1792            quantity = None,
1793            timestep = None,
1794            reduction = None,
1795            cellsize = 10,
1796            verbose = False,
1797            origin = None):
1798    print 'sww2asc will soon be obsoleted - please use sww2dem'
1799    sww2dem(basename_in,
1800            basename_out = basename_out,
1801            quantity = quantity,
1802            timestep = timestep,
1803            reduction = reduction,
1804            cellsize = cellsize,
1805            verbose = verbose,
1806            origin = origin,
1807            datum = 'WGS84',
1808            format = 'asc')
1809
1810def sww2ers(basename_in, basename_out = None,
1811            quantity = None,
1812            timestep = None,
1813            reduction = None,
1814            cellsize = 10,
1815            verbose = False,
1816            origin = None,
1817            datum = 'WGS84'):
1818    print 'sww2ers will soon be obsoleted - please use sww2dem'
1819    sww2dem(basename_in,
1820            basename_out = basename_out,
1821            quantity = quantity,
1822            timestep = timestep,
1823            reduction = reduction,
1824            cellsize = cellsize,
1825            verbose = verbose,
1826            origin = origin,
1827            datum = datum,
1828            format = 'ers')
1829################################# END COMPATIBILITY ##############
1830
1831
1832
1833
1834def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
1835                                  verbose=False):
1836    """Read Digitial Elevation model from the following ASCII format (.asc)
1837
1838    Example:
1839
1840    ncols         3121
1841    nrows         1800
1842    xllcorner     722000
1843    yllcorner     5893000
1844    cellsize      25
1845    NODATA_value  -9999
1846    138.3698 137.4194 136.5062 135.5558 ..........
1847
1848    Convert basename_in + '.asc' to NetCDF format (.dem)
1849    mimicking the ASCII format closely.
1850
1851
1852    An accompanying file with same basename_in but extension .prj must exist
1853    and is used to fix the UTM zone, datum, false northings and eastings.
1854
1855    The prj format is assumed to be as
1856
1857    Projection    UTM
1858    Zone          56
1859    Datum         WGS84
1860    Zunits        NO
1861    Units         METERS
1862    Spheroid      WGS84
1863    Xshift        0.0000000000
1864    Yshift        10000000.0000000000
1865    Parameters
1866    """
1867
1868    import os
1869    from Scientific.IO.NetCDF import NetCDFFile
1870    from Numeric import Float, array
1871
1872    #root, ext = os.path.splitext(basename_in)
1873    root = basename_in
1874
1875    ###########################################
1876    # Read Meta data
1877    if verbose: print 'Reading METADATA from %s' %root + '.prj'
1878    metadatafile = open(root + '.prj')
1879    metalines = metadatafile.readlines()
1880    metadatafile.close()
1881
1882    L = metalines[0].strip().split()
1883    assert L[0].strip().lower() == 'projection'
1884    projection = L[1].strip()                   #TEXT
1885
1886    L = metalines[1].strip().split()
1887    assert L[0].strip().lower() == 'zone'
1888    zone = int(L[1].strip())
1889
1890    L = metalines[2].strip().split()
1891    assert L[0].strip().lower() == 'datum'
1892    datum = L[1].strip()                        #TEXT
1893
1894    L = metalines[3].strip().split()
1895    assert L[0].strip().lower() == 'zunits'     #IGNORE
1896    zunits = L[1].strip()                       #TEXT
1897
1898    L = metalines[4].strip().split()
1899    assert L[0].strip().lower() == 'units'
1900    units = L[1].strip()                        #TEXT
1901
1902    L = metalines[5].strip().split()
1903    assert L[0].strip().lower() == 'spheroid'   #IGNORE
1904    spheroid = L[1].strip()                     #TEXT
1905
1906    L = metalines[6].strip().split()
1907    assert L[0].strip().lower() == 'xshift'
1908    false_easting = float(L[1].strip())
1909
1910    L = metalines[7].strip().split()
1911    assert L[0].strip().lower() == 'yshift'
1912    false_northing = float(L[1].strip())
1913
1914    #print false_easting, false_northing, zone, datum
1915
1916
1917    ###########################################
1918    #Read DEM data
1919
1920    datafile = open(basename_in + '.asc')
1921
1922    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
1923    lines = datafile.readlines()
1924    datafile.close()
1925
1926    if verbose: print 'Got', len(lines), ' lines'
1927
1928    ncols = int(lines[0].split()[1].strip())
1929    nrows = int(lines[1].split()[1].strip())
1930    xllcorner = float(lines[2].split()[1].strip())
1931    yllcorner = float(lines[3].split()[1].strip())
1932    cellsize = float(lines[4].split()[1].strip())
1933    NODATA_value = int(lines[5].split()[1].strip())
1934
1935    assert len(lines) == nrows + 6
1936
1937
1938    ##########################################
1939
1940
1941    if basename_out == None:
1942        netcdfname = root + '.dem'
1943    else:
1944        netcdfname = basename_out + '.dem'
1945
1946    if verbose: print 'Store to NetCDF file %s' %netcdfname
1947    # NetCDF file definition
1948    fid = NetCDFFile(netcdfname, 'w')
1949
1950    #Create new file
1951    fid.institution = 'Geoscience Australia'
1952    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
1953                      'of spatial point data'
1954
1955    fid.ncols = ncols
1956    fid.nrows = nrows
1957    fid.xllcorner = xllcorner
1958    fid.yllcorner = yllcorner
1959    fid.cellsize = cellsize
1960    fid.NODATA_value = NODATA_value
1961
1962    fid.zone = zone
1963    fid.false_easting = false_easting
1964    fid.false_northing = false_northing
1965    fid.projection = projection
1966    fid.datum = datum
1967    fid.units = units
1968
1969
1970    # dimension definitions
1971    fid.createDimension('number_of_rows', nrows)
1972    fid.createDimension('number_of_columns', ncols)
1973
1974    # variable definitions
1975    fid.createVariable('elevation', Float, ('number_of_rows',
1976                                            'number_of_columns'))
1977
1978    # Get handles to the variables
1979    elevation = fid.variables['elevation']
1980
1981    #Store data
1982    n = len(lines[6:])
1983    for i, line in enumerate(lines[6:]):
1984        fields = line.split()
1985        if verbose and i%((n+10)/10)==0:
1986            print 'Processing row %d of %d' %(i, nrows)
1987
1988        elevation[i, :] = array([float(x) for x in fields])
1989
1990    fid.close()
1991
1992
1993
1994
1995
1996def ferret2sww(basename_in, basename_out = None,
1997               verbose = False,
1998               minlat = None, maxlat = None,
1999               minlon = None, maxlon = None,
2000               mint = None, maxt = None, mean_stage = 0,
2001               origin = None, zscale = 1,
2002               fail_on_NaN = True,
2003               NaN_filler = 0,
2004               elevation = None,
2005               inverted_bathymetry = False
2006               ): #FIXME: Bathymetry should be obtained
2007                                  #from MOST somehow.
2008                                  #Alternatively from elsewhere
2009                                  #or, as a last resort,
2010                                  #specified here.
2011                                  #The value of -100 will work
2012                                  #for the Wollongong tsunami
2013                                  #scenario but is very hacky
2014    """Convert 'Ferret' NetCDF format for wave propagation to
2015    sww format native to pyvolution.
2016
2017    Specify only basename_in and read files of the form
2018    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2019    relative height, x-velocity and y-velocity, respectively.
2020
2021    Also convert latitude and longitude to UTM. All coordinates are
2022    assumed to be given in the GDA94 datum.
2023
2024    min's and max's: If omitted - full extent is used.
2025    To include a value min may equal it, while max must exceed it.
2026    Lat and lon are assumed to be in decimal degrees.
2027
2028    origin is a 3-tuple with geo referenced
2029    UTM coordinates (zone, easting, northing)
2030
2031    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2032    which means that longitude is the fastest
2033    varying dimension (row major order, so to speak)
2034
2035    ferret2sww uses grid points as vertices in a triangular grid
2036    counting vertices from lower left corner upwards, then right
2037    """
2038
2039    import os
2040    from Scientific.IO.NetCDF import NetCDFFile
2041    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2042    from Numeric import allclose, around
2043
2044    precision = Float
2045
2046
2047    #Get NetCDF data
2048    if verbose: print 'Reading files %s_*.nc' %basename_in
2049    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2050    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2051    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2052    file_e = NetCDFFile(basename_in + '_e.nc', 'r') #Elevation (z) (m)
2053
2054    if basename_out is None:
2055        swwname = basename_in + '.sww'
2056    else:
2057        swwname = basename_out + '.sww'
2058
2059    times = file_h.variables['TIME']
2060    latitudes = file_h.variables['LAT']
2061    longitudes = file_h.variables['LON']
2062
2063
2064
2065    #Precision used by most for lat/lon is 4 or 5 decimals
2066    e_lat = around(file_e.variables['LAT'][:], 5)
2067    e_lon = around(file_e.variables['LON'][:], 5)
2068
2069    #Check that files are compatible
2070    assert allclose(latitudes, file_u.variables['LAT'])
2071    assert allclose(latitudes, file_v.variables['LAT'])
2072    assert allclose(latitudes, e_lat)
2073
2074    assert allclose(longitudes, file_u.variables['LON'])
2075    assert allclose(longitudes, file_v.variables['LON'])
2076    assert allclose(longitudes, e_lon)
2077
2078
2079
2080    if mint == None:
2081        jmin = 0
2082    else:
2083        jmin = searchsorted(times, mint)
2084
2085    if maxt == None:
2086        jmax=len(times)
2087    else:
2088        jmax = searchsorted(times, maxt)
2089
2090    if minlat == None:
2091        kmin=0
2092    else:
2093        kmin = searchsorted(latitudes, minlat)
2094
2095    if maxlat == None:
2096        kmax = len(latitudes)
2097    else:
2098        kmax = searchsorted(latitudes, maxlat)
2099
2100    if minlon == None:
2101        lmin=0
2102    else:
2103        lmin = searchsorted(longitudes, minlon)
2104
2105    if maxlon == None:
2106        lmax = len(longitudes)
2107    else:
2108        lmax = searchsorted(longitudes, maxlon)
2109
2110
2111
2112    times = times[jmin:jmax]
2113    latitudes = latitudes[kmin:kmax]
2114    longitudes = longitudes[lmin:lmax]
2115
2116
2117    if verbose: print 'cropping'
2118    zname = 'ELEVATION'
2119
2120
2121    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2122    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2123    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2124    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2125
2126    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2127    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2128    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2129    #        from Numeric import asarray
2130    #        elevations=elevations.tolist()
2131    #        elevations.reverse()
2132    #        elevations=asarray(elevations)
2133    #    else:
2134    #        from Numeric import asarray
2135    #        elevations=elevations.tolist()
2136    #        elevations.reverse()
2137    #        elevations=asarray(elevations)
2138    #        'print hmmm'
2139
2140
2141
2142    #Get missing values
2143    nan_ha = file_h.variables['HA'].missing_value[0]
2144    nan_ua = file_u.variables['UA'].missing_value[0]
2145    nan_va = file_v.variables['VA'].missing_value[0]
2146    if hasattr(file_e.variables[zname],'missing_value'):
2147        nan_e  = file_e.variables[zname].missing_value[0]
2148    else:
2149        nan_e = None
2150
2151    #Cleanup
2152    from Numeric import sometrue
2153
2154    missing = (amplitudes == nan_ha)
2155    if sometrue (missing):
2156        if fail_on_NaN:
2157            msg = 'NetCDFFile %s contains missing values'\
2158                  %(basename_in+'_ha.nc')
2159            raise msg
2160        else:
2161            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2162
2163    missing = (uspeed == nan_ua)
2164    if sometrue (missing):
2165        if fail_on_NaN:
2166            msg = 'NetCDFFile %s contains missing values'\
2167                  %(basename_in+'_ua.nc')
2168            raise msg
2169        else:
2170            uspeed = uspeed*(missing==0) + missing*NaN_filler
2171
2172    missing = (vspeed == nan_va)
2173    if sometrue (missing):
2174        if fail_on_NaN:
2175            msg = 'NetCDFFile %s contains missing values'\
2176                  %(basename_in+'_va.nc')
2177            raise msg
2178        else:
2179            vspeed = vspeed*(missing==0) + missing*NaN_filler
2180
2181
2182    missing = (elevations == nan_e)
2183    if sometrue (missing):
2184        if fail_on_NaN:
2185            msg = 'NetCDFFile %s contains missing values'\
2186                  %(basename_in+'_e.nc')
2187            raise msg
2188        else:
2189            elevations = elevations*(missing==0) + missing*NaN_filler
2190
2191    #######
2192
2193
2194
2195    number_of_times = times.shape[0]
2196    number_of_latitudes = latitudes.shape[0]
2197    number_of_longitudes = longitudes.shape[0]
2198
2199    assert amplitudes.shape[0] == number_of_times
2200    assert amplitudes.shape[1] == number_of_latitudes
2201    assert amplitudes.shape[2] == number_of_longitudes
2202
2203    if verbose:
2204        print '------------------------------------------------'
2205        print 'Statistics:'
2206        print '  Extent (lat/lon):'
2207        print '    lat in [%f, %f], len(lat) == %d'\
2208              %(min(latitudes.flat), max(latitudes.flat),
2209                len(latitudes.flat))
2210        print '    lon in [%f, %f], len(lon) == %d'\
2211              %(min(longitudes.flat), max(longitudes.flat),
2212                len(longitudes.flat))
2213        print '    t in [%f, %f], len(t) == %d'\
2214              %(min(times.flat), max(times.flat), len(times.flat))
2215
2216        q = amplitudes.flat
2217        name = 'Amplitudes (ha) [cm]'
2218        print '    %s in [%f, %f]' %(name, min(q), max(q))
2219
2220        q = uspeed.flat
2221        name = 'Speeds (ua) [cm/s]'
2222        print '    %s in [%f, %f]' %(name, min(q), max(q))
2223
2224        q = vspeed.flat
2225        name = 'Speeds (va) [cm/s]'
2226        print '    %s in [%f, %f]' %(name, min(q), max(q))
2227
2228        q = elevations.flat
2229        name = 'Elevations (e) [m]'
2230        print '    %s in [%f, %f]' %(name, min(q), max(q))
2231
2232
2233    #print number_of_latitudes, number_of_longitudes
2234    number_of_points = number_of_latitudes*number_of_longitudes
2235    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2236
2237
2238    file_h.close()
2239    file_u.close()
2240    file_v.close()
2241    file_e.close()
2242
2243
2244    # NetCDF file definition
2245    outfile = NetCDFFile(swwname, 'w')
2246
2247    #Create new file
2248    outfile.institution = 'Geoscience Australia'
2249    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2250                          %(basename_in + '_ha.nc',
2251                            basename_in + '_ua.nc',
2252                            basename_in + '_va.nc',
2253                            basename_in + '_e.nc')
2254
2255
2256    #For sww compatibility
2257    outfile.smoothing = 'Yes'
2258    outfile.order = 1
2259
2260    #Start time in seconds since the epoch (midnight 1/1/1970)
2261    outfile.starttime = starttime = times[0]
2262    times = times - starttime  #Store relative times
2263
2264    # dimension definitions
2265    outfile.createDimension('number_of_volumes', number_of_volumes)
2266
2267    outfile.createDimension('number_of_vertices', 3)
2268    outfile.createDimension('number_of_points', number_of_points)
2269
2270
2272    outfile.createDimension('number_of_timesteps', len(times))
2273
2274    # variable definitions
2275    outfile.createVariable('x', precision, ('number_of_points',))
2276    outfile.createVariable('y', precision, ('number_of_points',))
2277    outfile.createVariable('elevation', precision, ('number_of_points',))
2278
2279    #FIXME: Backwards compatibility
2280    outfile.createVariable('z', precision, ('number_of_points',))
2281    #################################
2282
2283    outfile.createVariable('volumes', Int, ('number_of_volumes',
2284                                            'number_of_vertices'))
2285
2286    outfile.createVariable('time', precision,
2287                           ('number_of_timesteps',))
2288
2289    outfile.createVariable('stage', precision,
2290                           ('number_of_timesteps',
2291                            'number_of_points'))
2292
2293    outfile.createVariable('xmomentum', precision,
2294                           ('number_of_timesteps',
2295                            'number_of_points'))
2296
2297    outfile.createVariable('ymomentum', precision,
2298                           ('number_of_timesteps',
2299                            'number_of_points'))
2300
2301
2302    #Store
2303    from coordinate_transforms.redfearn import redfearn
2304    x = zeros(number_of_points, Float)  #Easting
2305    y = zeros(number_of_points, Float)  #Northing
2306
2307
2308    if verbose: print 'Making triangular grid'
2309    #Check zone boundaries
2310    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2311
2312    vertices = {}
2313    i = 0
2314    for k, lat in enumerate(latitudes):       #Y direction
2315        for l, lon in enumerate(longitudes):  #X direction
2316
2317            vertices[l,k] = i
2318
2319            zone, easting, northing = redfearn(lat,lon)
2320
2321            msg = 'Zone boundary crossed at longitude = %s' %lon
2322            #assert zone == refzone, msg
2323            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2324            x[i] = easting
2325            y[i] = northing
2326            i += 1
2327
2328
2329    #Construct 2 triangles per 'rectangular' element
2330    volumes = []
2331    for l in range(number_of_longitudes-1):    #X direction
2332        for k in range(number_of_latitudes-1): #Y direction
2333            v1 = vertices[l,k+1]
2334            v2 = vertices[l,k]
2335            v3 = vertices[l+1,k+1]
2336            v4 = vertices[l+1,k]
2337
2338            volumes.append([v1,v2,v3]) #Upper element
2339            volumes.append([v4,v3,v2]) #Lower element
2340
2341    volumes = array(volumes)
2342
2343    if origin == None:
2344        zone = refzone
2345        xllcorner = min(x)
2346        yllcorner = min(y)
2347    else:
2348        zone = origin[0]
2349        xllcorner = origin[1]
2350        yllcorner = origin[2]
2351
2352
2353    outfile.xllcorner = xllcorner
2354    outfile.yllcorner = yllcorner
2355    outfile.zone = zone
2356
2357
2358    if elevation is not None:
2359        z = elevation
2360    else:
2361        if inverted_bathymetry:
2362            z = -1*elevations
2363        else:
2364            z = elevations
2365        #FIXME: z should be obtained from MOST and passed in here
2366
2367    from Numeric import resize
2368    z = resize(z,outfile.variables['z'][:].shape)
2369    outfile.variables['x'][:] = x - xllcorner
2370    outfile.variables['y'][:] = y - yllcorner
2371    outfile.variables['z'][:] = z             #FIXME HACK
2372    outfile.variables['elevation'][:] = z
2373    outfile.variables['time'][:] = times   #Store time relative
2374    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2375
2376
2377
2378    #Time stepping
2379    stage = outfile.variables['stage']
2380    xmomentum = outfile.variables['xmomentum']
2381    ymomentum = outfile.variables['ymomentum']
2382
2383    if verbose: print 'Converting quantities'
2384    n = len(times)
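    #Note: amplitudes (ha) are in cm and speeds (ua, va) in cm/s, hence
    #the divisions by 100 below. Momentum is speed times depth, where
    #depth h = w - z (stage minus elevation).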
2385    for j in range(n):
2386        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2387        i = 0
2388        for k in range(number_of_latitudes):      #Y direction
2389            for l in range(number_of_longitudes): #X direction
2390                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2391                stage[j,i] = w
2392                h = w - z[i]
2393                xmomentum[j,i] = uspeed[j,k,l]/100*h
2394                ymomentum[j,i] = vspeed[j,k,l]/100*h
2395                i += 1
2396
2397    #outfile.close()
2398
2399    #FIXME: Refactor using code from file_function.statistics
2400    #Something like print swwstats(swwname)
2401    if verbose:
2402        x = outfile.variables['x'][:]
2403        y = outfile.variables['y'][:]
2404        times = outfile.variables['time'][:]
2405        print '------------------------------------------------'
2406        print 'Statistics of output file:'
2407        print '  Name: %s' %swwname
2408        print '  Reference:'
2409        print '    Lower left corner: [%f, %f]'\
2410              %(xllcorner, yllcorner)
2411        print '    Start time: %f' %starttime
2412        print '  Extent:'
2413        print '    x [m] in [%f, %f], len(x) == %d'\
2414              %(min(x.flat), max(x.flat), len(x.flat))
2415        print '    y [m] in [%f, %f], len(y) == %d'\
2416              %(min(y.flat), max(y.flat), len(y.flat))
2417        print '    t [s] in [%f, %f], len(t) == %d'\
2418              %(min(times), max(times), len(times))
2419        print '  Quantities [SI units]:'
2420        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2421            q = outfile.variables[name][:].flat
2422            print '    %s in [%f, %f]' %(name, min(q), max(q))
2423
2424
2425
2426    outfile.close()
2427
2428
2429
2430
2431
2432def timefile2netcdf(filename, quantity_names = None):
2433    """Template for converting typical text files with time series to
2434    NetCDF tms file.
2435
2436
2437    The file format is assumed to be two fields separated by a comma:
2438
2439        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
2440
2441    E.g
2442
2443      31/08/04 00:00:00, 1.328223 0 0
2444      31/08/04 00:15:00, 1.292912 0 0
2445
2446    will provide a time dependent function f(t) with three attributes
2447
2448    filename is assumed to be the rootname with extensions .txt (input) and .tms (output)
2449    """
2450
2451    import time, calendar
2452    from Numeric import array
2453    from config import time_format
2454    from util import ensure_numeric
2455
2456
2457    fid = open(filename + '.txt')
2458    line = fid.readline()
2459    fid.close()
2460
2461    fields = line.split(',')
2462    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
2463    assert len(fields) == 2, msg
2464
2465    try:
2466        starttime = calendar.timegm(time.strptime(fields[0], time_format))
2467    except ValueError:
2468        msg = 'First field in file %s must be' %filename
2469        msg += ' date-time with format %s.\n' %time_format
2470        msg += 'I got %s instead.' %fields[0]
2471        raise msg
2472
2473
2474    #Split values
2475    values = []
2476    for value in fields[1].split():
2477        values.append(float(value))
2478
2479    q = ensure_numeric(values)
2480
2481    msg = 'ERROR: File must contain at least one independent value'
2482    assert len(q.shape) == 1, msg
2483
2484
2485
2486    #Read times proper
2487    from Numeric import zeros, Float, alltrue
2488    from config import time_format
2489    import time, calendar
2490
2491    fid = open(filename + '.txt')
2492    lines = fid.readlines()
2493    fid.close()
2494
2495    N = len(lines)
2496    d = len(q)
2497
2498    T = zeros(N, Float)       #Time
2499    Q = zeros((N, d), Float)  #Values
2500
2501    for i, line in enumerate(lines):
2502        fields = line.split(',')
2503        realtime = calendar.timegm(time.strptime(fields[0], time_format))
2504
2505        T[i] = realtime - starttime
2506
2507        for j, value in enumerate(fields[1].split()):
2508            Q[i, j] = float(value)
2509
2510    msg = 'File %s must list time as a monotonically ' %filename
2511    msg += 'increasing sequence'
2512    assert alltrue( T[1:] - T[:-1] > 0 ), msg
2513
2514
2515    #Create NetCDF file
2516    from Scientific.IO.NetCDF import NetCDFFile
2517
2518    fid = NetCDFFile(filename + '.tms', 'w')
2519
2520
2521    fid.institution = 'Geoscience Australia'
2522    fid.description = 'Time series'
2523
2524
2525    #Reference point
2526    #Start time in seconds since the epoch (midnight 1/1/1970)
2527    #FIXME: Use Georef
2528    fid.starttime = starttime
2529
2530
2531    # dimension definitions
2532    #fid.createDimension('number_of_volumes', self.number_of_volumes)
2533    #fid.createDimension('number_of_vertices', 3)
2534
2535
2536    fid.createDimension('number_of_timesteps', len(T))
2537
2538    fid.createVariable('time', Float, ('number_of_timesteps',))
2539
2540    fid.variables['time'][:] = T
2541
2542    for i in range(Q.shape[1]):
2543        try:
2544            name = quantity_names[i]
2545        except:
2546            name = 'Attribute%d'%i
2547
2548        fid.createVariable(name, Float, ('number_of_timesteps',))
2549        fid.variables[name][:] = Q[:,i]
2550
2551    fid.close()
2552
2553
2554def extent_sww(file_name):
2555    """
2556    Read in an sww file and determine its extent.
2557
2558    Input:
2559    file_name - the sww file
2560
2561    Output:
2562    list of the form [xmin, xmax, ymin, ymax, stagemin, stagemax],
2563    i.e. the extent of the mesh coordinates and the minimum and
2564    maximum stage value over all timesteps and vertices
2567    """
2568
2569
2570    from Scientific.IO.NetCDF import NetCDFFile
2571
2572    #Check contents
2573    #Get NetCDF
2574    fid = NetCDFFile(file_name, 'r')
2575
2576    # Get the variables
2577    x = fid.variables['x'][:]
2578    y = fid.variables['y'][:]
2579    stage = fid.variables['stage'][:]
2580    #print "stage",stage
2581    #print "stage.shap",stage.shape
2582    #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
2583    #print "min(stage)",min(stage)
2584
2585    fid.close()
2586
2587    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
2588
2589
2590def sww2domain(filename,boundary=None,t=None,\
2591               fail_if_NaN=True,NaN_filler=0\
2592               ,verbose = False,very_verbose = False):
2593    """
2594    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
2595
[2112]2596    Boundary is not recommended if domain.smooth is not selected, as it
[2105]2597    uses unique coordinates, but not unique boundaries. This means that
[2112]2598    the boundary file will not be compatible with the coordinates, and will
[2105]2599    give a different final boundary, or crash.
2600    """
2601    NaN=9.969209968386869e+036
2602    #initialise NaN.
2603
2604    from Scientific.IO.NetCDF import NetCDFFile
2605    from shallow_water import Domain
2606    from Numeric import asarray, transpose, resize
2607
2608    if verbose: print 'Reading from ', filename
2609    fid = NetCDFFile(filename, 'r')    #Open existing file for read
2610    time = fid.variables['time']       #Timesteps
2611    if t is None:
2612        t = time[-1]
2613    time_interp = get_time_interp(time,t)
2614
2615    # Get the variables as Numeric arrays
2616    x = fid.variables['x'][:]             #x-coordinates of vertices
2617    y = fid.variables['y'][:]             #y-coordinates of vertices
2618    elevation = fid.variables['elevation']     #Elevation
2619    stage = fid.variables['stage']     #Water level
2620    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
2621    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
2622
2623    starttime = fid.starttime[0]
2624    volumes = fid.variables['volumes'][:] #Connectivity
2625    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
2626
2627    conserved_quantities = []
2628    interpolated_quantities = {}
2629    other_quantities = []
2630
2631    # get geo_reference
2632    #sww files don't have to have a geo_ref
2633    try:
2634        geo_reference = Geo_reference(NetCDFObject=fid)
2635    except: #AttributeError, e:
2636        geo_reference = None
2637
2638    if verbose: print '    getting quantities'
2639    for quantity in fid.variables.keys():
2640        dimensions = fid.variables[quantity].dimensions
2641        if 'number_of_timesteps' in dimensions:
2642            conserved_quantities.append(quantity)
2643            interpolated_quantities[quantity]=\
2644                  interpolated_quantity(fid.variables[quantity][:],time_interp)
2645        else: other_quantities.append(quantity)
2646
2647    other_quantities.remove('x')
2648    other_quantities.remove('y')
2649    other_quantities.remove('z')
2650    other_quantities.remove('volumes')
2651
2652    conserved_quantities.remove('time')
2653
2654    if verbose: print '    building domain'
2655    #    From domain.Domain:
2656    #    domain = Domain(coordinates, volumes,\
2657    #                    conserved_quantities = conserved_quantities,\
2658    #                    other_quantities = other_quantities,zone=zone,\
2659    #                    xllcorner=xllcorner, yllcorner=yllcorner)
2660
2661    #   From shallow_water.Domain:
2662    coordinates=coordinates.tolist()
2663    volumes=volumes.tolist()
2664    #FIXME:should this be in mesh?(peter row)
2665    if fid.smoothing == 'Yes': unique = False
2666    else: unique = True
2667    if unique:
2668        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
2669
2670
[2305]2671    try:   
2672        domain = Domain(coordinates, volumes, boundary)
2673    except AssertionError, e:
2674        fid.close()
2675        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
2676        raise msg
[2105]2677
2678    if not boundary is None:
2679        domain.boundary = boundary
2680
2681    domain.geo_reference = geo_reference
2682
2683    domain.starttime=float(starttime)+float(t)
2684    domain.time=0.0
2685
2686    for quantity in other_quantities:
2687        try:
2688            NaN = fid.variables[quantity].missing_value
2689        except:
2690            pass #quantity has no missing_value number
2691        X = fid.variables[quantity][:]
2692        if very_verbose:
2693            print '       ',quantity
2694            print '        NaN =',NaN
2695            print '        max(X)'
2696            print '       ',max(X)
2697            print '        max(X)==NaN'
2698            print '       ',max(X)==NaN
2699            print ''
2700        if (max(X)==NaN) or (min(X)==NaN):
2701            if fail_if_NaN:
2702                msg = 'quantity "%s" contains no_data entry'%quantity
2703                raise msg
2704            else:
2705                data = (X != NaN)
2706                X = (X*data)+(data==0)*NaN_filler
2707        if unique:
2708            X = resize(X,(len(X)/3,3))
2709        domain.set_quantity(quantity,X)
2710    #
2711    for quantity in conserved_quantities:
2712        try:
2713            NaN = fid.variables[quantity].missing_value
2714        except:
2715            pass #quantity has no missing_value number
2716        X = interpolated_quantities[quantity]
2717        if very_verbose:
2718            print '       ',quantity
2719            print '        NaN =',NaN
2720            print '        max(X)'
2721            print '       ',max(X)
2722            print '        max(X)==NaN'
2723            print '       ',max(X)==NaN
2724            print ''
2725        if (max(X)==NaN) or (min(X)==NaN):
2726            if fail_if_NaN:
2727                msg = 'quantity "%s" contains no_data entry'%quantity
2728                raise msg
2729            else:
2730                data = (X != NaN)
2731                X = (X*data)+(data==0)*NaN_filler
2732        if unique:
2733            X = resize(X,(X.shape[0]/3,3))
2734        domain.set_quantity(quantity,X)
[2305]2735       
[2105]2736    fid.close()
2737    return domain
2738
2739def interpolated_quantity(saved_quantity,time_interp):
2740
2741    #given an index and ratio, interpolate quantity with respect to time.
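    #E.g. time_interp == (2, 0.25) yields 0.75*Q[2] + 0.25*Q[3],
    #i.e. one quarter of the way from time[2] to time[3].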
2742    index,ratio = time_interp
2743    Q = saved_quantity
2744    if ratio > 0:
2745        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
2746    else:
2747        q = Q[index]
2748    #Return vector of interpolated values
2749    return q
2750
2751def get_time_interp(time,t=None):
2752    #Finds the ratio and index for time interpolation.
2753    #It is borrowed from previous pyvolution code.
2754    if t is None:
2755        t=time[-1]
2756        index = -1
2757        ratio = 0.
2758    else:
2759        T = time
2760        tau = t
2761        index=0
2762        msg = 'Time interval derived from file %s [%s:%s]'\
2763            %('FIXMEfilename', T[0], T[-1])
2764        msg += ' does not match model time: %s' %tau
2765        if tau < time[0]: raise msg
2766        if tau > time[-1]: raise msg
2767        while tau > time[index]: index += 1
2768        while tau < time[index]: index -= 1
2769        if tau == time[index]:
2770            #Protect against case where tau == time[-1] (last time)
2771            # - also works in general when tau == time[i]
2772            ratio = 0
2773        else:
2774            #t is now between index and index+1
2775            ratio = (tau - time[index])/(time[index+1] - time[index])
2776    return (index,ratio)
2777
2778
2779def weed(coordinates,volumes,boundary = None):
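    #Remove duplicate points from coordinates and remap volumes (and
    #boundary segments, if given) to the weeded indices. E.g. if
    #coordinates[1] == coordinates[4], all references to vertex 4 are
    #redirected to the single retained copy.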
2780    if type(coordinates).__name__ == 'array':
2781        coordinates = coordinates.tolist()
2782    if type(volumes).__name__ == 'array':
2783        volumes = volumes.tolist()
2784
2785    unique = False
2786    point_dict = {}
2787    same_point = {}
2788    for i in range(len(coordinates)):
2789        point = tuple(coordinates[i])
2790        if point_dict.has_key(point):
2791            unique = True
2792            same_point[i]=point
2793            #to change all point i references to point j
2794        else:
2795            point_dict[point]=i
2796            same_point[i]=point
2797
2798    coordinates = []
2799    i = 0
2800    for point in point_dict.keys():
2801        point = tuple(point)
2802        coordinates.append(list(point))
2803        point_dict[point]=i
2804        i+=1
2805
2806
2807    for volume in volumes:
2808        for i in range(len(volume)):
2809            index = volume[i]
2810            if index>-1:
2811                volume[i]=point_dict[same_point[index]]
2812
2813    new_boundary = {}
2814    if not boundary is None:
2815        for segment in boundary.keys():
2816            point0 = point_dict[same_point[segment[0]]]
2817            point1 = point_dict[same_point[segment[1]]]
2818            label = boundary[segment]
2819            #FIXME should the boundary attributes be concatenated
2820            #('exterior, pond') or replaced ('pond')(peter row)
2821
2822            if new_boundary.has_key((point0,point1)):
2823                new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
2824                                              #+','+label
2825
2826            elif new_boundary.has_key((point1,point0)):
2827                new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
2828                                              #+','+label
2829            else: new_boundary[(point0,point1)]=label
2830
2831        boundary = new_boundary
2832
2833    return coordinates,volumes,boundary
2834
2835
2836
2837
2838
2839
2840
2841
2842
2843
2844###############################################
2845#OBSOLETE STUFF
2846#Native checkpoint format.
2847#Information needed to recreate a state is preserved
2848#FIXME: Rethink and maybe use netcdf format
2849def cpt_variable_writer(filename, t, v0, v1, v2):
2850    """Store all conserved quantities to file
2851    """
2852
2853    M, N = v0.shape
2854
2855    FN = create_filename(filename, 'cpt', M, t)
2856    #print 'Writing to %s' %FN
2857
2858    fid = open(FN, 'w')
2859    for i in range(M):
2860        for j in range(N):
2861            fid.write('%.16e ' %v0[i,j])
2862        for j in range(N):
2863            fid.write('%.16e ' %v1[i,j])
2864        for j in range(N):
2865            fid.write('%.16e ' %v2[i,j])
2866
2867        fid.write('\n')
2868    fid.close()
2869
2870
2871def cpt_variable_reader(filename, t, v0, v1, v2):
2872    """Store all conserved quantities to file
2873    """
2874
2875    M, N = v0.shape
2876
2877    FN = create_filename(filename, 'cpt', M, t)
2878    #print 'Reading from %s' %FN
2879
2880    fid = open(FN)
2881
2882
2883    for i in range(M):
2884        values = fid.readline().split() #Get one line
2885
2886        for j in range(N):
2887            v0[i,j] = float(values[j])
2888            v1[i,j] = float(values[3+j])
2889            v2[i,j] = float(values[6+j])
2890
2891    fid.close()
2892
2893def cpt_constant_writer(filename, X0, X1, X2, v0, v1, v2):
2894    """Writes x,y,z,z,z coordinates of triangles constituting the bed
2895    elevation.
2896    FIXME: Not in use
2897    """
2898
2899    M, N = v0.shape
2900
2901
2902    print X0
2903    import sys; sys.exit()
2904    FN = create_filename(filename, 'cpt', M)
2905    print 'Writing to %s' %FN
2906
2907    fid = open(FN, 'w')
2908    for i in range(M):
2909        for j in range(2):
2910            fid.write('%.16e ' %X0[i,j])   #x, y
2911        for j in range(N):
2912            fid.write('%.16e ' %v0[i,j])       #z,z,z,
2913
2914        for j in range(2):
2915            fid.write('%.16e ' %X1[i,j])   #x, y
2916        for j in range(N):
2917            fid.write('%.16e ' %v1[i,j])
2918
2919        for j in range(2):
2920            fid.write('%.16e ' %X2[i,j])   #x, y
2921        for j in range(N):
2922            fid.write('%.16e ' %v2[i,j])
2923
2924        fid.write('\n')
2925    fid.close()
2926
2927
2928
2929#Function for storing out to e.g. visualisation
2930#FIXME: Do we want this?
2931#FIXME: Not done yet for this version
2932def dat_constant_writer(filename, X0, X1, X2, v0, v1, v2):
2933    """Writes x,y,z coordinates of triangles constituting the bed elevation.
2934    """
2935
2936    M, N = v0.shape
2937
2938    FN = create_filename(filename, 'dat', M)
2939    #print 'Writing to %s' %FN
2940
2941    fid = open(FN, 'w')
2942    for i in range(M):
2943        for j in range(2):
2944            fid.write('%f ' %X0[i,j])   #x, y
2945        fid.write('%f ' %v0[i,0])       #z
2946
2947        for j in range(2):
2948            fid.write('%f ' %X1[i,j])   #x, y
2949        fid.write('%f ' %v1[i,0])       #z
2950
2951        for j in range(2):
2952            fid.write('%f ' %X2[i,j])   #x, y
2953        fid.write('%f ' %v2[i,0])       #z
2954
2955        fid.write('\n')
2956    fid.close()
2957
2958
2959
2960def dat_variable_writer(filename, t, v0, v1, v2):
2961    """Store water height to file
2962    """
2963
2964    M, N = v0.shape
2965
2966    FN = create_filename(filename, 'dat', M, t)
2967    #print 'Writing to %s' %FN
2968
2969    fid = open(FN, 'w')
2970    for i in range(M):
2971        fid.write('%.4f ' %v0[i,0])
2972        fid.write('%.4f ' %v1[i,0])
2973        fid.write('%.4f ' %v2[i,0])
2974
2975        fid.write('\n')
2976    fid.close()
2977
2978
2979def read_sww(filename):
2980    """Read sww Net CDF file containing Shallow Water Wave simulation
2981
2982    The integer array volumes is of shape Nx3 where N is the number of
2983    triangles in the mesh.
2984
2985    Each entry in volumes is an index into the x,y arrays (the location).
2986
2987    Quantities stage, elevation, xmomentum and ymomentum are all in arrays of dimensions
2988    number_of_timesteps, number_of_points.
2989
2990    The momentum is not always stored.
2991
2992    """
2993    from Scientific.IO.NetCDF import NetCDFFile
2994    print 'Reading from ', filename
2995    fid = NetCDFFile(filename, 'r')    #Open existing file for read
2996#latitude, longitude
2997    # Get the variables as Numeric arrays
2998    x = fid.variables['x']             #x-coordinates of vertices
2999    y = fid.variables['y']             #y-coordinates of vertices
3000    z = fid.variables['elevation']     #Elevation
3001    time = fid.variables['time']       #Timesteps
3002    stage = fid.variables['stage']     #Water level
3003    #xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
3004    #ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
3005
3006    volumes = fid.variables['volumes'] #Connectivity
3007
[2305]3008    #FIXME (Ole): What is this?
3009    #             Why isn't anything returned?
3010    #             Where's the unit test?
[2105]3011
3012def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
3013                 verbose=False):
3014    """Read Digitial Elevation model from the following NetCDF format (.dem)
3015
3016    Example:
3017
3018    ncols         3121
3019    nrows         1800
3020    xllcorner     722000
3021    yllcorner     5893000
3022    cellsize      25
3023    NODATA_value  -9999
3024    138.3698 137.4194 136.5062 135.5558 ..........
3025
3026    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3027    """
3028
3029    import os
3030    from Scientific.IO.NetCDF import NetCDFFile
3031    from Numeric import Float, zeros, sum, reshape, equal
3032
3033    root = basename_in
3034    inname = root + '.dem'
3035
3036    #Open existing netcdf file to read
3037    infile = NetCDFFile(inname, 'r')
3038    if verbose: print 'Reading DEM from %s' %inname
3039
3040    #Read metadata
3041    ncols = infile.ncols[0]
3042    nrows = infile.nrows[0]
3043    xllcorner = infile.xllcorner[0]
3044    yllcorner = infile.yllcorner[0]
3045    cellsize = infile.cellsize[0]
3046    NODATA_value = infile.NODATA_value[0]
3047    zone = infile.zone[0]
3048    false_easting = infile.false_easting[0]
3049    false_northing = infile.false_northing[0]
3050    projection = infile.projection
3051    datum = infile.datum
3052    units = infile.units
3053
3054    dem_elevation = infile.variables['elevation']
3055
3056    #Get output file name
3057    if basename_out == None:
3058        outname = root + '_' + repr(cellsize_new) + '.dem'
3059    else:
3060        outname = basename_out + '.dem'
3061
3062    if verbose: print 'Write decimated NetCDF file to %s' %outname
3063
3064    #Determine some dimensions for decimated grid
3065    (nrows_stencil, ncols_stencil) = stencil.shape
3066    x_offset = ncols_stencil / 2
3067    y_offset = nrows_stencil / 2
3068    cellsize_ratio = int(cellsize_new / cellsize)
3069    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3070    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3071
3072    #Open netcdf file for output
3073    outfile = NetCDFFile(outname, 'w')
3074
3075    #Create new file
3076    outfile.institution = 'Geoscience Australia'
3077    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3078                           'of spatial point data'
3079    #Georeferencing
3080    outfile.zone = zone
3081    outfile.projection = projection
3082    outfile.datum = datum
3083    outfile.units = units
3084
3085    outfile.cellsize = cellsize_new
3086    outfile.NODATA_value = NODATA_value
3087    outfile.false_easting = false_easting
3088    outfile.false_northing = false_northing
3089
3090    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3091    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3092    outfile.ncols = ncols_new
3093    outfile.nrows = nrows_new
3094
3095    # dimension definition
3096    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3097
3098    # variable definition
3099    outfile.createVariable('elevation', Float, ('number_of_points',))
3100
3101    # Get handle to the variable
3102    elevation = outfile.variables['elevation']
3103
3104    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3105
3106    #Store data
3107    global_index = 0
3108    for i in range(nrows_new):
3109        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3110        lower_index = global_index
3111        telev =  zeros(ncols_new, Float)
3112        local_index = 0
3113        trow = i * cellsize_ratio
3114
3115        for j in range(ncols_new):
3116            tcol = j * cellsize_ratio
3117            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3118
3119            #if dem contains 1 or more NODATA_values set value in
3120            #decimated dem to NODATA_value, else compute decimated
3121            #value using stencil
3122            if sum(sum(equal(tmp, NODATA_value))) > 0:
3123                telev[local_index] = NODATA_value
3124            else:
3125                telev[local_index] = sum(sum(tmp * stencil))
3126
3127            global_index += 1
3128            local_index += 1
3129
3130        upper_index = global_index
3131
3132        elevation[lower_index:upper_index] = telev
3133
3134    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3135
3136    infile.close()
3137    outfile.close()
3138
3139
3140
3141def sww2asc_obsolete(basename_in, basename_out = None,
3142            quantity = None,
3143            timestep = None,
3144            reduction = None,
3145            cellsize = 10,
3146            verbose = False,
3147            origin = None):
3148    """Read SWW file and convert to Digitial Elevation model format (.asc)
3149
3150    Example:
3151
3152    ncols         3121
3153    nrows         1800
3154    xllcorner     722000
3155    yllcorner     5893000
3156    cellsize      25
3157    NODATA_value  -9999
3158    138.3698 137.4194 136.5062 135.5558 ..........
3159
3160    Also write accompanying file with same basename_in but extension .prj
3161    used to fix the UTM zone, datum, false northings and eastings.
3162
3163    The prj format is assumed to be as
3164
3165    Projection    UTM
3166    Zone          56
3167    Datum         WGS84
3168    Zunits        NO
3169    Units         METERS
3170    Spheroid      WGS84
3171    Xshift        0.0000000000
3172    Yshift        10000000.0000000000
3173    Parameters
3174
3175
3176    if quantity is given, out values from quantity otherwise default to
3177    elevation
3178
3179    if timestep (an index) is given, output quantity at that timestep
3180
3181    if reduction is given use that to reduce quantity over all timesteps.
3182
3183    """
3184    from Numeric import array, Float, concatenate, NewAxis, zeros,\
3185         sometrue
3186    from utilities.polygon import inside_polygon
3187
3188    #FIXME: Should be variable
3189    datum = 'WGS84'
3190    false_easting = 500000
3191    false_northing = 10000000
3192
3193    if quantity is None:
3194        quantity = 'elevation'
3195
3196    if reduction is None:
3197        reduction = max
3198
3199    if basename_out is None:
3200        basename_out = basename_in + '_%s' %quantity
3201
3202    swwfile = basename_in + '.sww'
3203    ascfile = basename_out + '.asc'
3204    prjfile = basename_out + '.prj'
3205
3206
3207    if verbose: print 'Reading from %s' %swwfile
3208    #Read sww file
3209    from Scientific.IO.NetCDF import NetCDFFile
3210    fid = NetCDFFile(swwfile)
3211
3212    #Get extent and reference
3213    x = fid.variables['x'][:]
3214    y = fid.variables['y'][:]
3215    volumes = fid.variables['volumes'][:]
3216
3217    ymin = min(y); ymax = max(y)
3218    xmin = min(x); xmax = max(x)
3219
3220    number_of_timesteps = fid.dimensions['number_of_timesteps']
3221    number_of_points = fid.dimensions['number_of_points']
3222    if origin is None:
3223
3224        #Get geo_reference
3225        #sww files don't have to have a geo_ref
3226        try:
3227            geo_reference = Geo_reference(NetCDFObject=fid)
3228        except AttributeError, e:
3229            geo_reference = Geo_reference() #Default georef object
3230
3231        xllcorner = geo_reference.get_xllcorner()
3232        yllcorner = geo_reference.get_yllcorner()
3233        zone = geo_reference.get_zone()
3234    else:
3235        zone = origin[0]
3236        xllcorner = origin[1]
3237        yllcorner = origin[2]
3238
3239
3240    #Get quantity and reduce if applicable
3241    if verbose: print 'Reading quantity %s' %quantity
3242
3243    if quantity.lower() == 'depth':
3244        q = fid.variables['stage'][:] - fid.variables['elevation'][:]
3245    else:
3246        q = fid.variables[quantity][:]
3247
3248
3249    if len(q.shape) == 2:
3250        if verbose: print 'Reducing quantity %s' %quantity
3251        q_reduced = zeros( number_of_points, Float )
3252
3253        for k in range(number_of_points):
3254            q_reduced[k] = reduction( q[:,k] )
3255
3256        q = q_reduced
3257
3258    #Now q has dimension: number_of_points
3259
3260    #Create grid and update xll/yll corner and x,y
3261    if verbose: print 'Creating grid'
3262    ncols = int((xmax-xmin)/cellsize)+1
3263    nrows = int((ymax-ymin)/cellsize)+1
3264
3265    newxllcorner = xmin+xllcorner
3266    newyllcorner = ymin+yllcorner
3267
3268    x = x+xllcorner-newxllcorner
3269    y = y+yllcorner-newyllcorner
3270
3271    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
3272    assert len(vertex_points.shape) == 2
3273
3274
3275    from Numeric import zeros, Float
3276    grid_points = zeros ( (ncols*nrows, 2), Float )
3277
3278
3279    for i in xrange(nrows):
3280        yg = i*cellsize
3281        for j in xrange(ncols):
3282            xg = j*cellsize
3283            k = i*ncols + j
3284
3285            grid_points[k,0] = xg
3286            grid_points[k,1] = yg
3287
3288    #Interpolate
3289    from least_squares import Interpolation
3290
3291
3292    #FIXME: This should be done with precrop = True, otherwise it'll
3293    #take forever. With expand_search  set to False, some grid points might
3294    #miss out....
3295
3296    interp = Interpolation(vertex_points, volumes, grid_points, alpha=0.0,
3297                           precrop = False, expand_search = True,
3298                           verbose = verbose)
3299
3300    #Interpolate using quantity values
3301    if verbose: print 'Interpolating'
3302    grid_values = interp.interpolate(q).flat
3303
3304    #Write
3305    #Write prj file
3306    if verbose: print 'Writing %s' %prjfile
3307    prjid = open(prjfile, 'w')
3308    prjid.write('Projection    %s\n' %'UTM')
3309    prjid.write('Zone          %d\n' %zone)
3310    prjid.write('Datum         %s\n' %datum)
3311    prjid.write('Zunits        NO\n')
3312    prjid.write('Units         METERS\n')
3313    prjid.write('Spheroid      %s\n' %datum)
3314    prjid.write('Xshift        %d\n' %false_easting)
3315    prjid.write('Yshift        %d\n' %false_northing)
3316    prjid.write('Parameters\n')
3317    prjid.close()
3318
3319
3320
3321    if verbose: print 'Writing %s' %ascfile
3322    NODATA_value = -9999
3323
3324    ascid = open(ascfile, 'w')
3325
3326    ascid.write('ncols         %d\n' %ncols)
3327    ascid.write('nrows         %d\n' %nrows)
3328    ascid.write('xllcorner     %d\n' %newxllcorner)
3329    ascid.write('yllcorner     %d\n' %newyllcorner)
3330    ascid.write('cellsize      %f\n' %cellsize)
3331    ascid.write('NODATA_value  %d\n' %NODATA_value)
3332
3333
3334    #Get bounding polygon from mesh
3335    P = interp.mesh.get_boundary_polygon()
3336    inside_indices = inside_polygon(grid_points, P)
3337
3338    for i in range(nrows):
3339        if verbose and i%((nrows+10)/10)==0:
3340            print 'Doing row %d of %d' %(i, nrows)
3341
3342        for j in range(ncols):
3343            index = (nrows-i-1)*ncols+j
3344
3345            if sometrue(inside_indices == index):
3346                ascid.write('%f ' %grid_values[index])
3347            else:
3348                ascid.write('%d ' %NODATA_value)
3349
3350        ascid.write('\n')
3351
3352    #Close
3353    ascid.close()
3354    fid.close()
3355
3356#********************
3357#*** END OF OBSOLETE FUNCTIONS
3358#***************
3359
3360
3361def tsh2sww(filename, verbose=False): #test_tsh2sww
3362    """
3363    Convert a tsh/msh mesh file to sww format, e.g. to check that the mesh 'looks' good.
3364    """
3365
3366    #FIXME Move to data_manager
3367    from shallow_water import Domain
3368    from pmesh2domain import pmesh_to_domain_instance
3369    import time, os
3370    from data_manager import get_dataobject
3371    from os import sep, path
3372    from util import mean
3373
3374    if verbose == True:print 'Creating domain from', filename
3375    domain = pmesh_to_domain_instance(filename, Domain)
3376    if verbose == True:print "Number of triangles = ", len(domain)
3377
3378    domain.smooth = True
3379    domain.format = 'sww'   #Native netcdf visualisation format
3380    file_path, filename = path.split(filename)
3381    filename, ext = path.splitext(filename)
3382    domain.filename = filename
3383    domain.reduction = mean
3384    if verbose == True:print "file_path",file_path
3385    if file_path == "":file_path = "."
3386    domain.set_datadir(file_path)
3387
3388    if verbose == True:
3389        print "Output written to " + domain.get_datadir() + sep + \
3390              domain.filename + "." + domain.format
3391    sww = get_dataobject(domain)
3392    sww.store_connectivity()
3393    sww.store_timestep('stage')
3394
3395
3396def asc_csiro2sww(bath_dir,
3397                  elevation_dir,
3398                  ucur_dir,
3399                  vcur_dir,
3400                  sww_file,
3401                  minlat = None, maxlat = None,
3402                  minlon = None, maxlon = None,
3403                  zscale=1,
3404                  mean_stage = 0,
3405                  fail_on_NaN = True,
3406                  elevation_NaN_filler = 0,
3407                  bath_prefix='ba',
3408                  elevation_prefix='el',
3409                  verbose=False):
3410    """
3411    Produce an sww boundary file from esri ascii data from CSIRO.
3412
3413    Also convert latitude and longitude to UTM. All coordinates are
3414    assumed to be given in the GDA94 datum.
3415
3416    assume:
3417    All files are in esri ascii format
3418
3419    4 types of information
3420    bathymetry
3421    elevation
3422    u velocity
3423    v velocity
3424
3425    Assumptions
3426    The metadata of all the files is the same
3427    Each type is in a separate directory
3428    One bath file with extension .000
3429    The time period is less than 24hrs and uniform.
3430    """
    from Scientific.IO.NetCDF import NetCDFFile

    from coordinate_transforms.redfearn import redfearn

    precision = Float # So if we want to change the precision it's done here

    # go into the bath dir and load the only file
    bath_files = os.listdir(bath_dir)
    #print "bath_files",bath_files

    #FIXME: make more general?
    bath_file = bath_files[0]
    bath_dir_file = bath_dir + os.sep + bath_file
    bath_metadata, bath_grid = _read_asc(bath_dir_file)
    #print "bath_metadata",bath_metadata
    #print "bath_grid",bath_grid

    #Use the date.time of the bath file as a basis for
    #the start time for other files
    base_start = bath_file[-12:]

    #go into the elevation dir and load the 000 file
    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
                         + base_start
    #elevation_metadata,elevation_grid =  _read_asc(elevation_dir_file)
    #print "elevation_dir_file",elevation_dir_file
    #print "elevation_grid", elevation_grid

    elevation_files = os.listdir(elevation_dir)
    ucur_files = os.listdir(ucur_dir)
    vcur_files = os.listdir(vcur_dir)
    # os.listdir makes no ordering guarantee, so sort to get the
    # files in time order
    elevation_files.sort()
    ucur_files.sort()
    vcur_files.sort()

    # the first elevation file should be the
    # file with the same base name as the bath data
    #print "elevation_files[0]",elevation_files[0]
    assert elevation_files[0] == elevation_prefix + base_start

    #assert bath_metadata == elevation_metadata




    number_of_latitudes = bath_grid.shape[0]
    number_of_longitudes = bath_grid.shape[1]
    #number_of_times = len(os.listdir(elevation_dir))
    #number_of_points = number_of_latitudes*number_of_longitudes
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2

    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
                  for x in range(number_of_longitudes)]
    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
                 for y in range(number_of_latitudes)]

    # reverse order of lat, so the first lat represents the first grid row
    latitudes.reverse()

    #print "latitudes - before _get_min_max_indexes",latitudes
    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes, longitudes,
                                                  minlat=minlat, maxlat=maxlat,
                                                  minlon=minlon, maxlon=maxlon)


    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
    #print "bath_grid",bath_grid
    latitudes = latitudes[kmin:kmax]
    longitudes = longitudes[lmin:lmax]
    number_of_latitudes = len(latitudes)
    number_of_longitudes = len(longitudes)
    number_of_times = len(elevation_files)
    number_of_points = number_of_latitudes*number_of_longitudes
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
    #print "number_of_points",number_of_points

    #Work out the times
    if len(elevation_files) > 1:
        # Assume the 3-digit file extension encodes whole hours,
        # uniformly spaced and covering less than 24 hrs
        time_period = (int(elevation_files[1][-3:]) - \
                       int(elevation_files[0][-3:]))*60*60
        times = [x*time_period for x in range(len(elevation_files))]
    else:
        times = [0.0]
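    # Worked example (hypothetical file names): with elevation files
    # el12345678.000 and el12345678.003, the extensions give
    # (3 - 0)*60*60 = 10800 s, i.e. a 3 hr period, so
    # times == [0, 10800, 21600, ...]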
3512    #print "times", times
3513    #print "number_of_latitudes", number_of_latitudes
3514    #print "number_of_longitudes", number_of_longitudes
3515    #print "number_of_times", number_of_times
3516    #print "latitudes", latitudes
3517    #print "longitudes", longitudes
3518

    if verbose:
        print '------------------------------------------------'
        print 'Statistics:'
        print '  Extent (lat/lon):'
        print '    lat in [%f, %f], len(lat) == %d'\
              %(min(latitudes), max(latitudes),
                len(latitudes))
        print '    lon in [%f, %f], len(lon) == %d'\
              %(min(longitudes), max(longitudes),
                len(longitudes))
        print '    t in [%f, %f], len(t) == %d'\
              %(min(times), max(times), len(times))

    ######### WRITE THE SWW FILE #############
    # NetCDF file definition
    outfile = NetCDFFile(sww_file, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'Converted from XXX'


    #For sww compatibility
    outfile.smoothing = 'Yes'
    outfile.order = 1

    #Start time in seconds since the epoch (midnight 1/1/1970)
    outfile.starttime = times[0]


    # dimension definitions
    outfile.createDimension('number_of_volumes', number_of_volumes)

    outfile.createDimension('number_of_vertices', 3)
    outfile.createDimension('number_of_points', number_of_points)
    outfile.createDimension('number_of_timesteps', number_of_times)

    # variable definitions
    outfile.createVariable('x', precision, ('number_of_points',))
    outfile.createVariable('y', precision, ('number_of_points',))
    outfile.createVariable('elevation', precision, ('number_of_points',))

    #FIXME: Backwards compatibility
    outfile.createVariable('z', precision, ('number_of_points',))
    #################################

    outfile.createVariable('volumes', Int, ('number_of_volumes',
                                            'number_of_vertices'))

    outfile.createVariable('time', precision,
                           ('number_of_timesteps',))

    outfile.createVariable('stage', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('xmomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('ymomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    #Store
    x = zeros(number_of_points, Float)  #Easting
    y = zeros(number_of_points, Float)  #Northing

    if verbose: print 'Making triangular grid'
    #Get zone of 1st point.
    refzone, _, _ = redfearn(latitudes[0], longitudes[0])

    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):
        for l, lon in enumerate(longitudes):

            vertices[l,k] = i

            zone, easting, northing = redfearn(lat, lon)

            msg = 'Zone boundary crossed at longitude = %f' %lon
            #assert zone == refzone, msg
            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
            x[i] = easting
            y[i] = northing
            i += 1


    #Construct 2 triangles per 'rectangular' element
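    # One grid cell and its two triangles, in (l, k) index space
    # (diagram added for orientation):
    #
    #   v1=(l,k+1) ------ v3=(l+1,k+1)
    #       |          __/     |
    #       |  upper __/       |
    #       |     __/   lower  |
    #   v2=(l,k) -------- v4=(l+1,k)
    #
    # Both triangles wind the same way around the shared diagonal v2--v3.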
    volumes = []
    for l in range(number_of_longitudes-1):    #X direction
        for k in range(number_of_latitudes-1): #Y direction
            v1 = vertices[l,k+1]
            v2 = vertices[l,k]
            v3 = vertices[l+1,k+1]
            v4 = vertices[l+1,k]

            #Note, this is different to the ferret2sww code
            #since the order of the lats is reversed.
            volumes.append([v1,v3,v2]) #Upper element
            volumes.append([v4,v2,v3]) #Lower element

    volumes = array(volumes)

    geo_ref = Geo_reference(refzone, min(x), min(y))
    geo_ref.write_NetCDF(outfile)

    # This would put the geo ref in the middle
    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(y)+min(y))/2.0)


    if verbose:
        print '------------------------------------------------'
        print 'More Statistics:'
        print '  Extent (x/y):'
        print '    x in [%f, %f], len(x) == %d'\
              %(min(x), max(x),
                len(x))
        print '    y in [%f, %f], len(y) == %d'\
              %(min(y), max(y),
                len(y))
        print 'geo_ref: ', geo_ref
    z = resize(bath_grid, outfile.variables['z'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['z'][:] = z
    outfile.variables['elevation'][:] = z  #FIXME HACK
    outfile.variables['volumes'][:] = volumes.astype(Int32) #Int32 for Opteron 64

    # To create a minimal but valid sww file, one could instead do:
    #outfile.variables['time'][:] = [0]   #Store time relative
    #outfile.variables['stage'] = z
    # (and set number_of_times = 1 just after outfile.order = 1 above)

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    outfile.variables['time'][:] = times   #Store time relative

    if verbose: print 'Converting quantities'
    n = number_of_times
    for j in range(number_of_times):
        # load in files
        elevation_meta, elevation_grid = \
           _read_asc(elevation_dir + os.sep + elevation_files[j])

        _, u_momentum_grid = _read_asc(ucur_dir + os.sep + ucur_files[j])
        _, v_momentum_grid = _read_asc(vcur_dir + os.sep + vcur_files[j])

        #print "elevation_grid",elevation_grid
        #cut matrix to desired size
        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
        #print "elevation_grid",elevation_grid

        # handle missing values
        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if sometrue(missing):
            if fail_on_NaN:
                msg = 'File %s contains missing values'\
                      %(elevation_files[j])
                raise Exception(msg)
            else:
                elevation_grid = elevation_grid*(missing==0) + \
                                 missing*elevation_NaN_filler


        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
        i = 0
        for k in range(number_of_latitudes):      #Y direction
            for l in range(number_of_longitudes): #X direction
                #Stage is scaled, offset elevation; momentum is
                #velocity times depth (h = stage - bed elevation)
                w = zscale*elevation_grid[k,l] + mean_stage
                stage[j,i] = w
                h = w - z[i]
                xmomentum[j,i] = u_momentum_grid[k,l]*h
                ymomentum[j,i] = v_momentum_grid[k,l]*h
                i += 1
    outfile.close()

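# Example usage (an illustrative sketch only -- all directory and file
# names here are hypothetical):
#
#     asc_csiro2sww('bathys', 'elevations', 'ucur', 'vcur',
#                   'eastern_aus.sww',
#                   minlat=-37.6, maxlat=-35.7,
#                   minlon=148.3, maxlon=151.0,
#                   mean_stage=0, fail_on_NaN=False,
#                   elevation_NaN_filler=0,
#                   verbose=True)
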
def _get_min_max_indexes(latitudes, longitudes,
                         minlat=None, maxlat=None,
                         minlon=None, maxlon=None):
    """
    Return min and max indexes (for slicing) of the lat and long arrays,
    covering the area specified by the min/max lat/long values.

    Think of the latitudes and longitudes as describing a 2d surface.
    The area returned is, if possible, just big enough to cover the
    requested min/max area. (This will not be possible if the min/max area
    has a section outside of the latitudes/longitudes area.)

    Assume latitudes and longitudes are sorted:
    long - from low to high (west to east, e.g. 148 to 151)
    lat - from high to low (north to south, e.g. -35 to -38)
    """

    # reverse order of lat, so it's in ascending order for searchsorted
    latitudes.reverse()
    largest_lat_index = len(latitudes)-1

    #Cut out a smaller extent
    if minlat is None:
        lat_min_index = 0
    else:
        lat_min_index = searchsorted(latitudes, minlat)-1
        if lat_min_index < 0:
            lat_min_index = 0

    if maxlat is None:
        lat_max_index = largest_lat_index
    else:
        lat_max_index = searchsorted(latitudes, maxlat)
        if lat_max_index > largest_lat_index:
            lat_max_index = largest_lat_index

    if minlon is None:
        lon_min_index = 0
    else:
        lon_min_index = searchsorted(longitudes, minlon)-1
        if lon_min_index < 0:
            lon_min_index = 0

    if maxlon is None:
        lon_max_index = len(longitudes)
    else:
        lon_max_index = searchsorted(longitudes, maxlon)

    #Take into account that the latitude list was reversed
    latitudes.reverse() # reverse() mutates the caller's list, so restore it
    lat_min_index, lat_max_index = largest_lat_index - lat_max_index, \
                                   largest_lat_index - lat_min_index
    lat_max_index = lat_max_index + 1 # taking into account how slicing works
    lon_max_index = lon_max_index + 1 # taking into account how slicing works

    return lat_min_index, lat_max_index, lon_min_index, lon_max_index

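# Worked example (hypothetical values): with
#     latitudes  == [-35, -36, -37, -38]   (north to south)
#     longitudes == [148, 149, 150, 151]   (west to east)
# the call
#     _get_min_max_indexes(latitudes, longitudes,
#                          minlat=-36.5, maxlat=-35.5,
#                          minlon=148.5, maxlon=150.5)
# returns (0, 3, 0, 4), i.e. slices selecting latitudes [-35, -36, -37]
# and longitudes [148, 149, 150, 151] -- one grid line beyond each
# requested bound where available, so the area is fully covered.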

def _read_asc(filename, verbose=False):
    """Read a DEM in the ESRI ASCII format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Return a metadata dictionary and the grid as a Numeric array.
    """

    datafile = open(filename)

    if verbose: print 'Reading DEM from %s' %(filename)
    lines = datafile.readlines()
    datafile.close()

    if verbose: print 'Got', len(lines), ' lines'

    # strip the six header lines
    ncols = int(lines.pop(0).split()[1].strip())
    nrows = int(lines.pop(0).split()[1].strip())
    xllcorner = float(lines.pop(0).split()[1].strip())
    yllcorner = float(lines.pop(0).split()[1].strip())
    cellsize = float(lines.pop(0).split()[1].strip())
    NODATA_value = float(lines.pop(0).split()[1].strip())

    assert len(lines) == nrows

    #Store data
    grid = []

    for line in lines:
        cells = line.split()
        assert len(cells) == ncols
        grid.append(array([float(x) for x in cells]))
    grid = array(grid)

    return {'xllcorner':xllcorner,
            'yllcorner':yllcorner,
            'cellsize':cellsize,
            'NODATA_value':NODATA_value}, grid
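
# Example usage (sketch; 'bathy.asc' is a hypothetical file):
#
#     metadata, grid = _read_asc('bathy.asc', verbose=True)
#     print metadata['cellsize'], grid.shape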