source: anuga_core/source/anuga/shallow_water/data_manager.py @ 5550
[2852]1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topograhies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
[4663]13.csv: ASCII format for storing arbitrary points and associated attributes
[2852]14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
[3535]33TSH:          Triangular meshes (e.g. created from anuga.pmesh)
[2852]34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST bundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
[3560]47TSH + Boundary SWW -> SWW: Simluation using abstract_2d_finite_volumes
[2852]48
49"""

# This file was reverted from changeset:5484 to changeset:5470 on 10th July
# by Ole.

import exceptions
class TitleValueError(exceptions.Exception): pass
class DataMissingValuesError(exceptions.Exception): pass
class DataFileNotOpenError(exceptions.Exception): pass
class DataTimeError(exceptions.Exception): pass
class DataDomainError(exceptions.Exception): pass
class NewQuantity(exceptions.Exception): pass



import csv
import os, sys
import shutil
from struct import unpack
import array as p_array
#import time, os
from os import sep, path, remove, mkdir, access, F_OK, W_OK, getcwd


from Numeric import concatenate, array, Float, Int, Int32, resize, \
     sometrue, searchsorted, zeros, allclose, around, reshape, \
     transpose, sort, NewAxis, ArrayType, compress, take, arange, \
     argmax, alltrue, shape, Float32, size

import string

from Scientific.IO.NetCDF import NetCDFFile
#from shutil import copy
from os.path import exists, basename, join
from os import getcwd


from anuga.coordinate_transforms.redfearn import redfearn, \
     convert_from_latlon_to_utm
from anuga.coordinate_transforms.geo_reference import Geo_reference, \
     write_NetCDF_georeference, ensure_geo_reference
from anuga.geospatial_data.geospatial_data import Geospatial_data,\
     ensure_absolute
from anuga.config import minimum_storable_height as default_minimum_storable_height
from anuga.config import max_float
from anuga.utilities.numerical_tools import ensure_numeric,  mean
from anuga.caching.caching import myhash
from anuga.utilities.anuga_exceptions import ANUGAError
from anuga.shallow_water import Domain
from anuga.abstract_2d_finite_volumes.pmesh2domain import \
     pmesh_to_domain_instance
from anuga.abstract_2d_finite_volumes.util import get_revision_number, \
     remove_lone_verts, sww2timeseries, get_centroid_values
from anuga.load_mesh.loadASCII import export_mesh_file
from anuga.utilities.polygon import intersection


# formula mappings

quantity_formula = {'momentum':'(xmomentum**2 + ymomentum**2)**0.5',
                    'depth':'stage-elevation',
                    'speed': \
 '(xmomentum**2 + ymomentum**2)**0.5/(stage-elevation+1.e-6/(stage-elevation))'}

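# The strings above are Python expressions meant to be evaluated with the
# quantity arrays bound to the names 'stage', 'elevation', 'xmomentum' and
# 'ymomentum'. A minimal hedged sketch (the arrays are illustrative only):
#
#     from Numeric import array
#     stage = array([1.0, 2.0]); elevation = array([0.5, 0.5])
#     depth = eval(quantity_formula['depth'])   # -> array([0.5, 1.5])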

def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s
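
# Hedged example (not from the original source):
#
#     make_filename(' run (final) ')   # returns 'run_final'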


def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       checkdir(path, verbose):

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os.path

    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    # Both read and write access are required
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            exitcode = os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode = os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print 'Using directory %s instead' % path

    return(path)


def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" % X

        os.rmdir(path)


# ANOTHER OPTION, IF NEEDED IN THE FUTURE, Nick B 7/2007
def rmgeneric(path, __func__, verbose=False):
    ERROR_STR = """Error removing %(path)s, %(error)s """

    try:
        __func__(path)
        if verbose: print 'Removed ', path
    except OSError, (errno, strerror):
        print ERROR_STR % {'path' : path, 'error': strerror }

def removeall(path, verbose=False):

    if not os.path.isdir(path):
        return

    files = os.listdir(path)

    for x in files:
        fullpath = os.path.join(path, x)
        if os.path.isfile(fullpath):
            f = os.remove
            rmgeneric(fullpath, f)
        elif os.path.isdir(fullpath):
            removeall(fullpath)
            f = os.rmdir
            rmgeneric(fullpath, f, verbose)
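
# Hedged usage sketch (not from the original source): recursively empty a
# scratch directory, then remove the now-empty directory itself.
#
#     removeall('/tmp/anuga_scratch', verbose=True)
#     os.rmdir('/tmp/anuga_scratch')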



def create_filename(datadir, filename, format, size=None, time=None):

    import os
    #from anuga.config import data_dir

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' %size

    if time is not None:
        FN += '_time%.2f' %time

    FN += '.' + format
    return FN
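
# Hedged example (not from the original source), on Unix:
#
#     create_filename('data', 'run', 'sww', size=4, time=1.5)
#
# returns 'data/run_size4_time1.50.sww' (the trailing separator is
# appended by check_dir).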


def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob

    import os
    #from anuga.config import data_dir

    dir = check_dir(datadir)

    pattern = dir + os.sep + filename + '_size=%d*.%s' %(size, format)
    return glob.glob(pattern)



#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], '''Mode %s must be either:''' %mode +\
                                        '''   'w' (write)'''+\
                                        '''   'r' (read)''' +\
                                        '''   'a' (append)'''

        #Create filename
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0
        self.domain = domain



        # Exclude ghosts in case this is a parallel domain
        self.number_of_nodes = domain.number_of_full_nodes
        self.number_of_volumes = domain.number_of_full_triangles
        #self.number_of_volumes = len(domain)




        #FIXME: Should we have a general set_precision function?



#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision for quantities
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)

        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            self.minimum_storable_height = default_minimum_storable_height

        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            description = 'Output from anuga.abstract_2d_finite_volumes suitable for plotting'
            self.writer = Write_sww()
            self.writer.store_header(fid,
                                     domain.starttime,
                                     self.number_of_volumes,
                                     self.domain.number_of_full_nodes,
                                     description=description,
                                     smoothing=domain.smooth,
                                     order=domain.default_order,
                                     sww_precision=self.precision)

            # Extra optional information
            if hasattr(domain, 'texture'):
                fid.texture = domain.texture

            if domain.quantities_to_be_monitored is not None:
                fid.createDimension('singleton', 1)
                fid.createDimension('two', 2)

                poly = domain.monitor_polygon
                if poly is not None:
                    N = len(poly)
                    fid.createDimension('polygon_length', N)
                    fid.createVariable('extrema.polygon',
                                       self.precision,
                                       ('polygon_length',
                                        'two'))
                    fid.variables['extrema.polygon'][:] = poly


                interval = domain.monitor_time_interval
                if interval is not None:
                    fid.createVariable('extrema.time_interval',
                                       self.precision,
                                       ('two',))
                    fid.variables['extrema.time_interval'][:] = interval


                for q in domain.quantities_to_be_monitored:
                    #print 'doing', q
                    fid.createVariable(q+'.extrema', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q+'.min_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q+'.max_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q+'.min_time', self.precision,
                                       ('singleton',))
                    fid.createVariable(q+'.max_time', self.precision,
                                       ('singleton',))


        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        # Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  # Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision=self.precision)

        #
        points = concatenate( (X[:,NewAxis],Y[:,NewAxis]), axis=1 )
        self.writer.store_triangulation(fid,
                                        points,
                                        V.astype(volumes.typecode()),
                                        Z,
                                        points_georeference= \
                                        domain.geo_reference)

        # Close
        fid.close()


    def store_timestep(self, names=None):
        """Store time and named quantities to file
        """

        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        from Numeric import choose


        if names is None:
            # Standard shallow water wave equation quantities in ANUGA
            names = ['stage', 'xmomentum', 'ymomentum']

        # Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') # Open existing file
            except IOError:
                # This could happen if someone was reading the file.
                # In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg



        # Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase =  file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            # In order to get the file name and start time correct,
            # I change the domain.filename and domain.starttime.
            # This is the only way to do this without changing
            # other modules (I think).

            # Write a filename addon that won't break Swollen's reader
            # (10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')

            # Remember the old filename, then give domain a
            # name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename+filename_ext)


            # Change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            # Build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            # Set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            # Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)
            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            if 'stage' in names and 'xmomentum' in names and \
               'ymomentum' in names:

                # Get stage, elevation, depth and select only those
                # values where minimum_storable_height is exceeded
                Q = domain.quantities['stage']
                A, _ = Q.get_vertex_values(xy = False,
                                           precision = self.precision)
                z = fid.variables['elevation']

                storable_indices = A-z[:] >= self.minimum_storable_height
                stage = choose(storable_indices, (z[:], A))

                # Define a zero vector of same size and type as A
                # for use with momenta
                null = zeros(size(A), A.typecode())

                # Get xmomentum where depth exceeds minimum_storable_height
                Q = domain.quantities['xmomentum']
                xmom, _ = Q.get_vertex_values(xy = False,
                                              precision = self.precision)
                xmomentum = choose(storable_indices, (null, xmom))


                # Get ymomentum where depth exceeds minimum_storable_height
                Q = domain.quantities['ymomentum']
                ymom, _ = Q.get_vertex_values(xy = False,
                                              precision = self.precision)
                ymomentum = choose(storable_indices, (null, ymom))

                # Write quantities to NetCDF
                self.writer.store_quantities(fid,
                                             time=self.domain.time,
                                             sww_precision=self.precision,
                                             stage=stage,
                                             xmomentum=xmomentum,
                                             ymomentum=ymomentum)
            else:
                msg = 'Quantities stored must be: stage, xmomentum, ymomentum.'
                msg += ' Instead I got: ' + str(names)
                raise Exception, msg



            # Update extrema if requested
            domain = self.domain
            if domain.quantities_to_be_monitored is not None:
                for q, info in domain.quantities_to_be_monitored.items():

                    if info['min'] is not None:
                        fid.variables[q + '.extrema'][0] = info['min']
                        fid.variables[q + '.min_location'][:] =\
                                        info['min_location']
                        fid.variables[q + '.min_time'][0] = info['min_time']

                    if info['max'] is not None:
                        fid.variables[q + '.extrema'][1] = info['max']
                        fid.variables[q + '.max_location'][:] =\
                                        info['max_location']
                        fid.variables[q + '.max_time'][0] = info['max_time']



            # Flush and close
            fid.sync()
            fid.close()


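# Hedged usage sketch (not from the original source): storing model output
# with Data_format_sww, assuming 'domain' is a configured shallow water
# Domain instance.
#
#     sww = Data_format_sww(domain, mode='w')
#     sww.store_connectivity()   # static mesh and bed elevation
#     sww.store_timestep()       # stage, xmomentum, ymomentum at domain.time
#
# In practice ANUGA normally drives this writer from the domain's own
# storage machinery; the direct calls above are illustrative only.
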
# Class for handling checkpoint data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'sww', mode) #FIXME: 'sww'? This class documents the .cpt format


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """
        Write x,y coordinates of triangles and their connectivity (volumes),
        along with the bed elevation z.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation'] #FIXME: was missing, causing a NameError below

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                  %self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        stage = fid.variables['stage']
        i = len(time)

        #Store stage
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                                  precision = self.precision)

        stage[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()


#### NED is national exposure database (name changed to NEXIS)

LAT_TITLE = 'LATITUDE'
LONG_TITLE = 'LONGITUDE'
X_TITLE = 'x'
Y_TITLE = 'y'
class Exposure_csv:
    def __init__(self,file_name, latitude_title=LAT_TITLE,
                 longitude_title=LONG_TITLE, is_x_y_locations=None,
                 x_title=X_TITLE, y_title=Y_TITLE,
                 refine_polygon=None, title_check_list=None):
        """
        This class is for handling the exposure csv file.
        It reads the file in and converts the lats and longs to a geospatial
        data object.
        Use the methods to read and write columns.

        The format of the csv files it reads is:
           The first row is a title row.
           Commas are the delimiters.
           Each column is a 'set' of data.

        Feel free to use/expand it to read other csv files.


        It is not for adding and deleting rows

        Can geospatial handle string attributes? It's not made for them.
        Currently it can't load and save string attributes.

        So just use geospatial to hold the x, y and georef? Bad, since
        different attributes are in different structures.  Not so bad, the
        info to write if the .csv file is saved is in attribute_dic

        The location info is in the geospatial attribute.


        """
        self._file_name = file_name
        self._geospatial = None #

        # self._attribute_dic is a dictionary.
        # The keys are the column titles.
        # The values are lists of column data

        # self._title_index_dic is a dictionary.
        # The keys are the column titles.
        # The values are the index positions of file columns.
        self._attribute_dic, self._title_index_dic = \
            csv2dict(self._file_name, title_check_list=title_check_list)
        try:
            # Have code here that handles caps or lower
            lats = self._attribute_dic[latitude_title]
            longs = self._attribute_dic[longitude_title]

        except KeyError:
            # maybe a warning..
            # Let's see if this works..
            if False != is_x_y_locations:
                is_x_y_locations = True
            pass
        else:
            self._geospatial = Geospatial_data(latitudes = lats,
                 longitudes = longs)

        if is_x_y_locations is True:
            if self._geospatial is not None:
                pass #fixme throw an error
            try:
                xs = self._attribute_dic[x_title]
                ys = self._attribute_dic[y_title]
                points = [[float(i),float(j)] for i,j in map(None,xs,ys)]
            except KeyError:
                # maybe a warning..
                msg = "Could not find location information."
                raise TitleValueError, msg
            else:
                self._geospatial = Geospatial_data(data_points=points)

        # create a list of points that are in the refining_polygon
        # described by a list of indexes representing the points

    def __cmp__(self, other):
        #print "self._attribute_dic",self._attribute_dic
        #print "other._attribute_dic",other._attribute_dic
        #print "self._title_index_dic", self._title_index_dic
        #print "other._title_index_dic", other._title_index_dic

        # Check that other is an instance of this class
        if isinstance(self, type(other)):
            result = cmp(self._attribute_dic, other._attribute_dic)
            if result != 0:
                return result
            # The order of the columns is important. Therefore..
            result = cmp(self._title_index_dic, other._title_index_dic)
            if result != 0:
                return result
            for self_ls, other_ls in map(None, self._attribute_dic, \
                    other._attribute_dic):
                result = cmp(self._attribute_dic[self_ls],
                             other._attribute_dic[other_ls])
                if result != 0:
                    return result
            return 0
        else:
            return 1


    def get_column(self, column_name, use_refind_polygon=False):
        """
        Given a column name return a list of the column values.

        Note, the type of the values will be String!
        Do this to change a list of strings to a list of floats:
        time = [float(x) for x in time]

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        if not self._attribute_dic.has_key(column_name):
            msg = 'There is no column called %s!' % column_name
            raise TitleValueError, msg
        return self._attribute_dic[column_name]


    def get_value(self, value_column_name,
                  known_column_name,
                  known_values,
                  use_refind_polygon=False):
        """
        Do linear interpolation on the known_column_name column, using the
        known_values, to return a value from the value_column_name column.
        """
        pass


    def get_location(self, use_refind_polygon=False):
        """
        Return a geospatial object which describes the
        locations of the location file.

        Note, if there is no location info, this returns None.

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        return self._geospatial

    def set_column(self, column_name, column_values, overwrite=False):
        """
        Add a column to the 'end' (with the right most column being the end)
        of the csv file.

        Set overwrite to True if you want to overwrite a column.

        Note, in column_name white space is removed and case is not checked.
        Precondition:
        The column_name and column_values cannot have commas in them.
        """

        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        if len(column_values) != value_row_count:
            msg = 'The number of column values must equal the number of rows.'
            raise DataMissingValuesError, msg

        if self._attribute_dic.has_key(column_name):
            if not overwrite:
                msg = 'Column name %s already in use!' % column_name
                raise TitleValueError, msg
        else:
            # New title.  Add it to the title index.
            self._title_index_dic[column_name] = len(self._title_index_dic)
        self._attribute_dic[column_name] = column_values
        #print "self._title_index_dic[column_name]",self._title_index_dic

    def save(self, file_name=None):
        """
        Save the exposure csv file
        """
        if file_name is None:
            file_name = self._file_name

        fd = open(file_name, 'wb')
        writer = csv.writer(fd)

        # Write the title to a csv file
        line = [None]* len(self._title_index_dic)
        for title in self._title_index_dic.iterkeys():
            line[self._title_index_dic[title]]= title
        writer.writerow(line)

        # Write the values to a csv file
        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        for row_i in range(value_row_count):
            line = [None]* len(self._title_index_dic)
            for title in self._title_index_dic.iterkeys():
                line[self._title_index_dic[title]]= \
                     self._attribute_dic[title][row_i]
            writer.writerow(line)

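# Hedged usage sketch for Exposure_csv (not from the original source); the
# column titles 'STAGE' and 'DEPTH' below are illustrative only:
#
#     exposure = Exposure_csv('exposure.csv')
#     stages = [float(x) for x in exposure.get_column('STAGE')]
#     depths = [str(s - 0.5) for s in stages]
#     exposure.set_column('DEPTH', depths, overwrite=True)
#     exposure.save('exposure_out.csv')
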
def csv2dict(file_name, title_check_list=None):
    """
    Load the csv file into two dictionaries: one maps each title (key) to
    its column of data (value); the other maps each title (key) to its
    column index (value), to keep track of the column order.

    Two dictionaries are returned.

    WARNING: Values are returned as strings.
    Do this to change a list of strings to a list of floats:
        time = [float(x) for x in time]


    """

    #
    attribute_dic = {}
    title_index_dic = {}
    titles_stripped = [] # list of titles
    reader = csv.reader(file(file_name))

    # Read in and manipulate the title info
    titles = reader.next()
    for i, title in enumerate(titles):
        titles_stripped.append(title.strip())
        title_index_dic[title.strip()] = i
    title_count = len(titles_stripped)
    #print "title_index_dic",title_index_dic
    if title_check_list is not None:
        for title_check in title_check_list:
            #msg = "Reading error.  This column is not present ", title_check
            #assert title_index_dic.has_key(title_check), msg
            if not title_index_dic.has_key(title_check):
                #reader.close()
                msg = "Reading error.  This column is not present ", \
                      title_check
                raise IOError, msg



    # Create a dict of column values, indexed by column title
    for line in reader:
        if len(line) != title_count:
            raise IOError #FIXME make this nicer
        for i, value in enumerate(line):
            attribute_dic.setdefault(titles_stripped[i],[]).append(value)

    return attribute_dic, title_index_dic
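
# Hedged usage sketch (not from the original source), assuming a file
# 'exposure.csv' whose first row holds the column titles:
#
#     attributes, title_index = csv2dict('exposure.csv',
#                                        title_check_list=['LATITUDE'])
#     lats = [float(x) for x in attributes['LATITUDE']]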


#Auxiliary
def write_obj(filename, x, y, z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected.

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N == 3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j], y[i,j], z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1, base+2, base+3))

    outfile.close()


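# Hedged example (not from the original source): two disconnected triangles
# written as a Wavefront .obj file.
#
#     from Numeric import array, Float
#     x = array([[0., 1., 0.], [1., 1., 0.]], Float)
#     y = array([[0., 0., 1.], [0., 1., 1.]], Float)
#     z = array([[0., 0., 0.], [0., 0., 0.]], Float)
#     write_obj('two_triangles', x, y, z)   # creates two_triangles.obj
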
#########################################################
#Conversion routines
########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)


def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from anuga.config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    ##i = 0
    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float,tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]

        ##i += 1


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info
    #and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        lines = open(data_dir+os.sep+filename,'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError, 'Hmmmm'



        ##i = 0
        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float,tokens)

            for j in range(3):
                z[i,j] = values[j]

            ##i += 1

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)


def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()

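# Hedged usage sketch (not from the original source): keep every 5th
# timestep of an existing sww file.
#
#     filter_netcdf('run.sww', 'run_thinned.sww', first=0, step=5)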

#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename
    """

    cls = eval('Data_format_%s' %domain.format)
    return cls(domain, mode)

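# Hedged usage sketch: with domain.format == 'sww' (the usual setting),
# this resolves to the Data_format_sww class defined above.
#
#     writer = get_dataobject(domain)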



def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False,):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result

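# Hedged usage sketch (not from the original source): convert 'mydem.dem'
# to 'mydem.pts', caching the result so re-runs with an unchanged .dem
# file are cheap.
#
#     dem2pts('mydem', use_cache=True, verbose=True)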

def _dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    # FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    # Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  # Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  # Easting of lower left corner
    yllcorner = infile.yllcorner[0]  # Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    # Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    # Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    # Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    # Assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    # Compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    # Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min # Easting of lower left corner
    outfile.yllcorner = northing_min # Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    # Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # Calculating number of NODATA_values for each row in clipped region
    # FIXME: use array operations to do faster
    nn = 0
    k = 0
    i1_0 = 0
    j1_0 = 0
    thisj = 0
    thisi = 0
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    index1 = j1_0
    index2 = thisj

    # Dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints-nn

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]

    if verbose:
        print 'There are %d values in the elevation' %totalnopoints
        print 'There are %d values in the clipped elevation' %clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' %nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # Variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1
    # Store data
    global_index = 0
    # for i in range(nrows):
    for i in range(i1_0,thisi+1,1):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index

        v = dem_elevation_r[i,index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA # ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv # ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        #for j in range(ncols):
        for j in range(j1_0,index2+1,1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] != NODATA_value:
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()



def _read_hecras_cross_sections(lines):
    """Return block of surface lines for each cross section
    Starts with SURFACE LINE,
    Ends with END CROSS-SECTION
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if lines[i].strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if lines[i].strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])




1500def hecras_cross_sections2pts(basename_in,
1501                              basename_out=None,
1502                              verbose=False):
1503    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1504
1505    Example:
1506
1507
1508# RAS export file created on Mon 15Aug2005 11:42
1509# by HEC-RAS Version 3.1.1
1510
1511BEGIN HEADER:
1512  UNITS: METRIC
1513  DTM TYPE: TIN
1514  DTM: v:\1\cit\perth_topo\river_tin
1515  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1516  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1517  MAP PROJECTION: UTM
1518  PROJECTION ZONE: 50
1519  DATUM: AGD66
1520  VERTICAL DATUM:
1521  NUMBER OF REACHES:  19
1522  NUMBER OF CROSS-SECTIONS:  14206
1523END HEADER:
1524
1525
1526Only the SURFACE LINE data of the following form will be utilised
1527
1528  CROSS-SECTION:
1529    STREAM ID:Southern-Wungong
1530    REACH ID:Southern-Wungong
1531    STATION:19040.*
1532    CUT LINE:
1533      405548.671603161 , 6438142.7594925
1534      405734.536092045 , 6438326.10404912
1535      405745.130459356 , 6438331.48627354
1536      405813.89633823 , 6438368.6272789
1537    SURFACE LINE:
1538     405548.67,   6438142.76,   35.37
1539     405552.24,   6438146.28,   35.41
1540     405554.78,   6438148.78,   35.44
1541     405555.80,   6438149.79,   35.44
1542     405559.37,   6438153.31,   35.45
1543     405560.88,   6438154.81,   35.44
1544     405562.93,   6438156.83,   35.42
1545     405566.50,   6438160.35,   35.38
1546     405566.99,   6438160.83,   35.37
1547     ...
1548   END CROSS-SECTION
1549
1550    Convert to NetCDF pts format which is
1551
1552    points:  (Nx2) Float array
1553    elevation: N Float array
1554    """
1555
1556    import os
1557    from Scientific.IO.NetCDF import NetCDFFile
1558    from Numeric import Float, zeros, reshape
1559
1560    root = basename_in
1561
1562    #Get ASCII file
1563    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1564
1565    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1566
1567    lines = infile.readlines()
1568    infile.close()
1569
1570    if verbose: print 'Converting to pts format'
1571
1572    i = 0
1573    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1574        i += 1
1575
1576    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1577    i += 1
1578
1579    assert lines[i].strip().upper().startswith('UNITS:')
1580    units = lines[i].strip().split()[1]
1581    i += 1
1582
1583    assert lines[i].strip().upper().startswith('DTM TYPE:')
1584    i += 1
1585
1586    assert lines[i].strip().upper().startswith('DTM:')
1587    i += 1
1588
1589    assert lines[i].strip().upper().startswith('STREAM')
1590    i += 1
1591
1592    assert lines[i].strip().upper().startswith('CROSS')
1593    i += 1
1594
1595    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
    projection = lines[i].strip().split(':')[1].strip()
1597    i += 1
1598
1599    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1600    zone = int(lines[i].strip().split(':')[1])
1601    i += 1
1602
1603    assert lines[i].strip().upper().startswith('DATUM:')
    datum = lines[i].strip().split(':')[1].strip()
1605    i += 1
1606
1607    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1608    i += 1
1609
1610    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1611    i += 1
1612
1613    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1614    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1615    i += 1
1616
1617
1618    #Now read all points
1619    points = []
1620    elevation = []
1621    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1622        for k, entry in enumerate(entries):
1623            points.append(entry[:2])
1624            elevation.append(entry[2])
1625
1626
    msg = 'Actual number of cross sections == %d, reported as %d'\
          %(j+1, number_of_cross_sections)
    assert j+1 == number_of_cross_sections, msg
1630
1631    #Get output file
    if basename_out is None:
1633        ptsname = root + '.pts'
1634    else:
1635        ptsname = basename_out + '.pts'
1636
[4455]1637    geo_ref = Geo_reference(zone, 0, 0, datum, projection, units)
1638    geo = Geospatial_data(points, {"elevation":elevation},
1639                          verbose=verbose, geo_reference=geo_ref)
1640    geo.export_points_file(ptsname)
[2852]1641
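# A hedged usage sketch for hecras_cross_sections2pts (file names are
# hypothetical; assumes 'river.sdf' exists in the current directory):
#
#     hecras_cross_sections2pts('river', basename_out='river_xs',
#                               verbose=True)
#     # -> writes 'river_xs.pts' holding the SURFACE LINE points with an
#     #    'elevation' attribute
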
[4462]1642def export_grid(basename_in, extra_name_out = None,
1643                quantities = None, # defaults to elevation
1644                timestep = None,
1645                reduction = None,
1646                cellsize = 10,
1647                NODATA_value = -9999,
1648                easting_min = None,
1649                easting_max = None,
1650                northing_min = None,
1651                northing_max = None,
1652                verbose = False,
1653                origin = None,
1654                datum = 'WGS84',
1655                format = 'ers'):
    """Wrapper for sww2dem - see sww2dem for what most of the
    parameters do.

    quantities is a list of quantities; each quantity will be
    calculated for each sww file.

    Returns the basenames of the files produced, i.e. the directory
    plus the file name without the extension.  One output file is
    produced per input sww file for each quantity.
    """
1671   
1672    if quantities is None:
1673        quantities = ['elevation']
1674       
    if type(quantities) is str:
        quantities = [quantities]
1677
1678    # How many sww files are there?
1679    dir, base = os.path.split(basename_in)
[4586]1682
1683    iterate_over = get_all_swwfiles(dir,base,verbose)
[4463]1684   
[4526]1685    if dir == "":
1686        dir = "." # Unix compatibility
[4462]1687   
1688    files_out = []
[4586]1689    #print 'sww_file',iterate_over
[4548]1690    for sww_file in iterate_over:
[4462]1691        for quantity in quantities:
1692            if extra_name_out is None:
1693                basename_out = sww_file + '_' + quantity
1694            else:
1695                basename_out = sww_file + '_' + quantity + '_' \
1696                               + extra_name_out
[4462]1698       
[4524]1699            file_out = sww2dem(dir+sep+sww_file, dir+sep+basename_out,
[4462]1700                               quantity, 
1701                               timestep,
1702                               reduction,
1703                               cellsize,
1704                               NODATA_value,
1705                               easting_min,
1706                               easting_max,
1707                               northing_min,
1708                               northing_max,
1709                               verbose,
1710                               origin,
1711                               datum,
1712                               format)
1713            files_out.append(file_out)
1715    return files_out
[4545]1716
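# A hedged usage sketch for export_grid (hypothetical file names; assumes a
# directory 'outputs' holding sww files whose names start with 'scenario'):
#
#     files = export_grid('outputs' + sep + 'scenario',
#                         quantities=['elevation', 'stage'],
#                         cellsize=25, format='asc', verbose=True)
#     # 'files' lists the basenames of the grids written, one per
#     # sww file and quantity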
1717
1718def get_timeseries(production_dirs, output_dir, scenario_name, gauges_dir_name,
1719                   plot_quantity, generate_fig = False,
1720                   reportname = None, surface = False, time_min = None,
1721                   time_max = None, title_on = False, verbose = True,
1722                   nodes=None):
    """Generate time series plots and/or reports from a set of sww files.

    nodes - number of processes used (None for a sequential run).

    Warning: this function has no tests.
    """
    if reportname is None:
        report = False
    else:
        report = True
1732       
1733    if nodes is None:
1734        is_parallel = False
1735    else:
1736        is_parallel = True
1737       
1738    # Generate figures
1739    swwfiles = {}
[4462]1740   
[4545]1741    if is_parallel is True:   
1742        for i in range(nodes):
1743            print 'Sending node %d of %d' %(i,nodes)
1744            swwfiles = {}
            if reportname is not None:
                reportname = reportname + '_%s' %(i)
1747            for label_id in production_dirs.keys():
1748                if label_id == 'boundaries':
1749                    swwfile = best_boundary_sww
1750                else:
1751                    file_loc = output_dir + label_id + sep
1752                    sww_extra = '_P%s_%s' %(i,nodes)
1753                    swwfile = file_loc + scenario_name + sww_extra + '.sww'
1754                    print 'swwfile',swwfile
1755                    swwfiles[swwfile] = label_id
1756
            # Process the sww files gathered for this node
            texname, elev_output = sww2timeseries(swwfiles,
                                                  gauges_dir_name,
                                                  production_dirs,
                                                  report = report,
                                                  reportname = reportname,
                                                  plot_quantity = plot_quantity,
                                                  generate_fig = generate_fig,
                                                  surface = surface,
                                                  time_min = time_min,
                                                  time_max = time_max,
                                                  title_on = title_on,
                                                  verbose = verbose)
1769    else:   
1770        for label_id in production_dirs.keys():       
1771            if label_id == 'boundaries':
1772                print 'boundaries'
1773                file_loc = project.boundaries_in_dir
1774                swwfile = project.boundaries_dir_name3 + '.sww'
1775                #  swwfile = boundary_dir_filename
1776            else:
1777                file_loc = output_dir + label_id + sep
1778                swwfile = file_loc + scenario_name + '.sww'
1779            swwfiles[swwfile] = label_id
1780       
1781        texname, elev_output = sww2timeseries(swwfiles,
1782                                              gauges_dir_name,
1783                                              production_dirs,
1784                                              report = report,
1785                                              reportname = reportname,
1786                                              plot_quantity = plot_quantity,
1787                                              generate_fig = generate_fig,
1788                                              surface = surface,
1789                                              time_min = time_min,
1790                                              time_max = time_max,
1791                                              title_on = title_on,
1792                                              verbose = verbose)
1793                                         
1794
1795   
[2852]1796def sww2dem(basename_in, basename_out = None,
[4462]1797            quantity = None, # defaults to elevation
[2852]1798            timestep = None,
1799            reduction = None,
1800            cellsize = 10,
1801            NODATA_value = -9999,
1802            easting_min = None,
1803            easting_max = None,
1804            northing_min = None,
1805            northing_max = None,
1806            verbose = False,
1807            origin = None,
1808            datum = 'WGS84',
[4462]1809            format = 'ers'):
[2852]1810
    """Read SWW file and convert to Digital Elevation Model format
    (.asc or .ers)
[2852]1813
1814    Example (ASC):
1815
1816    ncols         3121
1817    nrows         1800
1818    xllcorner     722000
1819    yllcorner     5893000
1820    cellsize      25
1821    NODATA_value  -9999
1822    138.3698 137.4194 136.5062 135.5558 ..........
1823
    Also write an accompanying file with same basename_out but extension .prj
1825    used to fix the UTM zone, datum, false northings and eastings.
1826
1827    The prj format is assumed to be as
1828
1829    Projection    UTM
1830    Zone          56
1831    Datum         WGS84
1832    Zunits        NO
1833    Units         METERS
1834    Spheroid      WGS84
1835    Xshift        0.0000000000
1836    Yshift        10000000.0000000000
1837    Parameters
1838
1839
1840    The parameter quantity must be the name of an existing quantity or
1841    an expression involving existing quantities. The default is
[4462]1842    'elevation'. Quantity is not a list of quantities.
[2852]1843
1844    if timestep (an index) is given, output quantity at that timestep
1845
1846    if reduction is given use that to reduce quantity over all timesteps.
1847
    datum is the coordinate datum, e.g. 'WGS84' (the default)
1849
1850    format can be either 'asc' or 'ers'
1851    """
1852
1853    import sys
[4663]1854    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, \
1855         sometrue
[2852]1856    from Numeric import array2string
1857
[4663]1858    from anuga.utilities.polygon import inside_polygon, outside_polygon, \
1859         separate_points_by_polygon
1860    from anuga.abstract_2d_finite_volumes.util import \
1861         apply_expression_to_dictionary
[2852]1862
1863    msg = 'Format must be either asc or ers'
1864    assert format.lower() in ['asc', 'ers'], msg
1865
1866
1867    false_easting = 500000
1868    false_northing = 10000000
1869
1870    if quantity is None:
1871        quantity = 'elevation'
[4462]1872       
[2852]1873    if reduction is None:
1874        reduction = max
1875
1876    if basename_out is None:
1877        basename_out = basename_in + '_%s' %quantity
1878
[4462]1879    if quantity_formula.has_key(quantity):
1880        quantity = quantity_formula[quantity]
1881       
[2852]1882    swwfile = basename_in + '.sww'
1883    demfile = basename_out + '.' + format
    # Note the use of a .ers extension is optional (write_ermapper_grid will
    # deal with either option)
[4413]1886   
[4413]1888   
[4551]1889    # Read sww file
[4524]1890    if verbose: 
1891        print 'Reading from %s' %swwfile
        print 'Output basename is %s' %basename_out
1893   
[2852]1894    from Scientific.IO.NetCDF import NetCDFFile
1895    fid = NetCDFFile(swwfile)
1896
1897    #Get extent and reference
1898    x = fid.variables['x'][:]
1899    y = fid.variables['y'][:]
1900    volumes = fid.variables['volumes'][:]
[3961]1901    if timestep is not None:
1902        times = fid.variables['time'][timestep]
[3967]1903    else:
1904        times = fid.variables['time'][:]
[2852]1905
1906    number_of_timesteps = fid.dimensions['number_of_timesteps']
1907    number_of_points = fid.dimensions['number_of_points']
[3967]1908   
[2852]1909    if origin is None:
1910
[4551]1911        # Get geo_reference
1912        # sww files don't have to have a geo_ref
[2852]1913        try:
1914            geo_reference = Geo_reference(NetCDFObject=fid)
1915        except AttributeError, e:
[4551]1916            geo_reference = Geo_reference() # Default georef object
[2852]1917
1918        xllcorner = geo_reference.get_xllcorner()
1919        yllcorner = geo_reference.get_yllcorner()
1920        zone = geo_reference.get_zone()
1921    else:
1922        zone = origin[0]
1923        xllcorner = origin[1]
1924        yllcorner = origin[2]
1925
1926
1927
[4663]1928    # FIXME: Refactor using code from Interpolation_function.statistics
1929    # (in interpolate.py)
[4551]1930    # Something like print swwstats(swwname)
[2852]1931    if verbose:
1932        print '------------------------------------------------'
1933        print 'Statistics of SWW file:'
1934        print '  Name: %s' %swwfile
1935        print '  Reference:'
1936        print '    Lower left corner: [%f, %f]'\
1937              %(xllcorner, yllcorner)
[3961]1938        if timestep is not None:
1939            print '    Time: %f' %(times)
1940        else:
1941            print '    Start time: %f' %fid.starttime[0]
[2852]1942        print '  Extent:'
1943        print '    x [m] in [%f, %f], len(x) == %d'\
1944              %(min(x.flat), max(x.flat), len(x.flat))
1945        print '    y [m] in [%f, %f], len(y) == %d'\
1946              %(min(y.flat), max(y.flat), len(y.flat))
[3961]1947        if timestep is not None:
1948            print '    t [s] = %f, len(t) == %d' %(times, 1)
1949        else:
1950            print '    t [s] in [%f, %f], len(t) == %d'\
[3967]1951              %(min(times), max(times), len(times))
[2852]1952        print '  Quantities [SI units]:'
[5189]1953        # Comment out for reduced memory consumption
        for name in ['stage', 'xmomentum', 'ymomentum']:
            q = fid.variables[name][:].flat
            if timestep is not None:
                q = q[timestep*len(x):(timestep+1)*len(x)]
            print '    %s in [%f, %f]' %(name, min(q), max(q))
        for name in ['elevation']:
            q = fid.variables[name][:].flat
            print '    %s in [%f, %f]' %(name, min(q), max(q))
1962           
[2891]1963    # Get quantity and reduce if applicable
[2852]1964    if verbose: print 'Processing quantity %s' %quantity
1965
[2891]1966    # Turn NetCDF objects into Numeric arrays
[5115]1967    try:
1968        q = fid.variables[quantity][:] 
[5189]1969       
1970       
[5115]1971    except:
1972        quantity_dict = {}
1973        for name in fid.variables.keys():
1974            quantity_dict[name] = fid.variables[name][:] 
1975        #Convert quantity expression to quantities found in sww file   
1976        q = apply_expression_to_dictionary(quantity, quantity_dict)
[2852]1978    if len(q.shape) == 2:
1979        #q has a time component and needs to be reduced along
1980        #the temporal dimension
1981        if verbose: print 'Reducing quantity %s' %quantity
1982        q_reduced = zeros( number_of_points, Float )
[3967]1983       
1984        if timestep is not None:
1985            for k in range(number_of_points):
1986                q_reduced[k] = q[timestep,k] 
1987        else:
1988            for k in range(number_of_points):
1989                q_reduced[k] = reduction( q[:,k] )
[2852]1990
1991        q = q_reduced
1992
1993    #Post condition: Now q has dimension: number_of_points
1994    assert len(q.shape) == 1
1995    assert q.shape[0] == number_of_points
1996
1997    if verbose:
1998        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1999
2000
2001    #Create grid and update xll/yll corner and x,y
2002
2003    #Relative extent
2004    if easting_min is None:
2005        xmin = min(x)
2006    else:
2007        xmin = easting_min - xllcorner
2008
2009    if easting_max is None:
2010        xmax = max(x)
2011    else:
2012        xmax = easting_max - xllcorner
2013
2014    if northing_min is None:
2015        ymin = min(y)
2016    else:
2017        ymin = northing_min - yllcorner
2018
2019    if northing_max is None:
2020        ymax = max(y)
2021    else:
2022        ymax = northing_max - yllcorner
2023
2024
2025
2026    if verbose: print 'Creating grid'
2027    ncols = int((xmax-xmin)/cellsize)+1
2028    nrows = int((ymax-ymin)/cellsize)+1
2029
2030
2031    #New absolute reference and coordinates
2032    newxllcorner = xmin+xllcorner
2033    newyllcorner = ymin+yllcorner
2034
2035    x = x+xllcorner-newxllcorner
2036    y = y+yllcorner-newyllcorner
[3961]2037   
[2852]2038    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2039    assert len(vertex_points.shape) == 2
2040
2041    grid_points = zeros ( (ncols*nrows, 2), Float )
2042
2043
2044    for i in xrange(nrows):
2045        if format.lower() == 'asc':
2046            yg = i*cellsize
        else:
            # This will flip the order of the y values for ers
            yg = (nrows-i)*cellsize
2050
2051        for j in xrange(ncols):
2052            xg = j*cellsize
2053            k = i*ncols + j
2054
2055            grid_points[k,0] = xg
2056            grid_points[k,1] = yg
2057
2058    #Interpolate
[3514]2059    from anuga.fit_interpolate.interpolate import Interpolate
[2852]2060
[4480]2061    # Remove loners from vertex_points, volumes here
2062    vertex_points, volumes = remove_lone_verts(vertex_points, volumes)
[2852]2065    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2066
2067    #Interpolate using quantity values
2068    if verbose: print 'Interpolating'
2069    grid_values = interp.interpolate(q, grid_points).flat
2070
2071
2072    if verbose:
2073        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
2074                                                      max(grid_values))
2075
2076    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2077    P = interp.mesh.get_boundary_polygon()
2078    outside_indices = outside_polygon(grid_points, P, closed=True)
2079
2080    for i in outside_indices:
2081        grid_values[i] = NODATA_value
2082
2083
2084
2085
2086    if format.lower() == 'ers':
2087        # setup ERS header information
2088        grid_values = reshape(grid_values,(nrows, ncols))
2089        header = {}
2090        header['datum'] = '"' + datum + '"'
2091        # FIXME The use of hardwired UTM and zone number needs to be made optional
2092        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
2093        header['projection'] = '"UTM-' + str(zone) + '"'
2094        header['coordinatetype'] = 'EN'
2095        if header['coordinatetype'] == 'LL':
2096            header['longitude'] = str(newxllcorner)
2097            header['latitude'] = str(newyllcorner)
2098        elif header['coordinatetype'] == 'EN':
2099            header['eastings'] = str(newxllcorner)
2100            header['northings'] = str(newyllcorner)
2101        header['nullcellvalue'] = str(NODATA_value)
2102        header['xdimension'] = str(cellsize)
2103        header['ydimension'] = str(cellsize)
2104        header['value'] = '"' + quantity + '"'
2105        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
2106
2107
2108        #Write
2109        if verbose: print 'Writing %s' %demfile
2110        import ermapper_grids
2111        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
2112
        fid.close()
        return basename_out
2114    else:
2115        #Write to Ascii format
2116
2117        #Write prj file
2118        prjfile = basename_out + '.prj'
2119
2120        if verbose: print 'Writing %s' %prjfile
2121        prjid = open(prjfile, 'w')
2122        prjid.write('Projection    %s\n' %'UTM')
2123        prjid.write('Zone          %d\n' %zone)
2124        prjid.write('Datum         %s\n' %datum)
2125        prjid.write('Zunits        NO\n')
2126        prjid.write('Units         METERS\n')
2127        prjid.write('Spheroid      %s\n' %datum)
2128        prjid.write('Xshift        %d\n' %false_easting)
2129        prjid.write('Yshift        %d\n' %false_northing)
2130        prjid.write('Parameters\n')
2131        prjid.close()
2132
2133
2134
2135        if verbose: print 'Writing %s' %demfile
2136
2137        ascid = open(demfile, 'w')
2138
2139        ascid.write('ncols         %d\n' %ncols)
2140        ascid.write('nrows         %d\n' %nrows)
2141        ascid.write('xllcorner     %d\n' %newxllcorner)
2142        ascid.write('yllcorner     %d\n' %newyllcorner)
2143        ascid.write('cellsize      %f\n' %cellsize)
2144        ascid.write('NODATA_value  %d\n' %NODATA_value)
2145
2146
2147        #Get bounding polygon from mesh
2151        for i in range(nrows):
2152            if verbose and i%((nrows+10)/10)==0:
2153                print 'Doing row %d of %d' %(i, nrows)
2154
2155            base_index = (nrows-i-1)*ncols
2156
2157            slice = grid_values[base_index:base_index+ncols]
2158            s = array2string(slice, max_line_width=sys.maxint)
2159            ascid.write(s[1:-1] + '\n')
2160
2161
2168
2169        #Close
2170        ascid.close()
2171        fid.close()
[4462]2172        return basename_out
2173
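# A hedged usage sketch for sww2dem (hypothetical file names; assumes
# 'scenario.sww' exists).  Note that quantity may be an expression over
# the quantities stored in the sww file:
#
#     sww2dem('scenario', basename_out='scenario_depth',
#             quantity='stage-elevation',
#             reduction=max, cellsize=25,
#             format='asc', verbose=True)
#     # -> writes 'scenario_depth.asc' and the accompanying
#     #    'scenario_depth.prj'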
[5189]2174
[2852]2175#Backwards compatibility
2176def sww2asc(basename_in, basename_out = None,
2177            quantity = None,
2178            timestep = None,
2179            reduction = None,
2180            cellsize = 10,
2181            verbose = False,
2182            origin = None):
2183    print 'sww2asc will soon be obsoleted - please use sww2dem'
    sww2dem(basename_in,
            basename_out = basename_out,
            quantity = quantity,
            timestep = timestep,
            reduction = reduction,
            cellsize = cellsize,
            verbose = verbose,
            origin = origin,
            datum = 'WGS84',
            format = 'asc')
2194
2195def sww2ers(basename_in, basename_out = None,
2196            quantity = None,
2197            timestep = None,
2198            reduction = None,
2199            cellsize = 10,
2200            verbose = False,
2201            origin = None,
2202            datum = 'WGS84'):
2203    print 'sww2ers will soon be obsoleted - please use sww2dem'
2204    sww2dem(basename_in,
2205            basename_out = basename_out,
2206            quantity = quantity,
2207            timestep = timestep,
2208            reduction = reduction,
2209            cellsize = cellsize,
2210            verbose = verbose,
2211            origin = origin,
[2891]2212            datum = datum,
2213            format = 'ers')
[2852]2214################################# END COMPATIBILITY ##############
2215
2216
2217
[2891]2218def sww2pts(basename_in, basename_out=None,
2219            data_points=None,
2220            quantity=None,
2221            timestep=None,
2222            reduction=None,
2223            NODATA_value=-9999,
2224            verbose=False,
2225            origin=None):
2227
2228
2229    """Read SWW file and convert to interpolated values at selected points
2230
    The parameter 'quantity' must be the name of an existing quantity or
2232    an expression involving existing quantities. The default is
2233    'elevation'.
2234
2235    if timestep (an index) is given, output quantity at that timestep
2236
2237    if reduction is given use that to reduce quantity over all timesteps.
2238
    data_points (Nx2 array) gives locations of points where the quantity
    is to be computed.
2240   
2241    """
2242
2243    import sys
2244    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
2245    from Numeric import array2string
2246
[3514]2247    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
[3560]2248    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
[2891]2249
[3514]2250    from anuga.geospatial_data.geospatial_data import Geospatial_data
[2891]2251
2252    if quantity is None:
2253        quantity = 'elevation'
2254
2255    if reduction is None:
2256        reduction = max
2257
2258    if basename_out is None:
2259        basename_out = basename_in + '_%s' %quantity
2260
2261    swwfile = basename_in + '.sww'
2262    ptsfile = basename_out + '.pts'
2263
2264    # Read sww file
2265    if verbose: print 'Reading from %s' %swwfile
2266    from Scientific.IO.NetCDF import NetCDFFile
2267    fid = NetCDFFile(swwfile)
2268
2269    # Get extent and reference
2270    x = fid.variables['x'][:]
2271    y = fid.variables['y'][:]
2272    volumes = fid.variables['volumes'][:]
2273
2274    number_of_timesteps = fid.dimensions['number_of_timesteps']
2275    number_of_points = fid.dimensions['number_of_points']
2276    if origin is None:
2277
2278        # Get geo_reference
2279        # sww files don't have to have a geo_ref
2280        try:
2281            geo_reference = Geo_reference(NetCDFObject=fid)
2282        except AttributeError, e:
2283            geo_reference = Geo_reference() #Default georef object
2284
2285        xllcorner = geo_reference.get_xllcorner()
2286        yllcorner = geo_reference.get_yllcorner()
2287        zone = geo_reference.get_zone()
2288    else:
2289        zone = origin[0]
2290        xllcorner = origin[1]
2291        yllcorner = origin[2]
2292
2293
2294
2295    # FIXME: Refactor using code from file_function.statistics
2296    # Something like print swwstats(swwname)
2297    if verbose:
2298        x = fid.variables['x'][:]
2299        y = fid.variables['y'][:]
2300        times = fid.variables['time'][:]
2301        print '------------------------------------------------'
2302        print 'Statistics of SWW file:'
2303        print '  Name: %s' %swwfile
2304        print '  Reference:'
2305        print '    Lower left corner: [%f, %f]'\
2306              %(xllcorner, yllcorner)
2307        print '    Start time: %f' %fid.starttime[0]
2308        print '  Extent:'
2309        print '    x [m] in [%f, %f], len(x) == %d'\
2310              %(min(x.flat), max(x.flat), len(x.flat))
2311        print '    y [m] in [%f, %f], len(y) == %d'\
2312              %(min(y.flat), max(y.flat), len(y.flat))
2313        print '    t [s] in [%f, %f], len(t) == %d'\
2314              %(min(times), max(times), len(times))
2315        print '  Quantities [SI units]:'
2316        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2317            q = fid.variables[name][:].flat
2318            print '    %s in [%f, %f]' %(name, min(q), max(q))
2319
2320
2321
2322    # Get quantity and reduce if applicable
2323    if verbose: print 'Processing quantity %s' %quantity
2324
2325    # Turn NetCDF objects into Numeric arrays
2326    quantity_dict = {}
2327    for name in fid.variables.keys():
2328        quantity_dict[name] = fid.variables[name][:]
2329
2330
2331
2332    # Convert quantity expression to quantities found in sww file   
2333    q = apply_expression_to_dictionary(quantity, quantity_dict)
2334
2335
2336
2337    if len(q.shape) == 2:
2338        # q has a time component and needs to be reduced along
2339        # the temporal dimension
2340        if verbose: print 'Reducing quantity %s' %quantity
2341        q_reduced = zeros( number_of_points, Float )
2342
2343        for k in range(number_of_points):
2344            q_reduced[k] = reduction( q[:,k] )
2345
2346        q = q_reduced
2347
2348    # Post condition: Now q has dimension: number_of_points
2349    assert len(q.shape) == 1
2350    assert q.shape[0] == number_of_points
2351
2352
2353    if verbose:
2354        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
2355
2356
2357    # Create grid and update xll/yll corner and x,y
2358    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2359    assert len(vertex_points.shape) == 2
2360
2361    # Interpolate
[3514]2362    from anuga.fit_interpolate.interpolate import Interpolate
[2891]2363    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2364
2365    # Interpolate using quantity values
2366    if verbose: print 'Interpolating'
2367    interpolated_values = interp.interpolate(q, data_points).flat
2368
2369
2370    if verbose:
2371        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
2372                                                      max(interpolated_values))
2373
2374    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2375    P = interp.mesh.get_boundary_polygon()
2376    outside_indices = outside_polygon(data_points, P, closed=True)
2377
2378    for i in outside_indices:
2379        interpolated_values[i] = NODATA_value
2380
2381
2382    # Store results   
2383    G = Geospatial_data(data_points=data_points,
2384                        attributes=interpolated_values)
2385
2386    G.export_points_file(ptsfile, absolute = True)
2387
[2931]2388    fid.close()
[2891]2389
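# A hedged usage sketch for sww2pts (hypothetical file name and coordinates;
# assumes 'scenario.sww' exists):
#
#     from Numeric import array
#     gauge_points = array([[306421.0, 6254491.0],
#                           [306413.0, 6254521.0]])
#     sww2pts('scenario', data_points=gauge_points,
#             quantity='elevation', verbose=True)
#     # -> writes 'scenario_elevation.pts' with the interpolated values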
2390
[2852]2391def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2392                                  use_cache = False,
2393                                  verbose = False):
    """Read Digital Elevation Model from the following ASCII format (.asc)
2395
2396    Example:
2397
2398    ncols         3121
2399    nrows         1800
2400    xllcorner     722000
2401    yllcorner     5893000
2402    cellsize      25
2403    NODATA_value  -9999
2404    138.3698 137.4194 136.5062 135.5558 ..........
2405
2406    Convert basename_in + '.asc' to NetCDF format (.dem)
2407    mimicking the ASCII format closely.
2408
2409
2410    An accompanying file with same basename_in but extension .prj must exist
2411    and is used to fix the UTM zone, datum, false northings and eastings.
2412
2413    The prj format is assumed to be as
2414
2415    Projection    UTM
2416    Zone          56
2417    Datum         WGS84
2418    Zunits        NO
2419    Units         METERS
2420    Spheroid      WGS84
2421    Xshift        0.0000000000
2422    Yshift        10000000.0000000000
2423    Parameters
2424    """
2425
2426
2427
2428    kwargs = {'basename_out': basename_out, 'verbose': verbose}
2429
2430    if use_cache is True:
2431        from caching import cache
2432        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
[4688]2433                       dependencies = [basename_in + '.asc',
2434                                       basename_in + '.prj'],
[2852]2435                       verbose = verbose)
2436
2437    else:
2438        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
2439
2440    return result
2441
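# A hedged usage sketch for convert_dem_from_ascii2netcdf (hypothetical file
# names; assumes 'topography.asc' and 'topography.prj' exist):
#
#     convert_dem_from_ascii2netcdf('topography', use_cache=True,
#                                   verbose=True)
#     # -> writes 'topography.dem'; with use_cache=True the result is
#     #    cached so identical subsequent calls are fast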
2442
2443
2444
2445
2446
2447def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2448                                  verbose = False):
    """Read Digital Elevation Model from the following ASCII format (.asc)
2450
2451    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2452    """
2453
2454    import os
2455    from Scientific.IO.NetCDF import NetCDFFile
2456    from Numeric import Float, array
2457
2458    #root, ext = os.path.splitext(basename_in)
2459    root = basename_in
2460
2461    ###########################################
2462    # Read Meta data
2463    if verbose: print 'Reading METADATA from %s' %root + '.prj'
2464    metadatafile = open(root + '.prj')
2465    metalines = metadatafile.readlines()
2466    metadatafile.close()
2467
2468    L = metalines[0].strip().split()
2469    assert L[0].strip().lower() == 'projection'
2470    projection = L[1].strip()                   #TEXT
2471
2472    L = metalines[1].strip().split()
2473    assert L[0].strip().lower() == 'zone'
2474    zone = int(L[1].strip())
2475
2476    L = metalines[2].strip().split()
2477    assert L[0].strip().lower() == 'datum'
2478    datum = L[1].strip()                        #TEXT
2479
2480    L = metalines[3].strip().split()
2481    assert L[0].strip().lower() == 'zunits'     #IGNORE
2482    zunits = L[1].strip()                       #TEXT
2483
2484    L = metalines[4].strip().split()
2485    assert L[0].strip().lower() == 'units'
2486    units = L[1].strip()                        #TEXT
2487
2488    L = metalines[5].strip().split()
2489    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2490    spheroid = L[1].strip()                     #TEXT
2491
2492    L = metalines[6].strip().split()
2493    assert L[0].strip().lower() == 'xshift'
2494    false_easting = float(L[1].strip())
2495
2496    L = metalines[7].strip().split()
2497    assert L[0].strip().lower() == 'yshift'
2498    false_northing = float(L[1].strip())
2499
2501
2502
2503    ###########################################
2504    #Read DEM data
2505
2506    datafile = open(basename_in + '.asc')
2507
2508    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2509    lines = datafile.readlines()
2510    datafile.close()
2511
2512    if verbose: print 'Got', len(lines), ' lines'
2513
2514    ncols = int(lines[0].split()[1].strip())
2515    nrows = int(lines[1].split()[1].strip())
[4824]2516
2517    # Do cellsize (line 4) before line 2 and 3
2518    cellsize = float(lines[4].split()[1].strip())   
2519
2520    # Checks suggested by Joaquim Luis
2521    # Our internal representation of xllcorner
2522    # and yllcorner is non-standard.
2523    xref = lines[2].split()
2524    if xref[0].strip() == 'xllcorner':
2525        xllcorner = float(xref[1].strip()) # + 0.5*cellsize # Correct offset
2526    elif xref[0].strip() == 'xllcenter':
2527        xllcorner = float(xref[1].strip())
2528    else:
2529        msg = 'Unknown keyword: %s' %xref[0].strip()
2530        raise Exception, msg
2531
2532    yref = lines[3].split()
2533    if yref[0].strip() == 'yllcorner':
2534        yllcorner = float(yref[1].strip()) # + 0.5*cellsize # Correct offset
2535    elif yref[0].strip() == 'yllcenter':
2536        yllcorner = float(yref[1].strip())
2537    else:
2538        msg = 'Unknown keyword: %s' %yref[0].strip()
2539        raise Exception, msg
2540       
2541
[2852]2542    NODATA_value = int(lines[5].split()[1].strip())
2543
2544    assert len(lines) == nrows + 6
2545
2546
2547    ##########################################
2548
2549
    if basename_out is None:
2551        netcdfname = root + '.dem'
2552    else:
2553        netcdfname = basename_out + '.dem'
2554
2555    if verbose: print 'Store to NetCDF file %s' %netcdfname
2556    # NetCDF file definition
2557    fid = NetCDFFile(netcdfname, 'w')
2558
2559    #Create new file
2560    fid.institution = 'Geoscience Australia'
2561    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2562                      'of spatial point data'
2563
2564    fid.ncols = ncols
2565    fid.nrows = nrows
2566    fid.xllcorner = xllcorner
2567    fid.yllcorner = yllcorner
2568    fid.cellsize = cellsize
2569    fid.NODATA_value = NODATA_value
2570
2571    fid.zone = zone
2572    fid.false_easting = false_easting
2573    fid.false_northing = false_northing
2574    fid.projection = projection
2575    fid.datum = datum
2576    fid.units = units
2577
2578
2579    # dimension definitions
2580    fid.createDimension('number_of_rows', nrows)
2581    fid.createDimension('number_of_columns', ncols)
2582
2583    # variable definitions
2584    fid.createVariable('elevation', Float, ('number_of_rows',
2585                                            'number_of_columns'))
2586
2587    # Get handles to the variables
2588    elevation = fid.variables['elevation']
2589
2590    #Store data
2591    n = len(lines[6:])
2592    for i, line in enumerate(lines[6:]):
2593        fields = line.split()
2594        if verbose and i%((n+10)/10)==0:
2595            print 'Processing row %d of %d' %(i, nrows)
2596
2597        elevation[i, :] = array([float(x) for x in fields])
2598
2599    fid.close()
2600
2601
2602
2603
2604
2605def ferret2sww(basename_in, basename_out = None,
2606               verbose = False,
2607               minlat = None, maxlat = None,
2608               minlon = None, maxlon = None,
2609               mint = None, maxt = None, mean_stage = 0,
2610               origin = None, zscale = 1,
2611               fail_on_NaN = True,
2612               NaN_filler = 0,
2613               elevation = None,
[3694]2614               inverted_bathymetry = True
[2852]2615               ): #FIXME: Bathymetry should be obtained
2616                                  #from MOST somehow.
2617                                  #Alternatively from elsewhere
2618                                  #or, as a last resort,
2619                                  #specified here.
2620                                  #The value of -100 will work
2621                                  #for the Wollongong tsunami
2622                                  #scenario but is very hacky
2623    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
[3560]2624    sww format native to abstract_2d_finite_volumes.
[2852]2625
2626    Specify only basename_in and read files of the form
2627    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2628    relative height, x-velocity and y-velocity, respectively.
2629
2630    Also convert latitude and longitude to UTM. All coordinates are
2631    assumed to be given in the GDA94 datum.
2632
    min's and max's: If omitted - full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees.
2636
2637    origin is a 3-tuple with geo referenced
2638    UTM coordinates (zone, easting, northing)
2639
2640    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2641    which means that longitude is the fastest
2642    varying dimension (row major order, so to speak)
2643
2644    ferret2sww uses grid points as vertices in a triangular grid
2645    counting vertices from lower left corner upwards, then right
2646    """
2647
2648    import os
2649    from Scientific.IO.NetCDF import NetCDFFile
2650    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2651    from Numeric import allclose, around
2652
2653    precision = Float
2654
2655    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2656
    if minlat is not None:
        assert -90 < minlat < 90, msg
    if maxlat is not None:
        assert -90 < maxlat < 90, msg
        if minlat is not None:
            assert maxlat > minlat
    if minlon is not None:
        assert -180 < minlon < 180, msg
    if maxlon is not None:
        assert -180 < maxlon < 180, msg
        if minlon is not None:
            assert maxlon > minlon
2669       
[2852]2670
2671
2672    #Get NetCDF data
2673    if verbose: print 'Reading files %s_*.nc' %basename_in
[2852]2675    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2676    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2677    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2678    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2679
2680    if basename_out is None:
2681        swwname = basename_in + '.sww'
2682    else:
2683        swwname = basename_out + '.sww'
2684
[4418]2685    # Get dimensions of file_h
[2852]2686    for dimension in file_h.dimensions.keys():
2687        if dimension[:3] == 'LON':
2688            dim_h_longitude = dimension
2689        if dimension[:3] == 'LAT':
2690            dim_h_latitude = dimension
2691        if dimension[:4] == 'TIME':
2692            dim_h_time = dimension
2693
2697
2698    times = file_h.variables[dim_h_time]
2699    latitudes = file_h.variables[dim_h_latitude]
2700    longitudes = file_h.variables[dim_h_longitude]
[5347]2701
2702    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],
2703                                                  longitudes[:],
2704                                                  minlat, maxlat,
2705                                                  minlon, maxlon)
[4418]2706    # get dimensions for file_e
[2852]2707    for dimension in file_e.dimensions.keys():
2708        if dimension[:3] == 'LON':
2709            dim_e_longitude = dimension
2710        if dimension[:3] == 'LAT':
2711            dim_e_latitude = dimension
2712
[4418]2713    # get dimensions for file_u
[2852]2714    for dimension in file_u.dimensions.keys():
2715        if dimension[:3] == 'LON':
2716            dim_u_longitude = dimension
2717        if dimension[:3] == 'LAT':
2718            dim_u_latitude = dimension
2719        if dimension[:4] == 'TIME':
2720            dim_u_time = dimension
2721           
[4418]2722    # get dimensions for file_v
[2852]2723    for dimension in file_v.dimensions.keys():
2724        if dimension[:3] == 'LON':
2725            dim_v_longitude = dimension
2726        if dimension[:3] == 'LAT':
2727            dim_v_latitude = dimension
2728        if dimension[:4] == 'TIME':
2729            dim_v_time = dimension
2730
2731
[4418]2732    # Precision used by most for lat/lon is 4 or 5 decimals
[2852]2733    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2734    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2735
[4418]2736    # Check that files are compatible
[2852]2737    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2738    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2739    assert allclose(latitudes, e_lat)
2740
2741    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2742    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2743    assert allclose(longitudes, e_lon)
2744
[4418]2745    if mint is None:
[2852]2746        jmin = 0
[4418]2747        mint = times[0]       
[2852]2748    else:
2749        jmin = searchsorted(times, mint)
[4024]2750       
[4418]2751    if maxt is None:
2752        jmax = len(times)
2753        maxt = times[-1]
[2852]2754    else:
2755        jmax = searchsorted(times, maxt)
2756
    # Note: kmin, kmax, lmin and lmax were already computed above with
    # identical arguments, so the indexes are reused here.
[2852]2763
2764
2765    times = times[jmin:jmax]
2766    latitudes = latitudes[kmin:kmax]
2767    longitudes = longitudes[lmin:lmax]
2768
[2852]2771
2772    if verbose: print 'cropping'
2773    zname = 'ELEVATION'
2774
2775    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2776    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2777    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2778    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2779
2793
2794
2795
2796    #Get missing values
2797    nan_ha = file_h.variables['HA'].missing_value[0]
2798    nan_ua = file_u.variables['UA'].missing_value[0]
2799    nan_va = file_v.variables['VA'].missing_value[0]
2800    if hasattr(file_e.variables[zname],'missing_value'):
2801        nan_e  = file_e.variables[zname].missing_value[0]
2802    else:
2803        nan_e = None
2804
2805    #Cleanup
2806    from Numeric import sometrue
2807
2808    missing = (amplitudes == nan_ha)
2809    if sometrue (missing):
2810        if fail_on_NaN:
2811            msg = 'NetCDFFile %s contains missing values'\
2812                  %(basename_in+'_ha.nc')
2813            raise DataMissingValuesError, msg
2814        else:
2815            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2816
2817    missing = (uspeed == nan_ua)
2818    if sometrue (missing):
2819        if fail_on_NaN:
2820            msg = 'NetCDFFile %s contains missing values'\
2821                  %(basename_in+'_ua.nc')
2822            raise DataMissingValuesError, msg
2823        else:
2824            uspeed = uspeed*(missing==0) + missing*NaN_filler
2825
2826    missing = (vspeed == nan_va)
2827    if sometrue (missing):
2828        if fail_on_NaN:
2829            msg = 'NetCDFFile %s contains missing values'\
2830                  %(basename_in+'_va.nc')
2831            raise DataMissingValuesError, msg
2832        else:
2833            vspeed = vspeed*(missing==0) + missing*NaN_filler
2834
2835
2836    missing = (elevations == nan_e)
2837    if sometrue (missing):
2838        if fail_on_NaN:
2839            msg = 'NetCDFFile %s contains missing values'\
2840                  %(basename_in+'_e.nc')
2841            raise DataMissingValuesError, msg
2842        else:
2843            elevations = elevations*(missing==0) + missing*NaN_filler
2844
2845    #######
2846
2847
2848
2849    number_of_times = times.shape[0]
2850    number_of_latitudes = latitudes.shape[0]
2851    number_of_longitudes = longitudes.shape[0]
2852
2853    assert amplitudes.shape[0] == number_of_times
2854    assert amplitudes.shape[1] == number_of_latitudes
2855    assert amplitudes.shape[2] == number_of_longitudes
2856
2857    if verbose:
2858        print '------------------------------------------------'
2859        print 'Statistics:'
2860        print '  Extent (lat/lon):'
2861        print '    lat in [%f, %f], len(lat) == %d'\
2862              %(min(latitudes.flat), max(latitudes.flat),
2863                len(latitudes.flat))
2864        print '    lon in [%f, %f], len(lon) == %d'\
2865              %(min(longitudes.flat), max(longitudes.flat),
2866                len(longitudes.flat))
2867        print '    t in [%f, %f], len(t) == %d'\
2868              %(min(times.flat), max(times.flat), len(times.flat))
2869
        q = amplitudes.flat
        name = 'Amplitudes (ha) [cm]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = uspeed.flat
        name = 'Speeds (ua) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = vspeed.flat
        name = 'Speeds (va) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = elevations.flat
        name = 'Elevations (e) [m]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2885
2886
[2852]2888    number_of_points = number_of_latitudes*number_of_longitudes
2889    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2890
2891
2892    file_h.close()
2893    file_u.close()
2894    file_v.close()
2895    file_e.close()
2896
2897
2898    # NetCDF file definition
2899    outfile = NetCDFFile(swwname, 'w')
2900
[4387]2901    description = 'Converted from Ferret files: %s, %s, %s, %s'\
2902                  %(basename_in + '_ha.nc',
2903                    basename_in + '_ua.nc',
2904                    basename_in + '_va.nc',
2905                    basename_in + '_e.nc')
[4388]2906   
[4704]2907    # Create new file
[4416]2908    starttime = times[0]
[4862]2909   
[4455]2910    sww = Write_sww()
[4704]2911    sww.store_header(outfile, times, number_of_volumes,
2912                     number_of_points, description=description,
[4862]2913                     verbose=verbose,sww_precision=Float)
[2852]2914
[4704]2915    # Store
[3514]2916    from anuga.coordinate_transforms.redfearn import redfearn
[2852]2917    x = zeros(number_of_points, Float)  #Easting
2918    y = zeros(number_of_points, Float)  #Northing
2919
2920
2921    if verbose: print 'Making triangular grid'
[4704]2922
2923    # Check zone boundaries
[2852]2924    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2925
2926    vertices = {}
2927    i = 0
2928    for k, lat in enumerate(latitudes):       #Y direction
2929        for l, lon in enumerate(longitudes):  #X direction
2930
2931            vertices[l,k] = i
2932
2933            zone, easting, northing = redfearn(lat,lon)
2934
            msg = 'Zone boundary crossed at longitude = %f' %lon
2936            #assert zone == refzone, msg
2937            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2938            x[i] = easting
2939            y[i] = northing
2940            i += 1
2941
2942    #Construct 2 triangles per 'rectangular' element
2943    volumes = []
2944    for l in range(number_of_longitudes-1):    #X direction
2945        for k in range(number_of_latitudes-1): #Y direction
2946            v1 = vertices[l,k+1]
2947            v2 = vertices[l,k]
2948            v3 = vertices[l+1,k+1]
2949            v4 = vertices[l+1,k]
2950
2951            volumes.append([v1,v2,v3]) #Upper element
2952            volumes.append([v4,v3,v2]) #Lower element
2953
2954    volumes = array(volumes)
2955
[4387]2956    if origin is None:
2957        origin = Geo_reference(refzone,min(x),min(y))
2958    geo_ref = write_NetCDF_georeference(origin, outfile)
2959   
[2852]2960    if elevation is not None:
2961        z = elevation
2962    else:
2963        if inverted_bathymetry:
2964            z = -1*elevations
2965        else:
2966            z = elevations
2967    #FIXME: z should be obtained from MOST and passed in here
2968
[4862]2969    #FIXME use the Write_sww instance(sww) to write this info
[2852]2970    from Numeric import resize
2971    z = resize(z,outfile.variables['z'][:].shape)
[4387]2972    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
2973    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
[3954]2974    outfile.variables['z'][:] = z             #FIXME HACK for bacwards compat.
[2852]2975    outfile.variables['elevation'][:] = z
[3954]2976    outfile.variables['volumes'][:] = volumes.astype(Int32) #For Opteron 64
[2852]2977
2978
2979
2980    #Time stepping
2981    stage = outfile.variables['stage']
2982    xmomentum = outfile.variables['xmomentum']
2983    ymomentum = outfile.variables['ymomentum']
2984
2985    if verbose: print 'Converting quantities'
2986    n = len(times)
2987    for j in range(n):
2988        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2989        i = 0
2990        for k in range(number_of_latitudes):      #Y direction
2991            for l in range(number_of_longitudes): #X direction
2992                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2993                stage[j,i] = w
2994                h = w - z[i]
2995                xmomentum[j,i] = uspeed[j,k,l]/100*h
2996                ymomentum[j,i] = vspeed[j,k,l]/100*h
2997                i += 1
2998
2999    #outfile.close()
3000
3001    #FIXME: Refactor using code from file_function.statistics
3002    #Something like print swwstats(swwname)
3003    if verbose:
3004        x = outfile.variables['x'][:]
3005        y = outfile.variables['y'][:]
3006        print '------------------------------------------------'
3007        print 'Statistics of output file:'
3008        print '  Name: %s' %swwname
3009        print '  Reference:'
3010        print '    Lower left corner: [%f, %f]'\
[4387]3011              %(geo_ref.get_xllcorner(), geo_ref.get_yllcorner())
        print '    Start time: %f' %starttime
[4418]3013        print '    Min time: %f' %mint
3014        print '    Max time: %f' %maxt
[2852]3015        print '  Extent:'
3016        print '    x [m] in [%f, %f], len(x) == %d'\
3017              %(min(x.flat), max(x.flat), len(x.flat))
3018        print '    y [m] in [%f, %f], len(y) == %d'\
3019              %(min(y.flat), max(y.flat), len(y.flat))
3020        print '    t [s] in [%f, %f], len(t) == %d'\
3021              %(min(times), max(times), len(times))
3022        print '  Quantities [SI units]:'
3023        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
3024            q = outfile.variables[name][:].flat
3025            print '    %s in [%f, %f]' %(name, min(q), max(q))
3026
3027
3028
3029    outfile.close()
3030
3031
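# A hedged usage sketch for ferret2sww (hypothetical basename; assumes the
# four files most_ha.nc, most_ua.nc, most_va.nc and most_e.nc exist):
#
#     ferret2sww('most', basename_out='most_boundary',
#                minlat=-21.0, maxlat=-20.0,
#                minlon=114.0, maxlon=115.0,
#                mean_stage=0.0, zscale=1, verbose=True)
#     # -> writes 'most_boundary.sww' suitable for use as a boundary
#     #    condition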
3032
3033
3034
[4303]3035def timefile2netcdf(filename, quantity_names=None, time_as_seconds=False):
[2852]3036    """Template for converting typical text files with time series to
3037    NetCDF tms file.
3038
3039
3040    The file format is assumed to be either two fields separated by a comma:
3041
3042        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
3043
3044    E.g
3045
3046      31/08/04 00:00:00, 1.328223 0 0
3047      31/08/04 00:15:00, 1.292912 0 0
3048
[4303]3049    or time (seconds), value0 value1 value2 ...
3050   
3051      0.0, 1.328223 0 0
3052      0.1, 1.292912 0 0
3053
[2852]3054    will provide a time dependent function f(t) with three attributes
3055
    filename is assumed to be the rootname with extensions .txt and .tms
3057    """
3058
3059    import time, calendar
3060    from Numeric import array
[3514]3061    from anuga.config import time_format
3062    from anuga.utilities.numerical_tools import ensure_numeric
[2852]3063
3064
3065
3066    fid = open(filename + '.txt')
3067    line = fid.readline()
3068    fid.close()
3069
3070    fields = line.split(',')
    msg = 'File %s must have the format: date, value0 value1 value2 ...'\
          %filename
    assert len(fields) == 2, msg
3073
[4303]3074    if not time_as_seconds:
3075        try:
3076            starttime = calendar.timegm(time.strptime(fields[0], time_format))
3077        except ValueError:
3078            msg = 'First field in file %s must be' %filename
3079            msg += ' date-time with format %s.\n' %time_format
3080            msg += 'I got %s instead.' %fields[0]
3081            raise DataTimeError, msg
3082    else:
3083        try:
3084            starttime = float(fields[0])
        except ValueError:
            msg = 'Bad time format: expected time in seconds, got %s' %fields[0]
            raise DataTimeError, msg
[2852]3088
3089
3090    #Split values
3091    values = []
3092    for value in fields[1].split():
3093        values.append(float(value))
3094
3095    q = ensure_numeric(values)
3096
3097    msg = 'ERROR: File must contain at least one independent value'
3098    assert len(q.shape) == 1, msg
3099
3100
3101
3102    #Read times proper
3103    from Numeric import zeros, Float, alltrue
[3514]3104    from anuga.config import time_format
[2852]3105    import time, calendar
3106
3107    fid = open(filename + '.txt')
3108    lines = fid.readlines()
3109    fid.close()
3110
3111    N = len(lines)
3112    d = len(q)
3113
3114    T = zeros(N, Float)       #Time
3115    Q = zeros((N, d), Float)  #Values
3116
3117    for i, line in enumerate(lines):
3118        fields = line.split(',')
[4303]3119        if not time_as_seconds:
3120            realtime = calendar.timegm(time.strptime(fields[0], time_format))
3121        else:
3122             realtime = float(fields[0])
[2852]3123        T[i] = realtime - starttime
3124
3125        for j, value in enumerate(fields[1].split()):
3126            Q[i, j] = float(value)
3127
    msg = 'File %s must list time as a monotonically ' %filename
    msg += 'increasing sequence'
3130    assert alltrue( T[1:] - T[:-1] > 0 ), msg
3131
3132    #Create NetCDF file
3133    from Scientific.IO.NetCDF import NetCDFFile
3134
3135    fid = NetCDFFile(filename + '.tms', 'w')
3136
3137
3138    fid.institution = 'Geoscience Australia'
3139    fid.description = 'Time series'
3140
3141
3142    #Reference point
3143    #Start time in seconds since the epoch (midnight 1/1/1970)
3144    #FIXME: Use Georef
3145    fid.starttime = starttime
3146
3147    # dimension definitions
3148    #fid.createDimension('number_of_volumes', self.number_of_volumes)
3149    #fid.createDimension('number_of_vertices', 3)
3150
3151
3152    fid.createDimension('number_of_timesteps', len(T))
3153
3154    fid.createVariable('time', Float, ('number_of_timesteps',))
3155
3156    fid.variables['time'][:] = T
3157
3158    for i in range(Q.shape[1]):
3159        try:
3160            name = quantity_names[i]
3161        except:
3162            name = 'Attribute%d'%i
3163
3164        fid.createVariable(name, Float, ('number_of_timesteps',))
3165        fid.variables[name][:] = Q[:,i]
3166
3167    fid.close()
3168
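# Example (illustrative sketch, not part of the original API): convert a
# small time series given in seconds to a .tms file.  The basename 'gauge'
# and the quantity names are hypothetical.
def _example_timefile2netcdf():
    fid = open('gauge.txt', 'w')
    fid.write('0.0, 1.328223 0 0\n')
    fid.write('60.0, 1.292912 0 0\n')
    fid.close()

    # Reads gauge.txt and writes gauge.tms
    timefile2netcdf('gauge',
                    quantity_names=['stage', 'xmomentum', 'ymomentum'],
                    time_as_seconds=True)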
3169
3170def extent_sww(file_name):
3171    """
3172    Read in an sww file.
3173
3174    Input:
3175    file_name - the sww file
3176
3177    Output:
3178    A list describing the extent of the file:
3179    [min(x), max(x), min(y), max(y), min(stage), max(stage)]
3180    where x and y are the vertex coordinates and the stage extremes
3181    are taken over all timesteps and vertices.
3182
3183    """
3184
3185
3186    from Scientific.IO.NetCDF import NetCDFFile
3187
3188    #Check contents
3189    #Get NetCDF
3190    fid = NetCDFFile(file_name, 'r')
3191
3192    # Get the variables
3193    x = fid.variables['x'][:]
3194    y = fid.variables['y'][:]
3195    stage = fid.variables['stage'][:]
3196    #print "stage",stage
3197    #print "stage.shap",stage.shape
3198    #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
3199    #print "min(stage)",min(stage)
3200
3201    fid.close()
3202
3203    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
3204
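# Example (illustrative sketch): report the extent of an existing sww file.
# The file name 'runup.sww' is hypothetical.
def _example_extent_sww():
    xmin, xmax, ymin, ymax, stage_min, stage_max = extent_sww('runup.sww')
    print 'x in [%f, %f], y in [%f, %f]' %(xmin, xmax, ymin, ymax)
    print 'stage in [%f, %f]' %(stage_min, stage_max)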
3205
[5276]3206def sww2domain(filename, boundary=None, t=None,
3207               fail_if_NaN=True ,NaN_filler=0,
3208               verbose = False, very_verbose = False):
[2852]3209    """
3210    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
3211
3212    Boundary is not recommended if domain.smooth is not selected, as it
3213    uses unique coordinates, but not unique boundaries. This means that
3214    the boundary file will not be compatible with the coordinates, and will
3215    give a different final boundary, or crash.
3216    """
3217    NaN=9.969209968386869e+036
3218    #initialise NaN.
3219
3220    from Scientific.IO.NetCDF import NetCDFFile
3221    from shallow_water import Domain
3222    from Numeric import asarray, transpose, resize
3223
3224    if verbose: print 'Reading from ', filename
3225    fid = NetCDFFile(filename, 'r')    #Open existing file for read
3226    time = fid.variables['time']       #Timesteps
3227    if t is None:
3228        t = time[-1]
3229    time_interp = get_time_interp(time,t)
3230
3231    # Get the variables as Numeric arrays
3232    x = fid.variables['x'][:]             #x-coordinates of vertices
3233    y = fid.variables['y'][:]             #y-coordinates of vertices
3234    elevation = fid.variables['elevation']     #Elevation
3235    stage = fid.variables['stage']     #Water level
3236    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
3237    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
3238
3239    starttime = fid.starttime[0]
3240    volumes = fid.variables['volumes'][:] #Connectivity
[5276]3241    coordinates = transpose(asarray([x.tolist(),y.tolist()]))
3242    #FIXME (Ole): Something like this might be better: concatenate( (x, y), axis=1 )
3243    # or concatenate( (x[:,NewAxis],x[:,NewAxis]), axis=1 )
[2852]3244
3245    conserved_quantities = []
3246    interpolated_quantities = {}
3247    other_quantities = []
3248
3249    # get geo_reference
3250    #sww files don't have to have a geo_ref
3251    try:
3252        geo_reference = Geo_reference(NetCDFObject=fid)
3253    except: #AttributeError, e:
3254        geo_reference = None
3255
3256    if verbose: print '    getting quantities'
3257    for quantity in fid.variables.keys():
3258        dimensions = fid.variables[quantity].dimensions
3259        if 'number_of_timesteps' in dimensions:
3260            conserved_quantities.append(quantity)
3261            interpolated_quantities[quantity]=\
3262                  interpolated_quantity(fid.variables[quantity][:],time_interp)
3263        else: other_quantities.append(quantity)
3264
3265    other_quantities.remove('x')
3266    other_quantities.remove('y')
3267    other_quantities.remove('z')
3268    other_quantities.remove('volumes')
[4455]3269    try:
3270        other_quantities.remove('stage_range')
3271        other_quantities.remove('xmomentum_range')
3272        other_quantities.remove('ymomentum_range')
3273        other_quantities.remove('elevation_range')
3274    except:
3275        pass
3276       
[2852]3277
3278    conserved_quantities.remove('time')
3279
3280    if verbose: print '    building domain'
3281    #    From domain.Domain:
3282    #    domain = Domain(coordinates, volumes,\
3283    #                    conserved_quantities = conserved_quantities,\
3284    #                    other_quantities = other_quantities,zone=zone,\
3285    #                    xllcorner=xllcorner, yllcorner=yllcorner)
3286
3287    #   From shallow_water.Domain:
3288    coordinates=coordinates.tolist()
3289    volumes=volumes.tolist()
3290    #FIXME:should this be in mesh?(peter row)
3291    if fid.smoothing == 'Yes': unique = False
3292    else: unique = True
3293    if unique:
3294        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
3295
3296
3297    try:
3298        domain = Domain(coordinates, volumes, boundary)
3299    except AssertionError, e:
3300        fid.close()
3301        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
3302        raise DataDomainError, msg
3303
3304    if not boundary is None:
3305        domain.boundary = boundary
3306
3307    domain.geo_reference = geo_reference
3308
3309    domain.starttime=float(starttime)+float(t)
3310    domain.time=0.0
3311
3312    for quantity in other_quantities:
3313        try:
3314            NaN = fid.variables[quantity].missing_value
3315        except:
3316            pass #quantity has no missing_value number
3317        X = fid.variables[quantity][:]
3318        if very_verbose:
3319            print '       ',quantity
3320            print '        NaN =',NaN
3321            print '        max(X)'
3322            print '       ',max(X)
3323            print '        max(X)==NaN'
3324            print '       ',max(X)==NaN
3325            print ''
3326        if (max(X)==NaN) or (min(X)==NaN):
3327            if fail_if_NaN:
3328                msg = 'quantity "%s" contains no_data entry'%quantity
3329                raise DataMissingValuesError, msg
3330            else:
3331                data = (X<>NaN)
3332                X = (X*data)+(data==0)*NaN_filler
3333        if unique:
3334            X = resize(X,(len(X)/3,3))
3335        domain.set_quantity(quantity,X)
3336    #
3337    for quantity in conserved_quantities:
3338        try:
3339            NaN = fid.variables[quantity].missing_value
3340        except:
3341            pass #quantity has no missing_value number
3342        X = interpolated_quantities[quantity]
3343        if very_verbose:
3344            print '       ',quantity
3345            print '        NaN =',NaN
3346            print '        max(X)'
3347            print '       ',max(X)
3348            print '        max(X)==NaN'
3349            print '       ',max(X)==NaN
3350            print ''
3351        if (max(X)==NaN) or (min(X)==NaN):
3352            if fail_if_NaN:
3353                msg = 'quantity "%s" contains no_data entry'%quantity
3354                raise DataMissingValuesError, msg
3355            else:
3356                data = (X<>NaN)
3357                X = (X*data)+(data==0)*NaN_filler
3358        if unique:
3359            X = resize(X,(X.shape[0]/3,3))
3360        domain.set_quantity(quantity,X)
3361
3362    fid.close()
3363    return domain
3364
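# Example (illustrative sketch): restart a simulation from the last stored
# timestep of an existing sww file.  The file name 'run1.sww' is
# hypothetical; t defaults to the last time in the file.
def _example_sww2domain():
    domain = sww2domain('run1.sww',
                        fail_if_NaN=False, NaN_filler=0,
                        verbose=True)
    domain.set_name('run2')
    return domain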
[5276]3365
[2852]3366def interpolated_quantity(saved_quantity,time_interp):
3367
3368    #given an index and ratio, interpolate quantity with respect to time.
3369    index,ratio = time_interp
3370    Q = saved_quantity
3371    if ratio > 0:
3372        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
3373    else:
3374        q = Q[index]
3375    #Return vector of interpolated values
3376    return q
3377
[5276]3378
[2852]3379def get_time_interp(time,t=None):
3380    #Finds the ratio and index for time interpolation.
[3560]3381    #It is borrowed from previous abstract_2d_finite_volumes code.
[2852]3382    if t is None:
3383        t=time[-1]
3384        index = -1
3385        ratio = 0.
3386    else:
3387        T = time
3388        tau = t
3389        index=0
3390        msg = 'Time interval derived from file %s [%s:%s]'\
3391            %('FIXMEfilename', T[0], T[-1])
3392        msg += ' does not match model time: %s' %tau
3393        if tau < time[0]: raise DataTimeError, msg
3394        if tau > time[-1]: raise DataTimeError, msg
3395        while tau > time[index]: index += 1
3396        while tau < time[index]: index -= 1
3397        if tau == time[index]:
3398            #Protect against case where tau == time[-1] (last time)
3399            # - also works in general when tau == time[i]
3400            ratio = 0
3401        else:
3402            #t is now between index and index+1
3403            ratio = (tau - time[index])/(time[index+1] - time[index])
3404    return (index,ratio)
3405
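# Example (illustrative sketch) of the two helpers above, using a made-up
# time vector and quantity array.
def _example_time_interpolation():
    from Numeric import array, Float
    time = array([0.0, 10.0, 20.0], Float)
    Q = array([[0.0, 0.0], [1.0, 2.0], [2.0, 4.0]], Float)

    index, ratio = get_time_interp(time, t=15.0)   # index == 1, ratio == 0.5
    q = interpolated_quantity(Q, (index, ratio))   # [1.5, 3.0]
    return q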
3406
3407def weed(coordinates,volumes,boundary = None):
[4455]3408    if type(coordinates)==ArrayType:
[2852]3409        coordinates = coordinates.tolist()
[4455]3410    if type(volumes)==ArrayType:
[2852]3411        volumes = volumes.tolist()
3412
3413    unique = False
3414    point_dict = {}
3415    same_point = {}
3416    for i in range(len(coordinates)):
3417        point = tuple(coordinates[i])
3418        if point_dict.has_key(point):
3419            unique = True
3420            same_point[i]=point
3421            #to change all point i references to point j
3422        else:
3423            point_dict[point]=i
3424            same_point[i]=point
3425
3426    coordinates = []
3427    i = 0
3428    for point in point_dict.keys():
3429        point = tuple(point)
3430        coordinates.append(list(point))
3431        point_dict[point]=i
3432        i+=1
3433
3434
3435    for volume in volumes:
3436        for i in range(len(volume)):
3437            index = volume[i]
3438            if index>-1:
3439                volume[i]=point_dict[same_point[index]]
3440
3441    new_boundary = {}
3442    if not boundary is None:
3443        for segment in boundary.keys():
3444            point0 = point_dict[same_point[segment[0]]]
3445            point1 = point_dict[same_point[segment[1]]]
3446            label = boundary[segment]
3447            #FIXME should the boundary attributes be concatenated
3448            #('exterior, pond') or replaced ('pond')(peter row)
3449
3450            if new_boundary.has_key((point0,point1)):
3451                new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
3452                                              #+','+label
3453
3454            elif new_boundary.has_key((point1,point0)):
3455                new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
3456                                              #+','+label
3457            else: new_boundary[(point0,point1)]=label
3458
3459        boundary = new_boundary
3460
3461    return coordinates,volumes,boundary
3462
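# Example (illustrative sketch): weed out a duplicated vertex from a two
# triangle mesh.  The two copies of (0.0, 0.0) are merged, so the returned
# coordinate list has three points and both triangles share that vertex.
def _example_weed():
    coordinates = [[0.0, 0.0], [1.0, 0.0], [0.0, 0.0], [0.0, 1.0]]
    volumes = [[0, 1, 3], [2, 1, 3]]
    coordinates, volumes, boundary = weed(coordinates, volumes)
    assert len(coordinates) == 3
    return coordinates, volumes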
3463
3464def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
3465                 verbose=False):
3466    """Read Digitial Elevation model from the following NetCDF format (.dem)
3467
3468    Example:
3469
3470    ncols         3121
3471    nrows         1800
3472    xllcorner     722000
3473    yllcorner     5893000
3474    cellsize      25
3475    NODATA_value  -9999
3476    138.3698 137.4194 136.5062 135.5558 ..........
3477
3478    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3479    """
3480
3481    import os
3482    from Scientific.IO.NetCDF import NetCDFFile
3483    from Numeric import Float, zeros, sum, reshape, equal
3484
3485    root = basename_in
3486    inname = root + '.dem'
3487
3488    #Open existing netcdf file to read
3489    infile = NetCDFFile(inname, 'r')
3490    if verbose: print 'Reading DEM from %s' %inname
3491
3492    #Read metadata
3493    ncols = infile.ncols[0]
3494    nrows = infile.nrows[0]
3495    xllcorner = infile.xllcorner[0]
3496    yllcorner = infile.yllcorner[0]
3497    cellsize = infile.cellsize[0]
3498    NODATA_value = infile.NODATA_value[0]
3499    zone = infile.zone[0]
3500    false_easting = infile.false_easting[0]
3501    false_northing = infile.false_northing[0]
3502    projection = infile.projection
3503    datum = infile.datum
3504    units = infile.units
3505
3506    dem_elevation = infile.variables['elevation']
3507
3508    #Get output file name
3509    if basename_out == None:
3510        outname = root + '_' + repr(cellsize_new) + '.dem'
3511    else:
3512        outname = basename_out + '.dem'
3513
3514    if verbose: print 'Write decimated NetCDF file to %s' %outname
3515
3516    #Determine some dimensions for decimated grid
3517    (nrows_stencil, ncols_stencil) = stencil.shape
3518    x_offset = ncols_stencil / 2
3519    y_offset = nrows_stencil / 2
3520    cellsize_ratio = int(cellsize_new / cellsize)
3521    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3522    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3523
3524    #Open netcdf file for output
3525    outfile = NetCDFFile(outname, 'w')
3526
3527    #Create new file
3528    outfile.institution = 'Geoscience Australia'
3529    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3530                           'of spatial point data'
3531    #Georeferencing
3532    outfile.zone = zone
3533    outfile.projection = projection
3534    outfile.datum = datum
3535    outfile.units = units
3536
3537    outfile.cellsize = cellsize_new
3538    outfile.NODATA_value = NODATA_value
3539    outfile.false_easting = false_easting
3540    outfile.false_northing = false_northing
3541
3542    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3543    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3544    outfile.ncols = ncols_new
3545    outfile.nrows = nrows_new
3546
3547    # dimension definition
3548    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3549
3550    # variable definition
3551    outfile.createVariable('elevation', Float, ('number_of_points',))
3552
3553    # Get handle to the variable
3554    elevation = outfile.variables['elevation']
3555
3556    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3557
3558    #Store data
3559    global_index = 0
3560    for i in range(nrows_new):
3561        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3562        lower_index = global_index
3563        telev =  zeros(ncols_new, Float)
3564        local_index = 0
3565        trow = i * cellsize_ratio
3566
3567        for j in range(ncols_new):
3568            tcol = j * cellsize_ratio
3569            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3570
3571            #if dem contains 1 or more NODATA_values set value in
3572            #decimated dem to NODATA_value, else compute decimated
3573            #value using stencil
3574            if sum(sum(equal(tmp, NODATA_value))) > 0:
3575                telev[local_index] = NODATA_value
3576            else:
3577                telev[local_index] = sum(sum(tmp * stencil))
3578
3579            global_index += 1
3580            local_index += 1
3581
3582        upper_index = global_index
3583
3584        elevation[lower_index:upper_index] = telev
3585
3586    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3587
3588    infile.close()
3589    outfile.close()
3590
3591
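# Example (illustrative sketch): decimate an existing topo.dem (the basename
# 'topo' is hypothetical; cellsize assumed to be 25m) to 100m cells using a
# uniform 4x4 block-averaging stencil (weights sum to 1).
def _example_decimate_dem():
    from Numeric import ones, Float
    stencil = ones((4, 4), Float)/16.0
    decimate_dem('topo', stencil, cellsize_new=100,
                 basename_out='topo_100', verbose=True)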
3592
3593
[4868]3594def tsh2sww(filename, verbose=False): 
[2852]3595    """
3596    Convert a tsh/msh file to an sww file, e.g. to check that the mesh 'looks' good.
3597    """
3598
3599
3600    if verbose == True:print 'Creating domain from', filename
3601    domain = pmesh_to_domain_instance(filename, Domain)
3602    if verbose == True:print "Number of triangles = ", len(domain)
3603
3604    domain.smooth = True
3605    domain.format = 'sww'   #Native netcdf visualisation format
3606    file_path, filename = path.split(filename)
3607    filename, ext = path.splitext(filename)
[3846]3608    domain.set_name(filename)   
[2852]3609    domain.reduction = mean
3610    if verbose == True:print "file_path",file_path
3611    if file_path == "":file_path = "."
3612    domain.set_datadir(file_path)
3613
3614    if verbose == True:
3615        print "Output written to " + domain.get_datadir() + sep + \
[3846]3616              domain.get_name() + "." + domain.format
[2852]3617    sww = get_dataobject(domain)
3618    sww.store_connectivity()
[4868]3619    sww.store_timestep()
[2852]3620
3621
3622def asc_csiro2sww(bath_dir,
3623                  elevation_dir,
3624                  ucur_dir,
3625                  vcur_dir,
3626                  sww_file,
3627                  minlat = None, maxlat = None,
3628                  minlon = None, maxlon = None,
3629                  zscale=1,
3630                  mean_stage = 0,
3631                  fail_on_NaN = True,
3632                  elevation_NaN_filler = 0,
3633                  bath_prefix='ba',
3634                  elevation_prefix='el',
3635                  verbose=False):
3636    """
3637    Produce an sww boundary file, from esri ascii data from CSIRO.
3638
3639    Also convert latitude and longitude to UTM. All coordinates are
3640    assumed to be given in the GDA94 datum.
3641
3642    assume:
3643    All files are in esri ascii format
3644
3645    4 types of information
3646    bathymetry
3647    elevation
3648    u velocity
3649    v velocity
3650
3651    Assumptions
3652    The metadata of all the files is the same
3653    Each type is in a separate directory
3654    One bath file with extension .000
3655    The time period is less than 24hrs and uniform.
3656    """
3657    from Scientific.IO.NetCDF import NetCDFFile
3658
[3514]3659    from anuga.coordinate_transforms.redfearn import redfearn
[2852]3660
[2852]3661    precision = Float # So if we want to change the precision it's done here
3662
3663    # go in to the bath dir and load the only file,
3664    bath_files = os.listdir(bath_dir)
3665
3666    bath_file = bath_files[0]
3667    bath_dir_file =  bath_dir + os.sep + bath_file
3668    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3669
3670    #Use the date.time of the bath file as a basis for
3671    #the start time for other files
3672    base_start = bath_file[-12:]
3673
3674    #go into the elevation dir and load the 000 file
3675    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3676                         + base_start
3677
3678    elevation_files = os.listdir(elevation_dir)
3679    ucur_files = os.listdir(ucur_dir)
3680    vcur_files = os.listdir(vcur_dir)
[4031]3681    elevation_files.sort()
[2852]3682    # the first elevation file should be the
3683    # file with the same base name as the bath data
3684    assert elevation_files[0] == elevation_prefix + base_start
3685
3686    number_of_latitudes = bath_grid.shape[0]
3687    number_of_longitudes = bath_grid.shape[1]
3688    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3689
3690    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3691                  for x in range(number_of_longitudes)]
3692    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3693                 for y in range(number_of_latitudes)]
3694
3695    # reverse order of lat, so the first lat represents the first grid row
3696    latitudes.reverse()
3697
[4027]3698    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],longitudes[:],
[2852]3699                                                 minlat=minlat, maxlat=maxlat,
3700                                                 minlon=minlon, maxlon=maxlon)
3701
3702
3703    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3704    latitudes = latitudes[kmin:kmax]
3705    longitudes = longitudes[lmin:lmax]
3706    number_of_latitudes = len(latitudes)
3707    number_of_longitudes = len(longitudes)
3708    number_of_times = len(os.listdir(elevation_dir))
3709    number_of_points = number_of_latitudes*number_of_longitudes
3710    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3711
3712    #Work out the times
3713    if len(elevation_files) > 1:
3714        # Assume: The time period is less than 24hrs.
3715        time_period = (int(elevation_files[1][-3:]) - \
3716                      int(elevation_files[0][-3:]))*60*60
3717        times = [x*time_period for x in range(len(elevation_files))]
3718    else:
3719        times = [0.0]
3720
3721
3722    if verbose:
3723        print '------------------------------------------------'
3724        print 'Statistics:'
3725        print '  Extent (lat/lon):'
3726        print '    lat in [%f, %f], len(lat) == %d'\
3727              %(min(latitudes), max(latitudes),
3728                len(latitudes))
3729        print '    lon in [%f, %f], len(lon) == %d'\
3730              %(min(longitudes), max(longitudes),
3731                len(longitudes))
3732        print '    t in [%f, %f], len(t) == %d'\
3733              %(min(times), max(times), len(times))
3734
3735    ######### WRITE THE SWW FILE #############
3736    # NetCDF file definition
3737    outfile = NetCDFFile(sww_file, 'w')
3738
3739    #Create new file
3740    outfile.institution = 'Geoscience Australia'
3741    outfile.description = 'Converted from XXX'
3742
3743
3744    #For sww compatibility
3745    outfile.smoothing = 'Yes'
3746    outfile.order = 1
3747
3748    #Start time in seconds since the epoch (midnight 1/1/1970)
3749    outfile.starttime = starttime = times[0]
3750
3751
3752    # dimension definitions
3753    outfile.createDimension('number_of_volumes', number_of_volumes)
3754
3755    outfile.createDimension('number_of_vertices', 3)
3756    outfile.createDimension('number_of_points', number_of_points)
3757    outfile.createDimension('number_of_timesteps', number_of_times)
3758
3759    # variable definitions
3760    outfile.createVariable('x', precision, ('number_of_points',))
3761    outfile.createVariable('y', precision, ('number_of_points',))
3762    outfile.createVariable('elevation', precision, ('number_of_points',))
3763
3764    #FIXME: Backwards compatibility
3765    outfile.createVariable('z', precision, ('number_of_points',))
3766    #################################
3767
3768    outfile.createVariable('volumes', Int, ('number_of_volumes',
3769                                            'number_of_vertices'))
3770
3771    outfile.createVariable('time', precision,
3772                           ('number_of_timesteps',))
3773
3774    outfile.createVariable('stage', precision,
3775                           ('number_of_timesteps',
3776                            'number_of_points'))
3777
3778    outfile.createVariable('xmomentum', precision,
3779                           ('number_of_timesteps',
3780                            'number_of_points'))
3781
3782    outfile.createVariable('ymomentum', precision,
3783                           ('number_of_timesteps',
3784                            'number_of_points'))
3785
3786    #Store
[3514]3787    from anuga.coordinate_transforms.redfearn import redfearn
[2852]3788    x = zeros(number_of_points, Float)  #Easting
3789    y = zeros(number_of_points, Float)  #Northing
3790
3791    if verbose: print 'Making triangular grid'
3792    #Get zone of 1st point.
3793    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
3794
3795    vertices = {}
3796    i = 0
3797    for k, lat in enumerate(latitudes):
3798        for l, lon in enumerate(longitudes):
3799
3800            vertices[l,k] = i
3801
3802            zone, easting, northing = redfearn(lat,lon)
3803
3804            msg = 'Zone boundary crossed at longitude =', lon
3805            #assert zone == refzone, msg
3806            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
3807            x[i] = easting
3808            y[i] = northing
3809            i += 1
3810
3811
3812    #Construct 2 triangles per 'rectangular' element
3813    volumes = []
3814    for l in range(number_of_longitudes-1):    #X direction
3815        for k in range(number_of_latitudes-1): #Y direction
3816            v1 = vertices[l,k+1]
3817            v2 = vertices[l,k]
3818            v3 = vertices[l+1,k+1]
3819            v4 = vertices[l+1,k]
3820
3821            #Note, this is different to the ferret2sww code
3822            #since the order of the lats is reversed.
3823            volumes.append([v1,v3,v2]) #Upper element
3824            volumes.append([v4,v2,v3]) #Lower element
3825
3826    volumes = array(volumes)
3827
3828    geo_ref = Geo_reference(refzone,min(x),min(y))
3829    geo_ref.write_NetCDF(outfile)
3830
3831    # This will put the geo ref in the middle
3832    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
3833
3834
3835    if verbose:
3836        print '------------------------------------------------'
3837        print 'More Statistics:'
3838        print '  Extent (UTM):'
3839        print '    x in [%f, %f], len(x) == %d'\
3840              %(min(x), max(x),
3841                len(x))
3842        print '    y in [%f, %f], len(y) == %d'\
3843              %(min(y), max(y),
3844                len(y))
3845        print 'geo_ref: ',geo_ref
3846
3847    z = resize(bath_grid,outfile.variables['z'][:].shape)
3848    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
3849    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
3850    outfile.variables['z'][:] = z
3851    outfile.variables['elevation'][:] = z  #FIXME HACK
3852    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
3853
3854    stage = outfile.variables['stage']
3855    xmomentum = outfile.variables['xmomentum']
3856    ymomentum = outfile.variables['ymomentum']
3857
3858    outfile.variables['time'][:] = times   #Store time relative
3859
3860    if verbose: print 'Converting quantities'
3861    n = number_of_times
3862    for j in range(number_of_times):
3863        # load in files
3864        elevation_meta, elevation_grid = \
3865           _read_asc(elevation_dir + os.sep + elevation_files[j])
3866
3867        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
3868        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])
3869
3870        #cut matrix to desired size
3871        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
3872        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
3873        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
[4271]3874       
[2852]3875        # handle missing values
3876        missing = (elevation_grid == elevation_meta['NODATA_value'])
3877        if sometrue (missing):
3878            if fail_on_NaN:
3879                msg = 'File %s contains missing values'\
3880                      %(elevation_files[j])
3881                raise DataMissingValuesError, msg
3882            else:
3883                elevation_grid = elevation_grid*(missing==0) + \
3884                                 missing*elevation_NaN_filler
3885
3886
3887        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
3888        i = 0
3889        for k in range(number_of_latitudes):      #Y direction
3890            for l in range(number_of_longitudes): #X direction
3891                w = zscale*elevation_grid[k,l] + mean_stage
3892                stage[j,i] = w
3893                h = w - z[i]
3894                xmomentum[j,i] = u_momentum_grid[k,l]*h
3895                ymomentum[j,i] = v_momentum_grid[k,l]*h
3896                i += 1
3897    outfile.close()
3898
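# Example (illustrative sketch): build a boundary sww from CSIRO esri ascii
# grids clipped to a lat/long window.  All directory and file names here
# are hypothetical.
def _example_asc_csiro2sww():
    asc_csiro2sww('bath_dir', 'elevation_dir', 'ucur_dir', 'vcur_dir',
                  'boundary.sww',
                  minlat=-35.0, maxlat=-33.0,
                  minlon=149.0, maxlon=151.0,
                  mean_stage=0,
                  verbose=True)
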
[4037]3899def _get_min_max_indexes(latitudes_ref,longitudes_ref,
[2852]3900                        minlat=None, maxlat=None,
3901                        minlon=None, maxlon=None):
3902    """
3903    return max, min indexes (for slicing) of the lat and long arrays to cover the area
3904    specified with min/max lat/long
3905
3906    Think of the latitudes and longitudes describing a 2d surface.
3907    The area returned is, if possible, just big enough to cover the
3908    requested max/min area. (This will not be possible if the max/min area
3909    has a section outside of the latitudes/longitudes area.)
3910
[4037]3911    assert longitudes are sorted,
3912    long - from low to high (west to east, eg 148 - 151)
3913    assert latitudes are sorted, ascending or descending
[2852]3914    """
[4037]3915    latitudes = latitudes_ref[:]
3916    longitudes = longitudes_ref[:]
[2852]3917
[4037]3918    latitudes = ensure_numeric(latitudes)
3919    longitudes = ensure_numeric(longitudes)
[5347]3920
[4037]3921    assert allclose(sort(longitudes), longitudes)
[5347]3922
[5352]3923    #print latitudes[0],longitudes[0]
3924    #print len(latitudes),len(longitudes)
3925    #print latitudes[len(latitudes)-1],longitudes[len(longitudes)-1]
[4024]3926   
[4037]3927    lat_ascending = True
3928    if not allclose(sort(latitudes), latitudes):
3929        lat_ascending = False
3930        # reverse order of lat, so it's in ascending order         
3931        latitudes = latitudes[::-1]
3932        assert allclose(sort(latitudes), latitudes)
3933    #print "latitudes  in funct", latitudes
3934   
[2852]3935    largest_lat_index = len(latitudes)-1
3936    #Cut out a smaller extent.
3937    if minlat == None:
3938        lat_min_index = 0
3939    else:
3940        lat_min_index = searchsorted(latitudes, minlat)-1
3941        if lat_min_index <0:
3942            lat_min_index = 0
3943
3944
3945    if maxlat == None:
3946        lat_max_index = largest_lat_index #len(latitudes)
3947    else:
3948        lat_max_index = searchsorted(latitudes, maxlat)
3949        if lat_max_index > largest_lat_index:
3950            lat_max_index = largest_lat_index
3951
3952    if minlon == None:
3953        lon_min_index = 0
3954    else:
3955        lon_min_index = searchsorted(longitudes, minlon)-1
3956        if lon_min_index <0:
3957            lon_min_index = 0
3958
3959    if maxlon == None:
3960        lon_max_index = len(longitudes)
3961    else:
3962        lon_max_index = searchsorted(longitudes, maxlon)
3963
[4037]3964    # Reverse the indexes if the lat array is descending
3965    if lat_ascending is False:
3966        lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
3967                                       largest_lat_index - lat_min_index
[2852]3968    lat_max_index = lat_max_index + 1 # taking into account how slicing works
3969    lon_max_index = lon_max_index + 1 # taking into account how slicing works
3970
3971    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
3972
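# Example (illustrative sketch): the returned indices give slices that just
# cover the requested window; note the extra row/column outside the window.
def _example_get_min_max_indexes():
    latitudes = [-36.0, -35.0, -34.0, -33.0]
    longitudes = [148.0, 149.0, 150.0, 151.0]
    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes, longitudes,
                                                  minlat=-34.5, maxlat=-33.5,
                                                  minlon=149.5, maxlon=150.5)
    # latitudes[kmin:kmax] == [-35.0, -34.0, -33.0]
    # longitudes[lmin:lmax] == [149.0, 150.0, 151.0]
    return latitudes[kmin:kmax], longitudes[lmin:lmax]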
3973
3974def _read_asc(filename, verbose=False):
3975    """Read esri file from the following ASCII format (.asc)
3976
3977    Example:
3978
3979    ncols         3121
3980    nrows         1800
3981    xllcorner     722000
3982    yllcorner     5893000
3983    cellsize      25
3984    NODATA_value  -9999
3985    138.3698 137.4194 136.5062 135.5558 ..........
3986
3987    """
3988
3989    datafile = open(filename)
3990
3991    if verbose: print 'Reading DEM from %s' %(filename)
3992    lines = datafile.readlines()
3993    datafile.close()
3994
3995    if verbose: print 'Got', len(lines), ' lines'
3996
3997    ncols = int(lines.pop(0).split()[1].strip())
3998    nrows = int(lines.pop(0).split()[1].strip())
3999    xllcorner = float(lines.pop(0).split()[1].strip())
4000    yllcorner = float(lines.pop(0).split()[1].strip())
4001    cellsize = float(lines.pop(0).split()[1].strip())
4002    NODATA_value = float(lines.pop(0).split()[1].strip())
4003
4004    assert len(lines) == nrows
4005
4006    #Store data
4007    grid = []
4008
4009    n = len(lines)
4010    for i, line in enumerate(lines):
4011        cells = line.split()
4012        assert len(cells) == ncols
4013        grid.append(array([float(x) for x in cells]))
4014    grid = array(grid)
4015
4016    return {'xllcorner':xllcorner,
4017            'yllcorner':yllcorner,
4018            'cellsize':cellsize,
4019            'NODATA_value':NODATA_value}, grid
4020
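# Example (illustrative sketch): write a tiny esri ascii grid and read it
# back.  The file name 'tiny.asc' is hypothetical.
def _example_read_asc():
    fid = open('tiny.asc', 'w')
    fid.write('ncols         2\n')
    fid.write('nrows         2\n')
    fid.write('xllcorner     0.0\n')
    fid.write('yllcorner     0.0\n')
    fid.write('cellsize      10.0\n')
    fid.write('NODATA_value  -9999\n')
    fid.write('1.0 2.0\n')
    fid.write('3.0 4.0\n')
    fid.close()

    metadata, grid = _read_asc('tiny.asc')
    # metadata['cellsize'] == 10.0 and grid is a 2x2 Numeric array
    return metadata, grid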
[2884]4021
[2852]4022
[3720]4023    ####  URS 2 SWW  ###
4024
4025lon_name = 'LON'
4026lat_name = 'LAT'
4027time_name = 'TIME'
4028precision = Float # So if we want to change the precision it's done here
4029class Write_nc:
4030    """
4031    Write an nc file.
4032   
4033    Note, this should be checked to meet cdc netcdf conventions for gridded
4034    data. http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml
4035   
4036    """
4037    def __init__(self,
4038                 quantity_name,
4039                 file_name,
4040                 time_step_count,
4041                 time_step,
4042                 lon,
4043                 lat):
4044        """
4045        time_step_count is the number of time steps.
4046        time_step is the time step size
4047       
4048        pre-condition: quantity_name must be 'HA', 'UA' or 'VA'.
4049        """
4050        self.quantity_name = quantity_name
[4348]4051        quantity_units = {'HA':'CENTIMETERS',
[4302]4052                              'UA':'CENTIMETERS/SECOND',
[4348]4053                              'VA':'CENTIMETERS/SECOND'}       
[3720]4054       
[4348]4055        multiplier_dic = {'HA':100.0, # To convert from m to cm
4056                              'UA':100.0,  #  m/s to cm/sec
4057                              'VA':-100.0}  # MUX files have positive x in the
4058        # Southern direction.  This corrects for it, when writing nc files.
4059       
4060        self.quantity_multiplier =  multiplier_dic[self.quantity_name]
4061       
[3720]4062        #self.file_name = file_name
4063        self.time_step_count = time_step_count
4064        self.time_step = time_step
4065
4066        # NetCDF file definition
4067        self.outfile = NetCDFFile(file_name, 'w')
4068        outfile = self.outfile       
4069
4070        #Create new file
4071        nc_lon_lat_header(outfile, lon, lat)
4072   
4073        # TIME
4074        outfile.createDimension(time_name, None)
4075        outfile.createVariable(time_name, precision, (time_name,))
4076
4077        #QUANTITY
4078        outfile.createVariable(self.quantity_name, precision,
4079                               (time_name, lat_name, lon_name))
4080        outfile.variables[self.quantity_name].missing_value=-1.e+034
4081        outfile.variables[self.quantity_name].units= \
4082                                 quantity_units[self.quantity_name]
4083        outfile.variables[lon_name][:]= ensure_numeric(lon)
4084        outfile.variables[lat_name][:]= ensure_numeric(lat)
4085
4086        #Assume no one will be wanting to read this, while we are writing
4087        #outfile.close()
4088       
4089    def store_timestep(self, quantity_slice):
4090        """
[4348]4091        Write a time slice of quantity info
[3720]4092        quantity_slice is the data to be stored at this time step
4093        """
4094       
4095        outfile = self.outfile
4096       
4097        # Get the variables
4098        time = outfile.variables[time_name]
4099        quantity = outfile.variables[self.quantity_name]
4100           
4101        i = len(time)
4102
4103        #Store time
4104        time[i] = i*self.time_step #self.domain.time
[4348]4105        quantity[i,:] = quantity_slice* self.quantity_multiplier
[3720]4106       
4107    def close(self):
4108        self.outfile.close()
4109
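# Example (illustrative sketch): store two time slices of wave height for a
# 2x3 grid.  The file name 'ha_demo.nc' is hypothetical.
def _example_write_nc():
    from Numeric import zeros, Float
    lon = [150.0, 150.5, 151.0]
    lat = [-34.0, -33.5]
    nc_file = Write_nc('HA', 'ha_demo.nc',
                       time_step_count=2, time_step=60.0,
                       lon=lon, lat=lat)
    for i in range(2):
        # quantity slices are (lat, lon) arrays, here in metres
        nc_file.store_timestep(zeros((len(lat), len(lon)), Float) + i)
    nc_file.close()
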
4110def urs2sww(basename_in='o', basename_out=None, verbose=False,
4111            remove_nc_files=True,
4112            minlat=None, maxlat=None,
4113            minlon= None, maxlon=None,
4114            mint=None, maxt=None,
4115            mean_stage=0,
[3930]4116            origin = None,
[3720]4117            zscale=1,
4118            fail_on_NaN=True,
4119            NaN_filler=0,
[4380]4120            elevation=None):
[3720]4121    """
4122    Convert URS C binary format for wave propagation to
4123    sww format native to abstract_2d_finite_volumes.
4124
4125    Specify only basename_in and read files of the form
[5466]4126    basefilename-z-mux, basefilename-e-mux and
4127    basefilename-n-mux containing relative height,
[4378]4128    x-velocity and y-velocity, respectively.
[3720]4129
4130    Also convert latitude and longitude to UTM. All coordinates are
4131    assumed to be given in the GDA94 datum. The latitude and longitude
4132    information is for a grid.
4133
[3720]4134    min's and max's: If omitted - full extent is used.
4135    To include a value min may equal it, while max must exceed it.
[3930]4136    Lat and lon are assumed to be in decimal degrees.
[4014]4137    NOTE: minlon is the most east boundary.
[3964]4138   
4139    origin is a 3-tuple with geo referenced
4140    UTM coordinates (zone, easting, northing)
4141    It will be the origin of the sww file. This shouldn't be used,
4142    since all of anuga should be able to handle an arbitrary origin.
[3720]4143
[3964]4144
[3720]4145    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
4146    which means that latitude is the fastest
4147    varying dimension (row major order, so to speak)
4148
4149    In URS C binary the latitudes and longitudes are in ascending order.
4150    """
4151    if basename_out == None:
4152        basename_out = basename_in
4153    files_out = urs2nc(basename_in, basename_out)
4154    ferret2sww(basename_out,
4155               minlat=minlat,
4156               maxlat=maxlat,
4157               minlon=minlon,
[4014]4158               maxlon=maxlon,
[3720]4159               mint=mint,
4160               maxt=maxt,
4161               mean_stage=mean_stage,
[3930]4162               origin=origin,
[3720]4163               zscale=zscale,
4164               fail_on_NaN=fail_on_NaN,
4165               NaN_filler=NaN_filler,
4166               inverted_bathymetry=True,
4167               verbose=verbose)
4168    #print "files_out",files_out
4169    if remove_nc_files:
4170        for file_out in files_out:
4171            os.remove(file_out)
4172   
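# Example (illustrative sketch): convert URS mux files with basename 'o'
# (hypothetical) to an sww boundary file, keeping the intermediate nc files.
def _example_urs2sww():
    urs2sww(basename_in='o', basename_out='urs_boundary',
            minlat=-35.0, maxlat=-33.0,
            minlon=149.0, maxlon=151.0,
            mint=0, maxt=3600,
            mean_stage=0,
            remove_nc_files=False,
            verbose=True)
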
4173def urs2nc(basename_in = 'o', basename_out = 'urs'):
[3964]4174    """
4175    Convert the 3 urs files to 4 nc files.
4176
4177    The urs file names must be:
[5466]4178    [basename_in]-z-mux
4179    [basename_in]-e-mux
4180    [basename_in]-n-mux
[3964]4181   
4182    """
[4378]4183   
4184    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
4185                basename_in + EAST_VELOCITY_LABEL,
4186                basename_in + NORTH_VELOCITY_LABEL]
[3720]4187    files_out = [basename_out+'_ha.nc',
4188                 basename_out+'_ua.nc',
4189                 basename_out+'_va.nc']
4190    quantities = ['HA','UA','VA']
4191
[5104]4192    #if os.access(files_in[0]+'.mux', os.F_OK) == 0 :
4193    for i, file_name in enumerate(files_in):
4194        if os.access(file_name, os.F_OK) == 0:
4195            if os.access(file_name+'.mux', os.F_OK) == 0 :
4196                msg = 'File %s does not exist or is not accessible' %file_name
4197                raise IOError, msg
4198            else:
4199               files_in[i] += '.mux'
4200               print "file_name", file_name
[3720]4201    hashed_elevation = None
4202    for file_in, file_out, quantity in map(None, files_in,
4203                                           files_out,
4204                                           quantities):
[3820]4205        lonlatdep, lon, lat, depth = _binary_c2nc(file_in,
[3720]4206                                         file_out,
4207                                         quantity)
[3975]4208        #print "lonlatdep", lonlatdep
[3720]4209        if hashed_elevation == None:
4210            elevation_file = basename_out+'_e.nc'
[3964]4211            write_elevation_nc(elevation_file,
[3720]4212                                lon,
4213                                lat,
[3820]4214                                depth)
[3720]4215            hashed_elevation = myhash(lonlatdep)
4216        else:
4217            msg = "The elevation information in the mux files is inconsistent"
4218            assert hashed_elevation == myhash(lonlatdep), msg
4219    files_out.append(elevation_file)
4220    return files_out
4221   
4222def _binary_c2nc(file_in, file_out, quantity):
4223    """
[3964]4224    Reads in a quantity urs file and writes a quantity nc file.
4225    Additionally, returns the depth and lat, long info,
4226    so it can be written to a file.
[3720]4227    """
4228    columns = 3 # long, lat , depth
[3964]4229    mux_file = open(file_in, 'rb')
[3720]4230   
4231    # Number of points/stations
[3964]4232    (points_num,)= unpack('i',mux_file.read(4))
[3720]4233
4234    # nt, int - Number of time steps
[3964]4235    (time_step_count,)= unpack('i',mux_file.read(4))
[3720]4236
4237    #dt, float - time step, seconds
[3964]4238    (time_step,) = unpack('f', mux_file.read(4))
[3720]4239   
4240    msg = "Bad data in the mux file."
4241    if points_num < 0:
[3964]4242        mux_file.close()
[3720]4243        raise ANUGAError, msg
4244    if time_step_count < 0:
[3964]4245        mux_file.close()
[3720]4246        raise ANUGAError, msg
4247    if time_step < 0:
[3964]4248        mux_file.close()
[3720]4249        raise ANUGAError, msg
4250   
4251    lonlatdep = p_array.array('f')
[3964]4252    lonlatdep.read(mux_file, columns * points_num)
[3720]4253    lonlatdep = array(lonlatdep, typecode=Float)   
[3826]4254    lonlatdep = reshape(lonlatdep, (points_num, columns))
[3720]4255   
[3820]4256    lon, lat, depth = lon_lat2grid(lonlatdep)
[3830]4257    lon_sorted = list(lon)
[3750]4258    lon_sorted.sort()
[3830]4259
[3964]4260    if not lon == lon_sorted:
[3750]4261        msg = "Longitudes in mux file are not in ascending order"
4262        raise IOError, msg
[3830]4263    lat_sorted = list(lat)
4264    lat_sorted.sort()
4265
[3964]4266    if not lat == lat_sorted:
[3750]4267        msg = "Latitudes in mux file are not in ascending order"
4268        raise IOError, msg
[3720]4269    nc_file = Write_nc(quantity,
4270                       file_out,
[3826]4271                       time_step_count,
4272                       time_step,
4273                       lon,
4274                       lat)
[3720]4275
4276    for i in range(time_step_count):
[3824]4277        #Read in a time slice  from mux file 
[3720]4278        hz_p_array = p_array.array('f')
[3964]4279        hz_p_array.read(mux_file, points_num)
[3720]4280        hz_p = array(hz_p_array, typecode=Float)
[3820]4281        hz_p = reshape(hz_p, (len(lon), len(lat)))
4282        hz_p = transpose(hz_p) #mux has lat varying fastest, nc has long v.f.
[3824]4283
4284        #write time slice to nc file
[3720]4285        nc_file.store_timestep(hz_p)
[3964]4286    mux_file.close()
[3720]4287    nc_file.close()
4288
[3820]4289    return lonlatdep, lon, lat, depth
[3720]4290   
4291
[3964]4292def write_elevation_nc(file_out, lon, lat, depth_vector):
4293    """
4294    Write an nc elevation file.
4295    """
4296   
[3720]4297    # NetCDF file definition
4298    outfile = NetCDFFile(file_out, 'w')
4299
4300    #Create new file
4301    nc_lon_lat_header(outfile, lon, lat)
4302   
4303    # ELEVATION
4304    zname = 'ELEVATION'
4305    outfile.createVariable(zname, precision, (lat_name, lon_name))
4306    outfile.variables[zname].units='CENTIMETERS'
4307    outfile.variables[zname].missing_value=-1.e+034
4308
4309    outfile.variables[lon_name][:]= ensure_numeric(lon)
4310    outfile.variables[lat_name][:]= ensure_numeric(lat)
4311
4312    depth = reshape(depth_vector, ( len(lat), len(lon)))
4313    outfile.variables[zname][:]= depth
4314   
4315    outfile.close()
4316   
4317def nc_lon_lat_header(outfile, lon, lat):
[3964]4318    """
4319    outfile is the netcdf file handle.
4320    lon - a list/array of the longitudes
4321    lat - a list/array of the latitudes
4322    """
[3720]4323   
4324    outfile.institution = 'Geoscience Australia'
4325    outfile.description = 'Converted from URS binary C'
4326   
4327    # Longitude
4328    outfile.createDimension(lon_name, len(lon))
4329    outfile.createVariable(lon_name, precision, (lon_name,))
4330    outfile.variables[lon_name].point_spacing='uneven'
4331    outfile.variables[lon_name].units='degrees_east'
4332    outfile.variables[lon_name].assignValue(lon)
4333
4334
4335    # Latitude
4336    outfile.createDimension(lat_name, len(lat))
4337    outfile.createVariable(lat_name, precision, (lat_name,))
4338    outfile.variables[lat_name].point_spacing='uneven'
4339    outfile.variables[lat_name].units='degrees_north'
4340    outfile.variables[lat_name].assignValue(lat)
4341
4342
4343   
4344def lon_lat2grid(long_lat_dep):
4345    """
4346    given a list of points that are assumed to be on a grid,
4347    return the long's and lat's of the grid.
4348    long_lat_dep is an array where each row is a position.
4349    The first column is longitudes.
4350    The second column is latitudes.
[3820]4351
4352    The latitude is the fastest varying dimension - in mux files
[3720]4353    """
4354    LONG = 0
4355    LAT = 1
[3820]4356    QUANTITY = 2
[3830]4357
4358    long_lat_dep = ensure_numeric(long_lat_dep, Float)
[3820]4359   
[3826]4360    num_points = long_lat_dep.shape[0]
4361    this_rows_long = long_lat_dep[0,LONG]
[3964]4362
[3826]4363    # Count the length of unique latitudes
4364    i = 0
4365    while i < num_points and long_lat_dep[i,LONG] == this_rows_long:
4366        i += 1
[3964]4367    # determine the lats and longs from the grid
[3826]4368    lat = long_lat_dep[:i, LAT]       
[3964]4369    long = long_lat_dep[::i, LONG]
4370   
[3826]4371    lenlong = len(long)
4372    lenlat = len(lat)
[3964]4373    #print 'len lat', lat, len(lat)
4374    #print 'len long', long, len(long)
[3826]4375         
4376    msg = 'Input data is not gridded'     
4377    assert num_points % lenlat == 0, msg
4378    assert num_points % lenlong == 0, msg
4379         
4380    # Test that data is gridded       
4381    for i in range(lenlong):
[3720]4382        msg = 'Data is not gridded.  It must be for this operation'
[3826]4383        first = i*lenlat
4384        last = first + lenlat
4385               
4386        assert allclose(long_lat_dep[first:last,LAT], lat), msg
4387        assert allclose(long_lat_dep[first:last,LONG], long[i]), msg
[3720]4388   
[3826]4389   
4390#    print 'range long', min(long), max(long)
4391#    print 'range lat', min(lat), max(lat)
4392#    print 'ref long', min(long_lat_dep[:,0]), max(long_lat_dep[:,0])
4393#    print 'ref lat', min(long_lat_dep[:,1]), max(long_lat_dep[:,1])
4394   
4395   
4396   
4397    msg = 'Out of range latitudes/longitudes'
[3720]4398    for l in lat:assert -90 < l < 90 , msg
4399    for l in long:assert -180 < l < 180 , msg
4400
[3826]4401    # Changing quantity from lat being the fastest varying dimension to
[3820]4402    # long being the fastest varying dimension
4403    # FIXME - make this faster/do this a better way
4404    # use numeric transpose, after reshaping the quantity vector
[3826]4405#    quantity = zeros(len(long_lat_dep), Float)
4406    quantity = zeros(num_points, Float)
4407   
4408#    print 'num',num_points
[3820]4409    for lat_i, _ in enumerate(lat):
4410        for long_i, _ in enumerate(long):
4411            q_index = lat_i*lenlong+long_i
4412            lld_index = long_i*lenlat+lat_i
[3826]4413#            print 'lat_i', lat_i, 'long_i',long_i, 'q_index', q_index, 'lld_index', lld_index
4414            temp = long_lat_dep[lld_index, QUANTITY]
4415            quantity[q_index] = temp
[3820]4416           
4417    return long, lat, quantity
4418
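# Example (illustrative sketch): recover the grid axes and reorder a
# quantity vector from mux order (latitude varying fastest) to nc order
# (longitude varying fastest).
def _example_lon_lat2grid():
    from Numeric import array, Float
    long_lat_dep = array([[150.0, -34.0, 1.0],
                          [150.0, -33.0, 2.0],
                          [151.0, -34.0, 3.0],
                          [151.0, -33.0, 4.0]], Float)
    long, lat, quantity = lon_lat2grid(long_lat_dep)
    # long == [150.0, 151.0], lat == [-34.0, -33.0]
    # quantity == [1.0, 3.0, 2.0, 4.0]
    return long, lat, quantity
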
[5250]4419####  END URS 2 SWW  ###
[4223]4420
[5250]4421#### URS UNGRIDDED 2 SWW ###
[4223]4422
[5250]4423### PRODUCING THE POINTS NEEDED FILE ###
4424
[5248]4425# Ones used for FESA 2007 results
4426#LL_LAT = -50.0
4427#LL_LONG = 80.0
4428#GRID_SPACING = 1.0/60.0
4429#LAT_AMOUNT = 4800
4430#LONG_AMOUNT = 3600
4431
[4318]4432def URS_points_needed_to_file(file_name, boundary_polygon, zone,
[5249]4433                              ll_lat, ll_long,
4434                              grid_spacing, 
4435                              lat_amount, long_amount,
[5253]4436                              isSouthernHemisphere=True,
[4318]4437                              export_csv=False, use_cache=False,
[4284]4438                              verbose=False):
[4238]4439    """
[5253]4440    Given the info to replicate the URS grid and a polygon, output
4441    a file that specifies the cloud of boundary points for URS.
[5369]4442
4443    This creates a .urs file.  This is in the format used by URS;
4444    1st line is the number of points,
4445    each line after represents a point, in lats and longs.
[5253]4446   
4447    Note: The polygon cannot cross zones or hemispheres.
[5369]4448
4449    A workaround for different zones or hemispheres is to run this twice,
4450    once for each zone, and then combine the output.
[5253]4451   
[4238]4452    file_name - name of the urs file produced for David.
4453    boundary_polygon - a list of points that describes a polygon.
4454                      The last point is assumed to join the first point.
4455                      This is in UTM (lat long would be better though)
4456
[5248]4457     This is info about the URS model that must be supplied.
[5253]4458     
[4238]4459    ll_lat - lower left latitude, in decimal degrees
4460    ll-long - lower left longitude, in decimal degrees
4461    grid_spacing - in decimal degrees
[5254]4462    lat_amount - number of latitudes
4463    long_amount - number of longs
[4238]4464
4465
4466    Don't add the file extension.  It will be added.
4467    """
[4318]4468    geo = URS_points_needed(boundary_polygon, zone, ll_lat, ll_long,
4469                            grid_spacing, 
[5253]4470                            lat_amount, long_amount, isSouthernHemisphere,
4471                            use_cache, verbose)
[4254]4472    if not file_name[-4:] == ".urs":
4473        file_name += ".urs"
[5253]4474    geo.export_points_file(file_name, isSouthHemisphere=isSouthernHemisphere)
[4284]4475    if export_csv:
4476        if file_name[-4:] == ".urs":
4477            file_name = file_name[:-4] + ".csv"
4478        geo.export_points_file(file_name)
[5254]4479    return geo
[4284]4480
[5250]4481def URS_points_needed(boundary_polygon, zone, ll_lat,
4482                      ll_long, grid_spacing, 
[5253]4483                      lat_amount, long_amount, isSouthHemisphere=True,
[4318]4484                      use_cache=False, verbose=False):
4485    args = (boundary_polygon,
[5253]4486            zone, ll_lat,
4487            ll_long, grid_spacing, 
4488            lat_amount, long_amount, isSouthHemisphere)
4489    kwargs = {} 
[4284]4490    if use_cache is True:
4491        try:
4492            from anuga.caching import cache
4493        except:
4494            msg = 'Caching was requested, but caching module '+\
4495                  'could not be imported'
4496            raise msg
4497
4498
4499        geo = cache(_URS_points_needed,
4500                  args, kwargs,
4501                  verbose=verbose,
4502                  compression=False)
4503    else:
[5253]4504        geo = apply(_URS_points_needed, args, kwargs)
[5250]4505
4506    return geo
4507
4508def _URS_points_needed(boundary_polygon,
[4318]4509                      zone, ll_lat,
[4284]4510                      ll_long, grid_spacing, 
[5253]4511                      lat_amount, long_amount,
4512                       isSouthHemisphere):
[4223]4513    """
4514    boundary_polygon - a list of points that describes a polygon.
4515                      The last point is assumed to join the first point.
[5250]4516                      This is in UTM (lat long would be better though)
[4223]4517
4518    ll_lat - lower left latitude, in decimal degrees
4519    ll-long - lower left longitude, in decimal degrees
4520    grid_spacing - in decimal degrees
4521
4522    """
[4254]4523   
[4223]4524    from sets import ImmutableSet
4525   
4526    msg = "grid_spacing can not be zero"
[5250]4527    assert not grid_spacing == 0, msg
[4223]4528    a = boundary_polygon
4529    # List of segments.  Each segment is two points.
4530    segs = [i and [a[i-1], a[i]] or [a[len(a)-1], a[0]] for i in range(len(a))]
4531    # convert the segs to Lat's and longs.
[4318]4532   
4533    # Don't assume the zone of the segments is the same as the lower left
4534    # corner of the lat long data!!  They can easily be in different zones
4535   
[4223]4536    lat_long_set = ImmutableSet()
4537    for seg in segs:
4538        points_lat_long = points_needed(seg, ll_lat, ll_long, grid_spacing, 
[5253]4539                      lat_amount, long_amount, zone, isSouthHemisphere)
[4223]4540        lat_long_set |= ImmutableSet(points_lat_long)
[5253]4541    if lat_long_set == ImmutableSet([]):
4542        msg = """URS region specified and polygon does not overlap."""
4543        raise ValueError, msg
4544
4545    # Warning there is no info in geospatial saying the hemisphere of
4546    # these points.  There should be.
[4223]4547    geo = Geospatial_data(data_points=list(lat_long_set),
4548                              points_are_lats_longs=True)
4549    return geo
4550   
4551def points_needed(seg, ll_lat, ll_long, grid_spacing, 
[5253]4552                  lat_amount, long_amount, zone,
4553                  isSouthHemisphere):
[4223]4554    """
[5254]4555    seg is two points, in UTM
[4223]4556    return a list of the points, in lats and longs that are needed to
4557    interpolate any point on the segment.
4558    """
[4280]4559    from math import sqrt
[4223]4560    #print "zone",zone
4561    geo_reference = Geo_reference(zone=zone)
[5254]4562    #print "seg", seg
[4223]4563    geo = Geospatial_data(seg,geo_reference=geo_reference)
[5253]4564    seg_lat_long = geo.get_data_points(as_lat_long=True,
4565                                       isSouthHemisphere=isSouthHemisphere)
[5254]4566    #print "seg_lat_long", seg_lat_long
[4223]4567    # 1.415 = 2^0.5, rounded up....
[4280]4568    sqrt_2_rounded_up = 1.415
4569    buffer = sqrt_2_rounded_up * grid_spacing
[4223]4570   
4571    max_lat = max(seg_lat_long[0][0], seg_lat_long[1][0]) + buffer
4572    max_long = max(seg_lat_long[0][1], seg_lat_long[1][1]) + buffer
[4238]4573    min_lat = min(seg_lat_long[0][0], seg_lat_long[1][0]) - buffer
4574    min_long = min(seg_lat_long[0][1], seg_lat_long[1][1]) - buffer
[4223]4575
[5254]4576    #print "min_long", min_long
4577    #print "ll_long", ll_long
4578    #print "grid_spacing", grid_spacing
[4223]4579    first_row = (min_long - ll_long)/grid_spacing
4580    # To round up
4581    first_row_long = int(round(first_row + 0.5))
4582    #print "first_row", first_row_long
4583
4584    last_row = (max_long - ll_long)/grid_spacing # round down
4585    last_row_long = int(round(last_row))
4586    #print "last_row",last_row _long
4587   
4588    first_row = (min_lat - ll_lat)/grid_spacing
4589    # To round up
4590    first_row_lat = int(round(first_row + 0.5))
4591    #print "first_row", first_row_lat
4592
4593    last_row = (max_lat - ll_lat)/grid_spacing # round down
4594    last_row_lat = int(round(last_row))
4595    #print "last_row",last_row_lat
4596
[4283]4597    # Work out the max distance;
4598    # 111120 m is the ground distance spanned by 1 degree of latitude.
4599    #max_distance = sqrt_2_rounded_up * 111120 * grid_spacing
4600    max_distance = 157147.4112 * grid_spacing
4601    #print "max_distance", max_distance #2619.12 m for 1 minute
[4223]4602    points_lat_long = []
4603    # Create a list of the lat long points to include.
4604    for index_lat in range(first_row_lat, last_row_lat + 1):
4605        for index_long in range(first_row_long, last_row_long + 1):
[5254]4606            #print "index_lat", index_lat
4607            #print "index_long", index_long
[4223]4608            lat = ll_lat + index_lat*grid_spacing
4609            long = ll_long + index_long*grid_spacing
[4280]4610
4611            #filter here to keep good points
4612            if keep_point(lat, long, seg, max_distance):
4613                points_lat_long.append((lat, long)) #must be hashable
[4223]4614    #print "points_lat_long", points_lat_long
[4268]4615
[4280]4616    # Points too far from the segment were already filtered out by keep_point.
4617    return points_lat_long
4618
4619def keep_point(lat, long, seg, max_distance):
4620    """
4621    seg is a segment of two points, in UTM.
4622    """
4623    from math import sqrt
4624    _ , x0, y0 = redfearn(lat, long)
4625    x1 = seg[0][0]
4626    y1 = seg[0][1]
4627    x2 = seg[1][0]
4628    y2 = seg[1][1]
4629    x2_1 = x2-x1
4630    y2_1 = y2-y1
[5254]4631    try:
4632        d = abs((x2_1)*(y1-y0)-(x1-x0)*(y2_1))/sqrt( \
4633            (x2_1)*(x2_1)+(y2_1)*(y2_1))
4634    except ZeroDivisionError:
4635        #print "seg", seg
4636        #print "x0", x0
4637        #print "y0", y0
4638        if sqrt((x2_1)*(x2_1)+(y2_1)*(y2_1)) == 0 and \
4639           abs((x2_1)*(y1-y0)-(x1-x0)*(y2_1)) == 0:
4640            return True
4641        else:
4642            return False
4643   
[4280]4644    if d <= max_distance:
4645        return True
4646    else:
4647        return False
4648   
[4268]4649    #### CONVERTING UNGRIDDED URS DATA TO AN SWW FILE ####
[4378]4650   
[5466]4651WAVEHEIGHT_MUX_LABEL = '-z-mux'
4652EAST_VELOCITY_LABEL =  '-e-mux'
4653NORTH_VELOCITY_LABEL =  '-n-mux' 
[4268]4654def urs_ungridded2sww(basename_in='o', basename_out=None, verbose=False,
[4382]4655                      mint=None, maxt=None,
4656                      mean_stage=0,
4657                      origin=None,
4658                      hole_points_UTM=None,
4659                      zscale=1):
[4298]4660    """   
4661    Convert URS C binary format for wave propagation to
4662    sww format native to abstract_2d_finite_volumes.
4663
[4378]4664
[4298]4665    Specify only basename_in and read files of the form
[5466]4666    basefilename-z-mux, basefilename-e-mux and
4667    basefilename-n-mux containing relative height,
[4378]4668    x-velocity and y-velocity, respectively.
[4298]4669
4670    Also convert latitude and longitude to UTM. All coordinates are
4671    assumed to be given in the GDA94 datum. The latitude and longitude
4672    information is assumed to be ungridded.
4673
4674    mint and maxt: If omitted, the full time extent is used.
4675    To include a value, mint and maxt may equal it.
4676    Lat and lon are assumed to be in decimal degrees.
4677   
4678    origin is a 3-tuple with geo referenced
4679    UTM coordinates (zone, easting, northing)
4680    It will be the origin of the sww file. This shouldn't be used,
4681    since all of anuga should be able to handle an arbitrary origin.
[4455]4682    The mux point info is NOT relative to this origin.
[4298]4683
4684
4685    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
4686    which means that latitude is the fastest
4687    varying dimension (row major order, so to speak).
4688
4689    In URS C binary the latitudes and longitudes are in ascending order.
[4363]4690
4691    Note, interpolations of the resulting sww file will be different
4692    from results of urs2sww.  This is due to the interpolation
4693    function used, and the different grid structure between urs2sww
4694    and this function.
4695   
4696    Interpolating data that has an underlying gridded source can
4697    easily end up with different values, depending on the underlying
4698    mesh.
4699
4700    consider these 4 points
4701    50  -50
4702
4703    0     0
4704
4705    The grid can be
4706     -
4707    |\|    A
4708     -
4709     or;
4710      -
4711     |/|   B
4712      -
4713      If a point is just below the centre of the square, it will have a
4714      +ve value in grid A and a -ve value in grid B.
[4292]4715    """ 
[4899]4716    from anuga.mesh_engine.mesh_engine import NoTrianglesError
4717    from anuga.pmesh.mesh import Mesh
[4280]4718
[4378]4719    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
4720                basename_in + EAST_VELOCITY_LABEL,
4721                basename_in + NORTH_VELOCITY_LABEL]
[4271]4722    quantities = ['HA','UA','VA']
[4268]4723
4724    # Instantiate Urs_points for each of the three mux files.
4725    mux = {}
4726    for quantity, file in map(None, quantities, files_in):
4727        mux[quantity] = Urs_points(file)
4728       
[4298]4729    # Could check that the depth is the same. (hashing)
[4268]4730
[4271]4731    # handle to a mux file to do depth stuff
4732    a_mux = mux[quantities[0]]
4733   
4734    # Convert to utm
4735    lat = a_mux.lonlatdep[:,1]
4736    long = a_mux.lonlatdep[:,0]
4737    points_utm, zone = convert_from_latlon_to_utm( \
4738        latitudes=lat, longitudes=long)
4739    #print "points_utm", points_utm
4740    #print "zone", zone
4741
[4280]4742    elevation = a_mux.lonlatdep[:,2] * -1  # Convert depth to elevation
[4271]4743   
4744    # grid ( create a mesh from the selected points)
[4280]4745    # This mesh has a problem.  Triangles are stretched over ungridded areas.
4746    #  If these areas could be described as holes in pmesh, that would be great.
[4381]4747
4748    # I can't just get the user to select a point in the middle.
4749    # A boundary is needed around these points.
4750    # But if the zone of points is obvious enough auto-segment should do
4751    # a good boundary.
[4271]4752    mesh = Mesh()
4753    mesh.add_vertices(points_utm)
[4455]4754    mesh.auto_segment(smooth_indents=True, expand_pinch=True)
4755    # To try and avoid alpha shape 'hugging' too much
4756    mesh.auto_segment( mesh.shape.get_alpha()*1.1 )
[4382]4757    if hole_points_UTM is not None:
4758        point = ensure_absolute(hole_points_UTM)
4759        mesh.add_hole(point[0], point[1])
4760    try:
4761        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
4762    except NoTrianglesError:
4763        # This is a bit of a hack, going in and changing the
4764        # data structure.
4765        mesh.holes = []
4766        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
[4271]4767    mesh_dic = mesh.Mesh2MeshList()
4768
[4522]4769    #mesh.export_mesh_file(basename_in + '_168.tsh')
4770    #import sys; sys.exit()
[4295]4771    # These are the times of the mux file
4772    mux_times = []
[4271]4773    for i in range(a_mux.time_step_count):
[4295]4774        mux_times.append(a_mux.time_step * i) 
4775    mux_times_start_i, mux_times_fin_i = mux2sww_time(mux_times, mint, maxt)
4776    times = mux_times[mux_times_start_i:mux_times_fin_i]
4777   
4778    if mux_times_start_i == mux_times_fin_i:
4779        # Close the mux files
4780        for quantity, file in map(None, quantities, files_in):
4781            mux[quantity].close()
4782        msg="Due to mint and maxt there's no time info in the boundary SWW."
[4608]4783        raise Exception, msg
[4271]4784       
[4295]4785    # If this raise is removed there are currently no downstream errors
4786           
[4271]4787    points_utm=ensure_numeric(points_utm)
4788    assert allclose(ensure_numeric(mesh_dic['generatedpointlist']),
4789                    ensure_numeric(points_utm))
4790   
4791    volumes = mesh_dic['generatedtrianglelist']
4792   
4793    # write sww intro and grid stuff.   
[4268]4794    if basename_out is None:
4795        swwname = basename_in + '.sww'
4796    else:
4797        swwname = basename_out + '.sww'
4798
[4348]4799    if verbose: print 'Output to ', swwname
[4268]4800    outfile = NetCDFFile(swwname, 'w')
[4295]4801    # For a different way of doing this, check out tsh2sww
4802    # work out sww_times and the index range this covers
[4455]4803    sww = Write_sww()
[4704]4804    sww.store_header(outfile, times, len(volumes), len(points_utm),
[4862]4805                     verbose=verbose,sww_precision=Float)
[4295]4806    outfile.mean_stage = mean_stage
4807    outfile.zscale = zscale
[4455]4808
[4704]4809    sww.store_triangulation(outfile, points_utm, volumes,
[4455]4810                            elevation, zone,  new_origin=origin,
4811                            verbose=verbose)
[4292]4812   
4813    if verbose: print 'Converting quantities'
[4280]4814    j = 0
4815    # Read in a time slice from each mux file and write it to the sww file
[4455]4816    for ha, ua, va in map(None, mux['HA'], mux['UA'], mux['VA']):
[4295]4817        if j >= mux_times_start_i and j < mux_times_fin_i:
[4455]4818            stage = zscale*ha + mean_stage
4819            h = stage - elevation
4820            xmomentum = ua*h
4821            ymomentum = -1*va*h # -1 since in mux files south is positive.
[4704]4822            sww.store_quantities(outfile, 
4823                                 slice_index=j - mux_times_start_i,
4824                                 verbose=verbose,
4825                                 stage=stage,
4826                                 xmomentum=xmomentum,
[4862]4827                                 ymomentum=ymomentum,
4828                                 sww_precision=Float)
[4280]4829        j += 1
[4455]4830    if verbose: sww.verbose_quantities(outfile)
[4280]4831    outfile.close()
[4455]4834
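# A minimal usage sketch for urs_ungridded2sww, assuming mux files
# project-z-mux, project-e-mux and project-n-mux exist (hypothetical names):
#
#     urs_ungridded2sww(basename_in='project', basename_out='project_boundary',
#                       mint=0.0, maxt=3600.0, mean_stage=0.0, verbose=True)
#
# This writes project_boundary.sww containing stage, xmomentum and ymomentum
# on a triangulation of the mux gauge points.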
[5347]4835    ##################################
4836    # READ MUX2 FILES line of points #
4837    ##################################
4838
[5466]4839WAVEHEIGHT_MUX2_LABEL = '-z-mux2'
4840EAST_VELOCITY_MUX2_LABEL =  '-e-mux2'
4841NORTH_VELOCITY_MUX2_LABEL =  '-n-mux2'   
[5347]4842
[5534]4843def read_mux2_py(filenames,
4844                 weights,
4845                 permutation=None,
4846                 verbose=False):
4847    """Access the mux files specified in the filenames list. Combine the
4848       data found therin as a weighted linear sum as specifed by the weights.
[5537]4849       If permutation is None or empty extract timeseries data for all gauges within the
[5534]4850       files.
4851         
4852       Input:
4853           filenames:   List of filenames specifiying the file containing the
4854                        timeseries data (mux2 format) for each source
4855           weights:     Weights associated with each source
4856           permutation: Specifies the gauge numbers that for which data is to be
4857                        extracted
4858    """ 
[5485]4859
[5347]4860    from Numeric import ones,Float,compress,zeros,arange
4861    from urs_ext import read_mux2
4862
[5412]4863    numSrc=len(filenames)
[5347]4864
4865    file_params=-1*ones(3,Float)#[nsta,dt,nt]
[5470]4866   
4867    # Convert verbose to int C flag
4868    if verbose:
4869        verbose=1
4870    else:
4871        verbose=0
4872       
[5537]4873       
4874    #msg = 'I got the permutation vector:' + str(permutation)
4875    #assert permutation is not None, msg
4876    if permutation is None:
4877        permutation = ensure_numeric([], Float)   
[5541]4878
[5534]4879    # Call underlying C implementation of read_mux2 (see urs_ext)
[5537]4880    data=read_mux2(numSrc,filenames,weights,file_params,permutation,verbose)
[5347]4881
4882    msg='File parameter values were not read in correctly from c file'
4883    assert len(compress(file_params>0,file_params))!=0,msg
[4455]4884   
[5537]4885    msg='The number of stations specified in the c array and in the file are inconsistent'
4886    assert file_params[0] >= len(permutation), msg
4887   
4888    msg='The number of stations returned is inconsistent with the requested number'   
4889    assert len(permutation) == 0 or len(permutation) == data.shape[0], msg
4890   
[5347]4891    nsta=int(file_params[0])
4892    msg='Must have at least one station'
4893    assert nsta>0,msg
[5537]4894   
[5347]4895    dt=file_params[1]
4896    msg='Must have a positive timestep'
4897    assert dt>0,msg
[5537]4898   
[5347]4899    nt=int(file_params[2])
4900    msg='Must have at least one time step'
4901    assert nt>0,msg
4902   
4903    OFFSET=5 #number of site parameters p passed back with data
4904    #p=[geolat,geolon,depth,start_tstep,finish_tstep]
4905
4906    times=dt*arange(1,(data.shape[1]-OFFSET)+1)
4907    latitudes=zeros(data.shape[0],Float)
4908    longitudes=zeros(data.shape[0],Float)
4909    elevation=zeros(data.shape[0],Float)
[5358]4910    quantity=zeros((data.shape[0],data.shape[1]-OFFSET),Float)
[5412]4911    starttime=1e16
[5347]4912    for i in range(0,data.shape[0]):
4913        latitudes[i]=data[i][data.shape[1]-OFFSET]
4914        longitudes[i]=data[i][data.shape[1]-OFFSET+1]
4915        elevation[i]=-data[i][data.shape[1]-OFFSET+2]
[5358]4916        quantity[i]=data[i][:-OFFSET]
[5412]4917        starttime=min(dt*data[i][data.shape[1]-OFFSET+3],starttime)
4918       
4919    return times, latitudes, longitudes, elevation, quantity, starttime
[5347]4920
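# A minimal usage sketch for read_mux2_py, assuming two hypothetical wave
# height sources combined with weights 0.25 and 0.75:
#
#     files = ['sourceA-z-mux2', 'sourceB-z-mux2']
#     times, latitudes, longitudes, elevation, quantity, starttime = \
#            read_mux2_py(files, ensure_numeric([0.25, 0.75]))
#
# quantity has shape (number_of_gauges, number_of_timesteps) and holds the
# weighted sum of the wave heights from the two sources.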
[4295]4921def mux2sww_time(mux_times, mint, maxt):
4922    """
4923    """
[4271]4924
[4295]4925    if mint == None:
4926        mux_times_start_i = 0
4927    else:
4928        mux_times_start_i = searchsorted(mux_times, mint)
4929       
4930    if maxt == None:
4931        mux_times_fin_i = len(mux_times)
4932    else:
4933        maxt += 0.5 # so if you specify a time where there is
4934                    # data that time will be included
4935        mux_times_fin_i = searchsorted(mux_times, maxt)
4936
4937    return mux_times_start_i, mux_times_fin_i
4938
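# For example (a sketch): with mux_times = [0., 10., 20., 30.], mint=5 and
# maxt=25, mux2sww_time returns (1, 3), selecting the slices at 10. and 20.
# The 0.5 added to maxt ensures that a slice lying exactly at maxt is
# included despite searchsorted's half-open behaviour.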
[4455]4939
[5534]4940def urs2sts(basename_in, basename_out=None, 
4941            weights=None,
4942            verbose=False, 
4943            origin=None,
4944            mean_stage=0.0, 
4945            zscale=1.0,
4946            ordering_filename=None):
[5347]4947    """Convert URS mux2 format for wave propagation to sts format
4948
4949    Also convert latitude and longitude to UTM. All coordinates are
4950    assumed to be given in the GDA94 datum
4951
4952    origin is a 3-tuple with geo referenced
4953    UTM coordinates (zone, easting, northing)
[5462]4954   
[5534]4955    inputs:
4956       
[5462]4957    basename_in: list of source file prefixes
4958   
[5534]4959        These are combined with the extensions:
4960        WAVEHEIGHT_MUX2_LABEL = '-z-mux2' for stage
4961        EAST_VELOCITY_MUX2_LABEL =  '-e-mux2' xmomentum
4962        NORTH_VELOCITY_MUX2_LABEL =  '-n-mux2' and ymomentum
[5462]4963   
[5534]4964        to create a 2D list of mux2 files. The rows are associated with each
4965        quantity and must have the above extensions;
4966        the columns are the list of file prefixes.
[5462]4967   
[5534]4968    ordering_filename: a .txt file name specifying which mux2 gauge points are
4969              to be stored. This is indicated by the index of the gauge
4970              in the ordering file.
[5462]4971   
[5534]4972              ordering file format:
4973              1st line:    'index,longitude,latitude\n'
4974              other lines: index,longitude,latitude
4975             
[5537]4976              If ordering_filename is None or the ordering file is
4977              empty then all points are taken in the order they
[5534]4978              appear in the mux2 file.
[5462]4979   
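              Example ordering file contents (hypothetical values):

                  index,longitude,latitude
                  0,151.00,-34.00
                  1,151.01,-34.00
                  2,151.02,-34.01

              The ordering file must contain at least two points.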
4980   
4981    output:
[5534]4982      basename_out: name of sts file in which mux2 data is stored.
[5462]4983   
4984   
[5347]4985    """
4986    import os
4987    from Scientific.IO.NetCDF import NetCDFFile
4988    from Numeric import Float, Int, Int32, searchsorted, zeros, array
[5412]4989    from Numeric import allclose, around,ones,Float
4990    from types import ListType,StringType
4991    from operator import __and__
4992   
4993    if not isinstance(basename_in, ListType):
4994        if verbose: print 'Reading single source'
4995        basename_in=[basename_in]
[5347]4996
[5412]4997    # Check that basename is a list of strings
[5534]4998    if not reduce(__and__, map(lambda z:isinstance(z,StringType), basename_in)):
[5412]4999        msg= 'basename_in must be a string or list of strings'
5000        raise Exception, msg
5001
5002    # Find the number of sources to be used
5003    numSrc=len(basename_in)
5004
5005    # A weight must be specified for each source
5006    if weights is None:
[5534]5007        # Default is equal weighting
[5412]5008        weights=ones(numSrc,Float)/numSrc
5009    else:
5010        weights = ensure_numeric(weights)
5011        msg = 'When combining multiple sources must specify a weight for '+\
5012              'each mux2 source file'
5013        assert len(weights)== numSrc, msg
5014
[5462]5015    # Check output filename   
[5347]5016    if basename_out is None:
[5546]5017        msg = 'STS filename must be specified as basename_out'
5018        msg += ' in function urs2sts'
[5462]5019        raise Exception, msg
[5546]5020   
5021    if basename_out.endswith('.sts'):
5022        stsname = basename_out
5023    else:   
5024        stsname = basename_out + '.sts'       
[5347]5025
[5534]5026    # Create input filenames from basenames and check their existence
[5412]5027    files_in=[[],[],[]]
5028    for files in basename_in:
5029        files_in[0].append(files + WAVEHEIGHT_MUX2_LABEL),
5030        files_in[1].append(files + EAST_VELOCITY_MUX2_LABEL)
5031        files_in[2].append(files + NORTH_VELOCITY_MUX2_LABEL)
5032   
[5534]5033    quantities = ['HA','UA','VA'] # Quantity names used in the MUX2 format
[5412]5034    for i in range(len(quantities)): 
5035        for file_in in files_in[i]:
5036            if (os.access(file_in, os.F_OK) == 0):
5037                msg = 'File %s does not exist or is not accessible' %file_in
5038                raise IOError, msg
[5358]5039
[5537]5040    # Establish permutation array
5041    if ordering_filename is not None:
[5550]5042
5043        if verbose is True:
5044            print 'Reading ordering file', ordering_filename
[5537]5045        # Read ordering file
5046        try:
5047            fid=open(ordering_filename, 'r')
5048            file_header=fid.readline().split(',')
5049            ordering_lines=fid.readlines()
5050            fid.close()
5051        except:
5052            msg = 'Cannot open %s'%ordering_filename
5053            raise Exception, msg
5054
5055        reference_header = 'index, longitude, latitude\n'
5056        reference_header_split = reference_header.split(',')
5057        for i in range(3):
[5550]5058            if not file_header[i].strip() == reference_header_split[i].strip():
5059                msg = 'File must contain header: '+reference_header
[5537]5060                raise Exception, msg
5061
5062        if len(ordering_lines)<2:
5063            msg = 'File must contain at least two points'
5064            raise Exception, msg
5065
5066        permutation = ensure_numeric([int(line.split(',')[0]) for line in ordering_lines])
5067    else:
[5539]5068        permutation = None
[5537]5069
[5541]5070
5071    #print 'permutation', permutation
[5534]5072    # Read MUX2 files
[5485]5073    if verbose: print 'Reading mux2 files'
5074    mux={}
[5534]5075    for i, quantity in enumerate(quantities):
5076   
5077        # For each quantity read the associated list of source mux2 files with
5078        # the extension associated with that quantity
5079        times,\
[5537]5080        latitudes,\
5081        longitudes,\
[5534]5082        elevation,\
5083        mux[quantity],\
[5537]5084        starttime = read_mux2_py(files_in[i], weights, permutation, verbose=verbose)
[5534]5085   
5086        # Check that all quantities have consistent time and space information     
[5485]5087        if quantity!=quantities[0]:
[5534]5088            msg='%s, %s and %s have inconsistent gauge data'\
5089                %(files_in[0], files_in[1], files_in[2])
5090            assert allclose(times, times_old), msg
[5537]5091            assert allclose(latitudes, latitudes_old), msg
5092            assert allclose(longitudes, longitudes_old), msg
[5534]5093            assert allclose(elevation, elevation_old), msg
5094            assert allclose(starttime, starttime_old), msg
[5485]5095        times_old=times
[5537]5096        latitudes_old=latitudes
5097        longitudes_old=longitudes
[5485]5098        elevation_old=elevation
5099        starttime_old=starttime
5100
[5537]5101
[5485]5102       
[5462]5103       
5104       
5105        # Self check - can be removed to improve speed
[5537]5106
5107        #ref_longitudes = [float(line.split(',')[1]) for line in ordering_lines]               
5108        #ref_latitudes = [float(line.split(',')[2]) for line in ordering_lines]       
5109        #
5110        #msg = 'Longitudes specified in ordering file do not match those found in mux files'
5111        #msg += 'I got %s instead of %s (only beginning shown)'\
5112        #    %(str(longitudes[:10]) + '...',
5113        #      str(ref_longitudes[:10]) + '...')       
5114        #assert allclose(longitudes, ref_longitudes), msg       
5115        #
5116        #msg = 'Latitudes specified in ordering file do not match those found in mux files. '
5117        #msg += 'I got %s instead of %s (only beginning shown)'\
5118        #    %(str(latitudes[:10]) + '...',
5119        #      str(ref_latitudes[:10]) + '...')               
5120        #assert allclose(latitudes, ref_latitudes), msg
[5462]5121       
[5537]5122               
[5462]5123       
[5534]5124    # Store timeseries in NetCDF STS file   
[5358]5125    msg='File is empty and/or clipped region not in file region'
5126    assert len(latitudes) > 0, msg
5127
[5537]5128    number_of_points = latitudes.shape[0]      # Number of stations retrieved
5129    number_of_times = times.shape[0]           # Number of timesteps
5130    number_of_latitudes = latitudes.shape[0]   # Number latitudes
5131    number_of_longitudes = longitudes.shape[0] # Number longitudes
[5347]5132
5133    # NetCDF file definition
5134    outfile = NetCDFFile(stsname, 'w')
5135
5136    description = 'Converted from URS mux2 files: %s'\
5137                  %(basename_in)
5138   
5139    # Create new file
5140    sts = Write_sts()
[5462]5141    sts.store_header(outfile, 
5142                     times+starttime,
5143                     number_of_points, 
5144                     description=description,
5145                     verbose=verbose,
5146                     sts_precision=Float)
[5347]5147   
5148    # Store
5149    from anuga.coordinate_transforms.redfearn import redfearn
[5537]5150    x = zeros(number_of_points, Float)  # Easting
5151    y = zeros(number_of_points, Float)  # Northing
[5347]5152
5153    # Check zone boundaries
5154    refzone, _, _ = redfearn(latitudes[0],longitudes[0]) 
5155
5156    for i in range(number_of_points):
5157        zone, easting, northing = redfearn(latitudes[i],longitudes[i])
5158        x[i] = easting
5159        y[i] = northing
[5358]5160        msg='all sts gauges need to be in the same zone'
5161        assert zone==refzone,msg
[5347]5162
5163    if origin is None:
5164        origin = Geo_reference(refzone,min(x),min(y))
5165    geo_ref = write_NetCDF_georeference(origin, outfile)
5166
[5546]5167
[5347]5168   
5169    #print geo_ref.get_xllcorner()
5170    #print geo_ref.get_yllcorner()
5171
[5546]5172    elevation = resize(elevation,outfile.variables['elevation'][:].shape)
[5347]5173    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
5174    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
[5546]5175    outfile.variables['elevation'][:] = elevation
[5347]5176
5177    stage = outfile.variables['stage']
5178    xmomentum = outfile.variables['xmomentum']
[5462]5179    ymomentum = outfile.variables['ymomentum']
[5347]5180
5181    if verbose: print 'Converting quantities'
5182    for j in range(len(times)):
[5537]5183        for i in range(number_of_points):
5184            w = zscale*mux['HA'][i,j] + mean_stage
5185            h=w-elevation[i]
[5347]5186            stage[j,i] = w
5187
[5537]5188            xmomentum[j,i] = mux['UA'][i,j]*h
5189            ymomentum[j,i] = mux['VA'][i,j]*h
[5462]5190
[5347]5191    outfile.close()
5192
[5463]5193def create_sts_boundary(stsname):
[5418]5194    """
5195    Create boundary segments from an .sts file. Points can be stored in
5196    arbitrary order within the .sts file. The order in which the .sts points
5197    make up the boundary is given in the order.txt file.
[5463]5198   
5199    FIXME:
5200    Point coordinates are stored in relative eastings and northings.
5201    But boundary is produced in absolute coordinates
5202   
[5418]5203    """
5204    try:
5205        fid = NetCDFFile(stsname+'.sts', 'r')
5206    except:
5207        msg = 'Cannot open %s' %(stsname+'.sts')
5208        raise IOError, msg
5209
5210    xllcorner = fid.xllcorner[0]
5211    yllcorner = fid.yllcorner[0]
[5462]5212    # Points stored in the sts file are relative to [xllcorner, yllcorner],
[5418]5213    # but we cannot assume that the boundary polygon will be; at least not
5214    # the additional points specified by the user after this function is called.
5215    x = fid.variables['x'][:]+xllcorner
5216    y = fid.variables['y'][:]+yllcorner
5217
5218    x = reshape(x, (len(x),1))
5219    y = reshape(y, (len(y),1))
5220    sts_points = concatenate((x,y), axis=1)
5221
[5463]5222    return sts_points.tolist()
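# A minimal usage sketch, assuming an STS file 'boundary.sts' exists
# (hypothetical name):
#
#     boundary_polygon = create_sts_boundary('boundary')
#
# The returned list of absolute [x, y] points can be extended with further
# user-specified points before being used as a boundary polygon.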
[5418]5223
[4455]5224class Write_sww:
[4704]5225    from anuga.shallow_water.shallow_water_domain import Domain
5226
5227    # FIXME (Ole): Hardwiring the conserved quantities like
5228    # this could be a problem. I would prefer taking them from
5229    # the instantiation of Domain.
[4780]5230    #
5231    # (DSG) There is not always a Domain instance when Write_sww is used.
5232    # Check to see if this is the same level of hardwiring as is in
5233    # shallow water domain.
5234   
[4455]5235    sww_quantities = Domain.conserved_quantities
[4704]5236
5237
[4455]5238    RANGE = '_range'
[4704]5239    EXTREMA = ':extrema'
[4699]5240
[4455]5241    def __init__(self):
5242        pass
[4387]5243   
[4704]5244    def store_header(self,
5245                     outfile,
5246                     times,
5247                     number_of_volumes,
5248                     number_of_points,
5249                     description='Converted from XXX',
5250                     smoothing=True,
[4862]5251                     order=1,
5252                     sww_precision=Float32,
5253                     verbose=False):
[4455]5254        """
5255        outfile - handle to the open NetCDF file that will be written
[4558]5256        times - A list of the time slice times OR a start time
5257        Note, if a list is given the info will be made relative.
[4455]5258        number_of_volumes - the number of triangles
5259        """
[4268]5260   
[4455]5261        outfile.institution = 'Geoscience Australia'
5262        outfile.description = description
[4268]5263
[4699]5264        # For sww compatibility
[4455]5265        if smoothing is True:
5266            # Smoothing to be deprecated
5267            outfile.smoothing = 'Yes'
5268            outfile.vertices_are_stored_uniquely = 'False'
5269        else:
5270            # Smoothing to be deprecated
5271            outfile.smoothing = 'No'
5272            outfile.vertices_are_stored_uniquely = 'True'
5273        outfile.order = order
[4268]5274
[4455]5275        try:
5276            revision_number = get_revision_number()
5277        except:
5278            revision_number = None
[4699]5279        # Allow None to be stored as a string               
5280        outfile.revision_number = str(revision_number) 
[4268]5281
[4699]5282
5283       
5284        # times - A list or array of the time slice times OR a start time
5285        # times = ensure_numeric(times)
5286        # Start time in seconds since the epoch (midnight 1/1/1970)
5287
5288        # This is being used to separate a single start time from a list
[4455]5289        # or numeric array of time slice times.
5290        if type(times) is list or type(times) is ArrayType: 
5291            number_of_times = len(times)
[4558]5292            times = ensure_numeric(times) 
5293            if number_of_times == 0:
5294                starttime = 0
5295            else:
5296                starttime = times[0]
5297                times = times - starttime  #Store relative times
[4455]5298        else:
5299            number_of_times = 0
[4558]5300            starttime = times
5301            #times = ensure_numeric([])
5302        outfile.starttime = starttime
[4455]5303        # dimension definitions
5304        outfile.createDimension('number_of_volumes', number_of_volumes)
5305        outfile.createDimension('number_of_vertices', 3)
5306        outfile.createDimension('numbers_in_range', 2)
5307   
5308        if smoothing is True:
5309            outfile.createDimension('number_of_points', number_of_points)
5310       
5311            # FIXME(Ole): This will cause sww files for parallel domains to
5312            # have ghost nodes stored (but not used by triangles).
5313            # To clean this up, we have to change get_vertex_values and
5314            # friends in quantity.py (but I can't be bothered right now)
5315        else:
5316            outfile.createDimension('number_of_points', 3*number_of_volumes)
5317        outfile.createDimension('number_of_timesteps', number_of_times)
[4268]5318
[4455]5319        # variable definitions
[4862]5320        outfile.createVariable('x', sww_precision, ('number_of_points',))
5321        outfile.createVariable('y', sww_precision, ('number_of_points',))
5322        outfile.createVariable('elevation', sww_precision, ('number_of_points',))
[4455]5323        q = 'elevation'
[4862]5324        outfile.createVariable(q+Write_sww.RANGE, sww_precision,
[4455]5325                               ('numbers_in_range',))
[4268]5326
[4704]5327
[4699]5328        # Initialise ranges with small and large sentinels.
5329        # If this was in pure Python we could have used None sensibly
[4862]5330        outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
[4699]5331        outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
5332
[4704]5333        # FIXME: Backwards compatibility
[4862]5334        outfile.createVariable('z', sww_precision, ('number_of_points',))
[4455]5335        #################################
[4268]5336
[4455]5337        outfile.createVariable('volumes', Int, ('number_of_volumes',
5338                                                'number_of_vertices'))
[4862]5339        # Doing sww_precision instead of Float gives cast errors.
5340        outfile.createVariable('time', Float,
[4455]5341                               ('number_of_timesteps',))
5342       
5343        for q in Write_sww.sww_quantities:
[4862]5344            outfile.createVariable(q, sww_precision,
[4455]5345                                   ('number_of_timesteps',
5346                                    'number_of_points')) 
[4862]5347            outfile.createVariable(q+Write_sww.RANGE, sww_precision,
[4455]5348                                   ('numbers_in_range',))
[4699]5349
5350            # Initialise ranges with small and large sentinels.
5351            # If this was in pure Python we could have used None sensibly
[4862]5352            outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
[4699]5353            outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
[4455]5354           
[4541]5355        if type(times) is list or type(times) is ArrayType: 
5356            outfile.variables['time'][:] = times    #Store time relative
5357           
[4455]5358        if verbose:
5359            print '------------------------------------------------'
5360            print 'Statistics:'
5361            print '    t in [%f, %f], len(t) == %d'\
5362                  %(min(times.flat), max(times.flat), len(times.flat))
[4268]5363
[4455]5364       
[4704]5365    def store_triangulation(self,
5366                            outfile,
5367                            points_utm,
5368                            volumes,
5369                            elevation, zone=None, new_origin=None, 
5370                            points_georeference=None, verbose=False):
[4455]5371        """
5372       
5373        new_origin - a georeference that the points can be set to. (Maybe
5374        do this before calling this function.)
5375       
5376        points_utm - currently a list or array of the points in UTM.
5377        points_georeference - the georeference of the points_utm
5378       
5379        How about passing new_origin and current_origin.
5380        If you get both, do a conversion from the old to the new.
5381       
[4665]5382        If you only get new_origin, the points are absolute,
5383        convert to relative
[4455]5384       
5385        if you only get the current_origin the points are relative, store
5386        as relative.
5387       
5388        if you get no georefs create a new georef based on the minimums of
5389        points_utm.  (Another option would be to default to absolute)
5390       
5391        Yes, and this is done in another part of the code.
5392        Probably geospatial.
5393       
5394        If you don't supply either geo_refs, then supply a zone. If not
5395        the default zone will be used.
5396       
5397       
5398        precondition:
5399       
5400        store_header has been called.
5401        """
5402       
5403        number_of_points = len(points_utm)   
[4665]5404        volumes = array(volumes) 
5405        points_utm = array(points_utm)
[4268]5406
[4455]5407        # given the two geo_refs and the points, do the stuff
5408        # described in the method header
5409        # if this is needed else where, pull out as a function
5410        points_georeference = ensure_geo_reference(points_georeference)
5411        new_origin = ensure_geo_reference(new_origin)
5412        if new_origin is None and points_georeference is not None:
5413            points = points_utm
5414            geo_ref = points_georeference
5415        else:
5416            if new_origin is None:
5417                new_origin = Geo_reference(zone,min(points_utm[:,0]),
5418                                           min(points_utm[:,1]))
5419            points = new_origin.change_points_geo_ref(points_utm,
5420                                                      points_georeference)
5421            geo_ref = new_origin
[4268]5422
[4455]5423        # At this stage I need a georef and points
5424        # the points are relative to the georef
5425        geo_ref.write_NetCDF(outfile)
[4280]5426   
[4455]5427        # This will put the geo ref in the middle
[4665]5428        #geo_ref=Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
5429       
[4455]5430        x =  points[:,0]
5431        y =  points[:,1]
5432        z = outfile.variables['z'][:]
5433   
5434        if verbose:
5435            print '------------------------------------------------'
5436            print 'More Statistics:'
5437            print '  Extent (lat/lon):'
5438            print '    x in [%f, %f], len(lat) == %d'\
5439                  %(min(x), max(x),
5440                    len(x))
5441            print '    y in [%f, %f], len(lon) == %d'\
5442                  %(min(y), max(y),
5443                    len(y))
5444            print '    z in [%f, %f], len(z) == %d'\
5445                  %(min(elevation), max(elevation),
5446                    len(elevation))
5447            print 'geo_ref: ',geo_ref
5448            print '------------------------------------------------'
5449           
5450        #z = resize(bath_grid,outfile.variables['z'][:].shape)
[4862]5451        #print "points[:,0]", points[:,0]
[4455]5452        outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner()
5453        outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner()
5454        outfile.variables['z'][:] = elevation
5455        outfile.variables['elevation'][:] = elevation  #FIXME HACK
5456        outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
[4268]5457
[4455]5458        q = 'elevation'
5459        # This updates the _range values
5460        outfile.variables[q+Write_sww.RANGE][0] = min(elevation)
5461        outfile.variables[q+Write_sww.RANGE][1] = max(elevation)
[4280]5462
[4704]5463
[4862]5464    def store_quantities(self, outfile, sww_precision=Float32,
[4704]5465                         slice_index=None, time=None,
5466                         verbose=False, **quant):
[4455]5467        """
5468        Write the quantity info.
[4280]5469
[4455]5470        **quant is extra keyword arguments passed in. These must be
5471          the sww quantities, currently; stage, xmomentum, ymomentum.
5472       
5473        if the time array has already been built, use the slice_index
5474        to specify the index.
5475       
5476        Otherwise, use time to increase the time dimension
[4280]5477
[4455]5478        Maybe make this general, but the viewer assumes these quantities,
5479        so maybe we don't want it general - unless the viewer is general
5480       
5481        precondition:
5482        store_triangulation and
5483        store_header have been called.
5484        """
[4280]5485
[4455]5486        if time is not None:
5487            file_time = outfile.variables['time']
5488            slice_index = len(file_time)
5489            file_time[slice_index] = time   
5490
[4938]5491        # Write the conserved quantities from Domain.
[4455]5492        # Typically stage,  xmomentum, ymomentum
5493        # other quantities will be ignored, silently.
[4938]5494        # Also write the ranges: stage_range,
5495        # xmomentum_range and ymomentum_range
[4455]5496        for q in Write_sww.sww_quantities:
5497            if not quant.has_key(q):
5498                msg = 'SWW file can not write quantity %s' %q
5499                raise NewQuantity, msg
5500            else:
5501                q_values = quant[q]
[4862]5502                outfile.variables[q][slice_index] = \
5503                                q_values.astype(sww_precision)
[4455]5504
5505                # This updates the _range values
5506                q_range = outfile.variables[q+Write_sww.RANGE][:]
5507                q_values_min = min(q_values)
5508                if q_values_min < q_range[0]:
5509                    outfile.variables[q+Write_sww.RANGE][0] = q_values_min
5510                q_values_max = max(q_values)
5511                if q_values_max > q_range[1]:
5512                    outfile.variables[q+Write_sww.RANGE][1] = q_values_max
5513
5514    def verbose_quantities(self, outfile):
[4280]5515        print '------------------------------------------------'
5516        print 'More Statistics:'
[4455]5517        for q in Write_sww.sww_quantities:
[4699]5518            print '    %s in [%f, %f]' %(q,
5519                                       outfile.variables[q+Write_sww.RANGE][0],
5520                                       outfile.variables[q+Write_sww.RANGE][1])
[4280]5521        print '------------------------------------------------'
[4699]5522
[4704]5523
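# A minimal sketch of the intended calling sequence for Write_sww, assuming
# an open NetCDF file handle and precomputed geometry (hypothetical names):
#
#     sww = Write_sww()
#     sww.store_header(outfile, times, number_of_volumes, number_of_points)
#     sww.store_triangulation(outfile, points_utm, volumes, elevation,
#                             zone=56, new_origin=origin)
#     for j, (stage, xmom, ymom) in enumerate(timeslices):
#         sww.store_quantities(outfile, slice_index=j, stage=stage,
#                              xmomentum=xmom, ymomentum=ymom)
#
# The same header/triangulation/quantities pattern is used by
# urs_ungridded2sww above and by Write_sts below.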
[4455]5524       
5525def obsolete_write_sww_time_slices(outfile, has, uas, vas, elevation,
[4280]5526                         mean_stage=0, zscale=1,
5527                         verbose=False):   
5528    #Time stepping
5529    stage = outfile.variables['stage']
5530    xmomentum = outfile.variables['xmomentum']
5531    ymomentum = outfile.variables['ymomentum']
5532
5533    n = len(has)
5534    j=0
5535    for ha, ua, va in map(None, has, uas, vas):
5536        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
5537        w = zscale*ha + mean_stage
5538        stage[j] = w
5539        h = w - elevation
5540        xmomentum[j] = ua*h
[4348]5541        ymomentum[j] = -1*va*h  # -1 since in mux files south is positive.
[4280]5542        j += 1
[4455]5543   
[4303]5544def urs2txt(basename_in, location_index=None):
[4301]5545    """
5546    Not finished or tested
5547    """
[4378]5548   
5549    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
5550                basename_in + EAST_VELOCITY_LABEL,
5551                basename_in + NORTH_VELOCITY_LABEL]
[4301]5552    quantities = ['HA','UA','VA']
5553
[4303]5554    d = ","
5555   
5556    # Instantiate Urs_points for each of the three mux files.
5557    mux = {}
5558    for quantity, file in map(None, quantities, files_in):
5559        mux[quantity] = Urs_points(file)
[4280]5560       
[4301]5561    # Could check that the depth is the same. (hashing)
5562
5563    # handle to a mux file to do depth stuff
5564    a_mux = mux[quantities[0]]
5565   
5566    # Convert to utm
[4303]5567    latitudes = a_mux.lonlatdep[:,1]
5568    longitudes = a_mux.lonlatdep[:,0]
[4301]5569    points_utm, zone = convert_from_latlon_to_utm( \
[4303]5570        latitudes=latitudes, longitudes=longitudes)
[4301]5571    #print "points_utm", points_utm
5572    #print "zone", zone
[4303]5573    depths = a_mux.lonlatdep[:,2]  #
[4301]5574   
5575    fid = open(basename_in + '.txt', 'w')
5576
[4303]5577    fid.write("zone: " + str(zone) + "\n")
[4301]5578
[4303]5579    if location_index is not None:
5580        #Title
5581        li = location_index
5582        fid.write('location_index'+d+'lat'+d+ 'long' +d+ 'Easting' +d+ \
5583                  'Northing' + "\n")
5584        fid.write(str(li) +d+ str(latitudes[li])+d+ \
5585              str(longitudes[li]) +d+ str(points_utm[li][0]) +d+ \
5586              str(points_utm[li][1]) + "\n")
5587
5588    # the non-time dependent stuff
5589    #Title
5590    fid.write('location_index'+d+'lat'+d+ 'long' +d+ 'Easting' +d+ \
[5403]5591              'Northing' +d+ 'depth m' + "\n")
[4303]5592    i = 0
5593    for depth, point_utm, lat, long in map(None, depths,
5594                                               points_utm, latitudes,
5595                                               longitudes):
5596       
5597        fid.write(str(i) +d+ str(lat)+d+ str(long) +d+ str(point_utm[0]) +d+ \
5598                  str(point_utm[1]) +d+ str(depth) + "\n")
5599        i +=1
5600    #Time dependent
5601    if location_index is not None:
5602        time_step = a_mux.time_step
5603        i = 0
5604        #Title
5605        fid.write('time' +d+ 'HA depth m'+d+ \
[4665]5606                 'UA momentum East x m/sec' +d+ 'VA momentum North y m/sec' \
[4303]5607                      + "\n")
5608        for HA, UA, VA in map(None, mux['HA'], mux['UA'], mux['VA']):
5609            fid.write(str(i*time_step) +d+ str(HA[location_index])+d+ \
5610                      str(UA[location_index]) +d+ str(VA[location_index]) \
5611                      + "\n")
5612           
5613            i +=1
[5347]5614
5615class Write_sts:
5616
5617    sts_quantities = ['stage','xmomentum','ymomentum']
5618
5619
5620    RANGE = '_range'
5621    EXTREMA = ':extrema'
[4301]5622   
[5347]5623    def __init__(self):
5624        pass
5625
5626    def store_header(self,
5627                     outfile,
5628                     times,
5629                     number_of_points,
5630                     description='Converted from URS mux2 format',
5631                     sts_precision=Float32,
5632                     verbose=False):
5633        """
5634        outfile - handle to the open NetCDF file that will be written
5635        times - A list of the time slice times OR a start time
5636        Note, if a list is given the info will be made relative.
5637        number_of_points - the number of urs gauge sites
5638        """
5639
5640        outfile.institution = 'Geoscience Australia'
5641        outfile.description = description
5642       
5643        try:
5644            revision_number = get_revision_number()
5645        except:
5646            revision_number = None
5647        # Allow None to be stored as a string               
5648        outfile.revision_number = str(revision_number) 
5649       
5650        # times - A list or array of the time slice times OR a start time
5651        # Start time in seconds since the epoch (midnight 1/1/1970)
5652
5653        # This is being used to separate a single start time from a list
5654        # or numeric array of time slice times.
5655        if type(times) is list or type(times) is ArrayType: 
5656            number_of_times = len(times)
5657            times = ensure_numeric(times) 
5658            if number_of_times == 0:
5659                starttime = 0
5660            else:
5661                starttime = times[0]
5662                times = times - starttime  #Store relative times
5663        else:
5664            number_of_times = 0
5665            starttime = times
5666
5667        outfile.starttime = starttime
5668
5669        # Dimension definitions
5670        outfile.createDimension('number_of_points', number_of_points)
5671        outfile.createDimension('number_of_timesteps', number_of_times)
5672        outfile.createDimension('numbers_in_range', 2)
5673
5674        # Variable definitions
5675        outfile.createVariable('x', sts_precision, ('number_of_points',))
5676        outfile.createVariable('y', sts_precision, ('number_of_points',))
5677        outfile.createVariable('elevation', sts_precision,('number_of_points',))
5678
5679        q = 'elevation'
5680        outfile.createVariable(q+Write_sts.RANGE, sts_precision,
5681                               ('numbers_in_range',))
5682
5683        # Initialise ranges with small and large sentinels.
5684        # If this was in pure Python we could have used None sensibly
5685        outfile.variables[q+Write_sts.RANGE][0] = max_float  # Min
5686        outfile.variables[q+Write_sts.RANGE][1] = -max_float # Max
5687
5688        # Doing sts_precision instead of Float gives cast errors.
5689        outfile.createVariable('time', Float, ('number_of_timesteps',))
5690
5691        for q in Write_sts.sts_quantities:
5692            outfile.createVariable(q, sts_precision,
5693                                   ('number_of_timesteps',
5694                                    'number_of_points'))
5695            outfile.createVariable(q+Write_sts.RANGE, sts_precision,
5696                                   ('numbers_in_range',))
5697            # Initialise ranges with small and large sentinels.
5698            # If this was in pure Python we could have used None sensibly
5699            outfile.variables[q+Write_sts.RANGE][0] = max_float  # Min
5700            outfile.variables[q+Write_sts.RANGE][1] = -max_float # Max
5701
5702        if type(times) is list or type(times) is ArrayType: 
5703            outfile.variables['time'][:] = times    #Store time relative
5704
5705        if verbose:
5706            print '------------------------------------------------'
5707            print 'Statistics:'
5708            print '    t in [%f, %f], len(t) == %d'\
5709                  %(min(times.flat), max(times.flat), len(times.flat))
5710
5711    def store_points(self,
5712                     outfile,
5713                     points_utm,
5714                     elevation, zone=None, new_origin=None, 
5715                     points_georeference=None, verbose=False):
5716
5717        """
5718        points_utm - currently a list or array of the points in UTM.
5719        points_georeference - the georeference of the points_utm
5720
5721        How about passing new_origin and current_origin.
5722        If you get both, do a conversion from the old to the new.
5723       
5724        If you only get new_origin, the points are absolute,
5725        convert to relative
5726       
5727        if you only get the current_origin the points are relative, store
5728        as relative.
5729       
5730        if you get no georefs create a new georef based on the minimums of
5731        points_utm.  (Another option would be to default to absolute)
5732       
5733        Yes, and this is done in another part of the code.
5734        Probably geospatial.
5735       
5736        If you don't supply either geo_refs, then supply a zone. If not
5737        the default zone will be used.
5738
5739        precondition:
5740             header has been called.
5741        """
5742
5743        number_of_points = len(points_utm)
5744        points_utm = array(points_utm)
5745
5746        # given the two geo_refs and the points, do the stuff
5747        # described in the method header
5748        points_georeference = ensure_geo_reference(points_georeference)
5749        new_origin = ensure_geo_reference(new_origin)
5750       
5751        if new_origin is None and points_georeference is not None:
5752            points = points_utm
5753            geo_ref = points_georeference
5754        else:
5755            if new_origin is None:
5756                new_origin = Geo_reference(zone,min(points_utm[:,0]),
5757                                           min(points_utm[:,1]))
5758            points = new_origin.change_points_geo_ref(points_utm,
5759                                                      points_georeference)
5760            geo_ref = new_origin
5761
5762        # At this stage I need a georef and points
5763        # the points are relative to the georef
5764        geo_ref.write_NetCDF(outfile)
5765
5766        x =  points[:,0]
5767        y =  points[:,1]
5769   
5770        if verbose:
5771            print '------------------------------------------------'
5772            print 'More Statistics:'
5773            print '  Extent (lat/lon):'
5774            print '    x in [%f, %f], len(lat) == %d'\
5775                  %(min(x), max(x),
5776                    len(x))
5777            print '    y in [%f, %f], len(lon) == %d'\
5778                  %(min(y), max(y),
5779                    len(y))
5780            print '    z in [%f, %f], len(z) == %d'\
5781                  %(min(elevation), max(elevation),
5782                    len(elevation))
5783            print 'geo_ref: ',geo_ref
5784            print '------------------------------------------------'
5785           
5786        #z = resize(bath_grid,outfile.variables['z'][:].shape)
5787        #print "points[:,0]", points[:,0]
5788        outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner()
5789        outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner()
5790        outfile.variables['elevation'][:] = elevation
5792
5793        q = 'elevation'
5794        # This updates the _range values
5795        outfile.variables[q+Write_sts.RANGE][0] = min(elevation)
5796        outfile.variables[q+Write_sts.RANGE][1] = max(elevation)
5797
5798    def store_quantities(self, outfile, sts_precision=Float32,
5799                         slice_index=None, time=None,
5800                         verbose=False, **quant):
5801       
5802        """
5803        Write the quantity info.
5804
5805        **quant is extra keyword arguments passed in. These must be
5806          the sts quantities, currently; stage.
5807       
5808        if the time array has already been built, use the slice_index
5809        to specify the index.
5810       
5811        Otherwise, use time to increase the time dimension
5812
5813        Maybe make this general, but the viewer assumes these quantities,
5814        so maybe we don't want it general - unless the viewer is general
5815       
5816        precondition:
5817            triangulation and
5818            header have been called.
5819        """
5820        if time is not None:
5821            file_time = outfile.variables['time']
5822            slice_index = len(file_time)
5823            file_time[slice_index] = time   
5824
5825        # Write the conserved quantities from Domain.
5826        # Typically stage,  xmomentum, ymomentum
5827        # other quantities will be ignored, silently.
5828        # Also write the ranges: stage_range
5829        for q in Write_sts.sts_quantities:
5830            if not quant.has_key(q):
5831                msg = 'STS file can not write quantity %s' %q
5832                raise NewQuantity, msg
5833            else:
5834                q_values = quant[q]
5835                outfile.variables[q][slice_index] = \
5836                                q_values.astype(sts_precision)
5837
5838                # This updates the _range values
5839                q_range = outfile.variables[q+Write_sts.RANGE][:]
5840                q_values_min = min(q_values)
5841                if q_values_min < q_range[0]:
5842                    outfile.variables[q+Write_sts.RANGE][0] = q_values_min
5843                q_values_max = max(q_values)
5844                if q_values_max > q_range[1]:
5845                    outfile.variables[q+Write_sts.RANGE][1] = q_values_max
5846
5847
5848   
[4271]5849class Urs_points:
5850    """
5851    Read the info in URS mux files.
[4268]5852
[4271]5853    For the quantities here's a correlation between the file names and
5854    what they mean;
5855    z-mux is height above sea level, m
5856    e-mux is velocity in the easterly direction, m/s
[4280]5857    n-mux is velocity in the northerly direction, m/s
[4271]5858    """
[4268]5859    def __init__(self,urs_file):
[4271]5860        self.iterated = False
[4268]5861        columns = 3 # long, lat , depth
5862        mux_file = open(urs_file, 'rb')
5863       
5864        # Number of points/stations
5865        (self.points_num,)= unpack('i',mux_file.read(4))
5866       
5867        # nt, int - Number of time steps
5868        (self.time_step_count,)= unpack('i',mux_file.read(4))
[4280]5869        #print "self.time_step_count", self.time_step_count
[4268]5870        #dt, float - time step, seconds
5871        (self.time_step,) = unpack('f', mux_file.read(4))
[4280]5872        #print "self.time_step", self.time_step
[4268]5873        msg = "Bad data in the urs file."
5874        if self.points_num < 0:
5875            mux_file.close()
5876            raise ANUGAError, msg
5877        if self.time_step_count < 0:
5878            mux_file.close()
5879            raise ANUGAError, msg
5880        if self.time_step < 0:
5881            mux_file.close()
5882            raise ANUGAError, msg
[4271]5883
5884        # The depth is in metres, and it is the distance from the ocean
5885        # surface to the sea bottom.
[4268]5886        lonlatdep = p_array.array('f')
5887        lonlatdep.read(mux_file, columns * self.points_num)
5888        lonlatdep = array(lonlatdep, typecode=Float)   
5889        lonlatdep = reshape(lonlatdep, (self.points_num, columns))
5890        #print 'lonlatdep',lonlatdep
5891        self.lonlatdep = lonlatdep
[4271]5892       
5893        self.mux_file = mux_file
[4268]5894        # check this array
5895
5896    def __iter__(self):
5897        """
[4271]5898        Iterate over quantity data which is with respect to time.
5899
5900        Note: You can only iterate once over an object.
5901       
5902        Returns quantity information for each time slice.
[4268]5903        """
[4271]5904        msg =  "You can only iterate once over a urs file."
5905        assert not self.iterated, msg
[4280]5906        self.iter_time_step = 0
[4271]5907        self.iterated = True
[4268]5908        return self
5909   
5910    def next(self):
[4280]5911        if self.time_step_count == self.iter_time_step:
[4271]5912            self.close()
[4268]5913            raise StopIteration
5914        #Read in a time slice  from mux file 
5915        hz_p_array = p_array.array('f')
[4271]5916        hz_p_array.read(self.mux_file, self.points_num)
[4268]5917        hz_p = array(hz_p_array, typecode=Float)
[4280]5918        self.iter_time_step += 1
[4271]5919       
5920        return hz_p
[4268]5921
5922    def close(self):
5923        self.mux_file.close()
[4500]5924       
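# A minimal iteration sketch for Urs_points, assuming a mux file
# 'project-z-mux' exists (hypothetical name).  An instance can only be
# iterated over once and closes its file when exhausted:
#
#     urs = Urs_points('project-z-mux')
#     print urs.points_num, urs.time_step_count, urs.time_step
#     for hz in urs:       # one array of length points_num per time step
#         print hz[0]      # the quantity at the first gauge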
[4223]5925    #### END URS UNGRIDDED 2 SWW ###
5926
[4500]5927       
[5070]5928def start_screen_catcher(dir_name=None, myid='', numprocs='', extra_info='',
[4924]5929                         verbose=True):
[5070]5930    """
[4500]5931    Used to store screen output and errors to file. If run on multiple
5932    processes each processor will have its own output and error file.
5933   
5934    extra_info - is used as a string that can identify outputs with another
5935    string eg. '_other'
[4850]5936   
5937    FIXME: Would be good if you could suppress all the screen output and
[5070]5938    only save it to file... however it seems a bit tricky as this capture
[4850]5939    technique responds to sys.stdout and by this time it is already printed out.
[5070]5940    """
[4850]5941   
    import sys

    if dir_name is None:
        dir_name = getcwd()

    if access(dir_name, W_OK) == 0:
        if verbose: print 'Making directory %s' %dir_name
        mkdir(dir_name, 0777)
[5070]5951
    if myid != '':
        myid = '_' + str(myid)
    if numprocs != '':
        numprocs = '_' + str(numprocs)
    if extra_info != '':
        extra_info = '_' + str(extra_info)
[5070]5959    screen_output_name = join(dir_name, "screen_output%s%s%s.txt" %(myid,
[4665]5960                                                                numprocs,
[5070]5961                                                                extra_info))
5962    screen_error_name = join(dir_name,  "screen_error%s%s%s.txt" %(myid,
[4665]5963                                                              numprocs,
[5070]5964                                                              extra_info))
[4924]5965
5966    if verbose: print 'Starting ScreenCatcher, all output will be stored in %s' \
5967                                     %(screen_output_name)
    # Redirect screen output and errors to the catcher files
5969    sys.stdout = Screen_Catcher(screen_output_name)
5970    sys.stderr = Screen_Catcher(screen_error_name)
5971
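
# A minimal usage sketch for start_screen_catcher (illustrative only, not
# called anywhere). 'screen_dir' is a hypothetical directory name.
def example_start_screen_catcher():
    start_screen_catcher('screen_dir', myid=0, numprocs=1)
    print 'This text is appended to screen_dir/screen_output_0_1.txt'
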
5972class Screen_Catcher:
5973    """this simply catches the screen output and stores it to file defined by
5974    start_screen_catcher (above)
5975    """
5976   
    def __init__(self, filename):
        self.filename = filename
        if exists(self.filename):
            print 'Old existing file "%s" has been deleted' %(self.filename)
            remove(self.filename)
5983
5984    def write(self, stuff):
5985        fid = open(self.filename, 'a')
5986        fid.write(stuff)
[4924]5987        fid.close()
5988       
[5391]5989# FIXME (DSG): Add unit test, make general, not just 2 files,
5990# but any number of files.
[4850]5991def copy_code_files(dir_name, filename1, filename2=None):
[4500]5992    """Copies "filename1" and "filename2" to "dir_name". Very useful for
5993    information management
5994    filename1 and filename2 are both absolute pathnames   
5995    """
5996
    if access(dir_name, F_OK) == 0:
        print 'Make directory %s' %dir_name
        mkdir(dir_name, 0777)
    shutil.copy(filename1, dir_name + sep + basename(filename1))
    if filename2 is not None:
        shutil.copy(filename2, dir_name + sep + basename(filename2))
        print 'Files %s and %s copied' %(filename1, filename2)
    else:
        print 'File %s copied' %(filename1)
[4500]6006
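# A minimal usage sketch for copy_code_files (illustrative only, not called
# anywhere). The directory and script names are hypothetical.
def example_copy_code_files():
    copy_code_files('output_dir', 'run_model.py', 'project.py')
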
6007def get_data_from_file(filename,separator_value = ','):
6008    """
6009    Read in data information from file and
6010   
6011    Returns:
6012        header_fields, a string? of the first line separated
6013        by the 'separator_value'
6014       
6015        data, a array (N data columns X M lines) in the file
6016        excluding the header
6017       
6018    NOTE: wont deal with columns with different lenghts and there must be
6019    no blank lines at the end.
6020    """
6021   
6022    fid = open(filename)
6023    lines = fid.readlines()
6024   
6025    fid.close()
6026   
6027    header_line = lines[0]
6028    header_fields = header_line.split(separator_value)
6029
    # Array to store the data, resized to (number of data lines,
    # number of fields)
    data = array([], typecode=Float)
    data = resize(data, ((len(lines)-1), len(header_fields)))
6037
    array_number = 0
    line_number = 1
    while line_number < len(lines):
        # line_number skips the header (hence starting at 1);
        # array_number indexes the corresponding row of the data array
        fields = lines[line_number].split(separator_value)
        for i in range(len(header_fields)):
            data[array_number, i] = float(fields[i])

        line_number = line_number + 1
        array_number = array_number + 1
6050       
6051    return header_fields, data
6052
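# A minimal usage sketch for get_data_from_file (illustrative only, not
# called anywhere). 'gauges.csv' is a hypothetical file with one header
# line followed by rows of comma separated numbers.
def example_get_data_from_file():
    header_fields, data = get_data_from_file('gauges.csv')
    print 'Fields:', header_fields
    print 'First row:', data[0, :]
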
6053def store_parameters(verbose=False,**kwargs):
6054    """
[4519]6055    Store "kwargs" into a temp csv file, if "completed" is a kwargs csv file is
6056    kwargs[file_name] else it is kwargs[output_dir] + details_temp.csv
6057   
[4500]6058    Must have a file_name keyword arg, this is what is writing to.
6059    might be a better way to do this using CSV module Writer and writeDict
6060   
[4665]6061    writes file to "output_dir" unless "completed" is in kwargs, then
6062    it writes to "file_name" kwargs
[4519]6063
[4500]6064    """
6065    import types
[4500]6067   
6068    # Check that kwargs is a dictionary
6069    if type(kwargs) != types.DictType:
6070        raise TypeError
6071   
    # Is 'completed' in kwargs?
    completed = 'completed' in kwargs
6078 
    # Get the file name and assert that file_name or output_dir exists
    if completed:
        try:
            file = str(kwargs['file_name'])
        except:
            raise Exception, 'kwargs must have file_name'
    else:
        # Write temp file in output directory
        try:
            file = str(kwargs['output_dir']) + 'detail_temp.csv'
        except:
            raise Exception, 'kwargs must have output_dir'
6091       
    # Extract the header info and the new line info
    line = ''
    header = ''
    count = 0
    keys = kwargs.keys()
    keys.sort()

    # Use the sorted keys to create the header and line data
    for k in keys:
        header = header + str(k)
        line = line + str(kwargs[k])
        count += 1
        if count < len(kwargs):
            header = header + ','
            line = line + ','
    header += '\n'
    line += '\n'
[4500]6110
    # Check the header info: if it matches, append; if not, create a new file
    try:
        fid = open(file, "r")
        file_header = fid.readline()
        fid.close()
        if verbose: print 'read file header %s' %file_header

    except:
        msg = 'Trying to create new file %s' %file
        if verbose: print msg
        # Try to create the file; maybe the directory is bad
        try:
            fid = open(file, "w")
            fid.write(header)
            fid.close()
            file_header = header
        except:
            msg = 'Cannot create new file %s' %file
            raise Exception, msg
6131           
    # If the header is the same, or this is a new file, append the line
    if file_header == str(header):
        fid = open(file, "a")
        fid.write(line)
        fid.close()
    else:
        # Backup plan: if the header is different, append header and line
        # to the end of detail_temp.csv in the output directory
        file = str(kwargs['output_dir']) + 'detail_temp.csv'
        fid = open(file, "a")
        fid.write(header)
        fid.write(line)
        fid.close()
        if verbose: print 'file', file_header.strip('\n')
        if verbose: print 'head', header.strip('\n')
        msg = 'WARNING: File header does not match input info; the input '\
              'variables have changed. Suggest changing the file name.'
        print msg
6152
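# A minimal usage sketch for store_parameters (illustrative only, not
# called anywhere). The keyword names below are hypothetical run metadata;
# a row is appended to output_dir + 'detail_temp.csv'.
def example_store_parameters():
    store_parameters(verbose=False,
                     output_dir='results/',
                     run_label='trial_1',
                     cellsize=10)
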
6153
[4551]6154
#------------------------------------------------
# Functions to obtain diagnostics from sww files
#------------------------------------------------
[4551]6158
[5276]6159def get_mesh_and_quantities_from_file(filename,
6160                                      quantities=None,
6161                                      verbose=False):
[5226]6162    """Get and rebuild mesh structure and the associated quantities from sww file
[5276]6163
6164    Input:
6165        filename - Name os sww file
6166        quantities - Names of quantities to load
6167
6168    Output:
6169        mesh - instance of class Interpolate
6170               (including mesh and interpolation functionality)
6171        quantities - arrays with quantity values at each mesh node
6172        time - vector of stored timesteps
[5226]6173    """
[5276]6174 
6175    # FIXME (Ole): Maybe refactor filefunction using this more fundamental code.
6176   
[5226]6177    import types
[5276]6178    from Scientific.IO.NetCDF import NetCDFFile
6179    from shallow_water import Domain
6180    from Numeric import asarray, transpose, resize
6181    from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh
[5226]6182
[5276]6183    if verbose: print 'Reading from ', filename
6184    fid = NetCDFFile(filename, 'r')    # Open existing file for read
[5307]6185    time = fid.variables['time'][:]    # Time vector
[5276]6186    time += fid.starttime[0]
[5226]6187   
[5276]6188    # Get the variables as Numeric arrays
6189    x = fid.variables['x'][:]                   # x-coordinates of nodes
6190    y = fid.variables['y'][:]                   # y-coordinates of nodes
6191    elevation = fid.variables['elevation'][:]   # Elevation
6192    stage = fid.variables['stage'][:]           # Water level
6193    xmomentum = fid.variables['xmomentum'][:]   # Momentum in the x-direction
6194    ymomentum = fid.variables['ymomentum'][:]   # Momentum in the y-direction
[5226]6195
6196
[5276]6197    # Mesh (nodes (Mx2), triangles (Nx3))
6198    nodes = concatenate( (x[:, NewAxis], y[:, NewAxis]), axis=1 )
6199    triangles = fid.variables['volumes'][:]
6200   
6201    # Get geo_reference
6202    try:
6203        geo_reference = Geo_reference(NetCDFObject=fid)
6204    except: #AttributeError, e:
6205        # Sww files don't have to have a geo_ref
6206        geo_reference = None
[5226]6207
[5276]6208    if verbose: print '    building mesh from sww file %s' %filename
6209    boundary = None
[5226]6210
[5276]6211    #FIXME (Peter Row): Should this be in mesh?
6212    if fid.smoothing != 'Yes':
6213        nodes = nodes.tolist()
6214        triangles = triangles.tolist()
6215        nodes, triangles, boundary=weed(nodes, triangles, boundary)
[5226]6216
6217
6218    try:
[5276]6219        mesh = Mesh(nodes, triangles, boundary,
6220                    geo_reference=geo_reference)
6221    except AssertionError, e:
6222        fid.close()
        msg = 'Domain could not be created: %s' %e
6224        raise DataDomainError, msg
[5226]6225
6226
6227    quantities = {}
[5276]6228    quantities['elevation'] = elevation
6229    quantities['stage'] = stage   
6230    quantities['xmomentum'] = xmomentum
6231    quantities['ymomentum'] = ymomentum   
[5336]6232
6233    fid.close()
[5276]6234    return mesh, quantities, time
[5226]6235
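# A minimal usage sketch for get_mesh_and_quantities_from_file (illustrative
# only, not called anywhere). 'model_output.sww' is a hypothetical filename.
def example_get_mesh_and_quantities():
    mesh, quantities, time = \
          get_mesh_and_quantities_from_file('model_output.sww')
    print 'Number of timesteps:', len(time)
    print 'Stage at first node, first timestep:', quantities['stage'][0, 0]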
6236
6237
6238def get_flow_through_cross_section(filename,
6239                                   polyline,
6240                                   verbose=False):
[5276]6241    """Obtain flow (m^3/s) perpendicular to specified cross section.
[5226]6242
6243    Inputs:
6244        filename: Name of sww file
6245        polyline: Representation of desired cross section - it may contain multiple
[5288]6246                  sections allowing for complex shapes. Assume absolute UTM coordinates.
6247                  Format [[x0, y0], [x1, y1], ...]
[5226]6248
6249    Output:
[5288]6250        time: All stored times in sww file
[5276]6251        Q: Hydrograph of total flow across given segments for all stored times.
[5226]6252
[5276]6253    The normal flow is computed for each triangle intersected by the polyline and
[5288]6254    added up.  Multiple segments at different angles are specified the normal flows
[5276]6255    may partially cancel each other.
[5226]6256
[5288]6257    The typical usage of this function would be to get flow through a channel,
6258    and the polyline would then be a cross section perpendicular to the flow.
6259
[5226]6260    """
6261
[5288]6262    from anuga.fit_interpolate.interpolate import Interpolation_function
6263
6264    quantity_names =['elevation',
6265                     'stage',
6266                     'xmomentum',
6267                     'ymomentum']
6268
[5226]6269    # Get mesh and quantities from sww file
[5276]6270    X = get_mesh_and_quantities_from_file(filename,
[5288]6271                                          quantities=quantity_names,
[5276]6272                                          verbose=verbose)
6273    mesh, quantities, time = X
[5226]6274
6275
[5288]6276    # Adjust polyline to mesh spatial origin
6277    polyline = mesh.geo_reference.get_relative(polyline)
[5226]6278   
6279    # Find all intersections and associated triangles.
[5321]6280    segments = mesh.get_intersecting_segments(polyline, verbose=verbose)
6284
[5226]6285   
6286    # Then store for each triangle the length of the intersecting segment(s),
[5288]6287    # right hand normal(s) and midpoints as interpolation_points.
6288    interpolation_points = []
6289    for segment in segments:
6290        midpoint = sum(array(segment.segment))/2
6291        interpolation_points.append(midpoint)
[5226]6292
[5288]6293
[5321]6294    if verbose:
6295        print 'Interpolating - ',       
6296        print 'total number of interpolation points = %d'\
6297              %len(interpolation_points)
6298
[5288]6299    I = Interpolation_function(time,
6300                               quantities,
6301                               quantity_names=quantity_names,
6302                               vertex_coordinates=mesh.nodes,
6303                               triangles=mesh.triangles,
6304                               interpolation_points=interpolation_points,
6305                               verbose=verbose)
6306
[5321]6307    if verbose: print 'Computing hydrograph'
[5288]6308    # Compute hydrograph
6309    Q = []
6310    for t in time:
6311        total_flow=0
6312        for i, p in enumerate(interpolation_points):
6313            elevation, stage, uh, vh = I(t, point_id=i)
6314            normal = segments[i].normal
6315
6316            # Inner product of momentum vector with segment normal [m^2/s]
6317            normal_momentum = uh*normal[0] + vh*normal[1] 
6318
6319            # Flow across this segment [m^3/s]
6320            segment_flow = normal_momentum*segments[i].length
6321
6322            # Accumulate
6323            total_flow += segment_flow
6324             
6325
6326        # Store flow at this timestep   
6327        Q.append(total_flow)
6328   
6329
6330
[5276]6331    return time, Q
6332   
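# A minimal usage sketch for get_flow_through_cross_section (illustrative
# only, not called anywhere). The filename and the cross-section
# coordinates are hypothetical absolute UTM values.
def example_get_flow_through_cross_section():
    cross_section = [[320000.0, 6190000.0], [320200.0, 6190000.0]]
    time, Q = get_flow_through_cross_section('model_output.sww',
                                             cross_section)
    for t, q in zip(time, Q):
        print 'Time %.1f s: flow %.3f m^3/s' %(t, q)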
[5226]6333
6334
[5276]6335
6336
[4554]6337def get_maximum_inundation_elevation(filename,
6338                                     polygon=None,
6339                                     time_interval=None,
6340                                     verbose=False):
6341   
6342    """Return highest elevation where depth > 0
6343   
6344    Usage:
6345    max_runup = get_maximum_inundation_elevation(filename,
6346                                                 polygon=None,
6347                                                 time_interval=None,
6348                                                 verbose=False)
6349
    filename is a NetCDF sww file containing ANUGA model output.
    Optional arguments polygon and time_interval restrict the maximum
    runup calculation to points that lie within the specified polygon
    and time interval.
6354
6355    If no inundation is found within polygon and time_interval the return value
6356    is None signifying "No Runup" or "Everything is dry".
6357
6358    See general function get_maximum_inundation_data for details.
6359   
6360    """
6361   
6362    runup, _ = get_maximum_inundation_data(filename,
6363                                           polygon=polygon,
6364                                           time_interval=time_interval,
6365                                           verbose=verbose)
6366    return runup
6367
6368
6369
6370
6371def get_maximum_inundation_location(filename,
6372                                    polygon=None,
6373                                    time_interval=None,
6374                                    verbose=False):
6375    """Return location of highest elevation where h > 0
6376   
6377   
6378    Usage:
6379    max_runup_location = get_maximum_inundation_location(filename,
6380                                                         polygon=None,
6381                                                         time_interval=None,
6382                                                         verbose=False)
6383
6384    filename is a NetCDF sww file containing ANUGA model output.
    Optional arguments polygon and time_interval restrict the maximum
    runup calculation to points that lie within the specified polygon
    and time interval.
6388
6389    If no inundation is found within polygon and time_interval the return value
6390    is None signifying "No Runup" or "Everything is dry".
6391
6392    See general function get_maximum_inundation_data for details.
6393    """
6394   
6395    _, max_loc = get_maximum_inundation_data(filename,
6396                                             polygon=polygon,
6397                                             time_interval=time_interval,
6398                                             verbose=verbose)
6399    return max_loc
6400   
6401
6402
6403def get_maximum_inundation_data(filename, polygon=None, time_interval=None,
[4567]6404                                use_centroid_values=False,
[4554]6405                                verbose=False):
[4551]6406    """Compute maximum run up height from sww file.
6407
[4554]6408
6409    Usage:
6410    runup, location = get_maximum_inundation_data(filename,
6411                                                  polygon=None,
6412                                                  time_interval=None,
6413                                                  verbose=False)
6414   
6415
[4665]6416    Algorithm is as in get_maximum_inundation_elevation from
6417    shallow_water_domain
[4551]6418    except that this function works with the sww file and computes the maximal
6419    runup height over multiple timesteps.
6420   
    Optional arguments polygon and time_interval restrict the maximum
    runup calculation to points that lie within the specified polygon
    and time interval. The polygon is assumed to be in (absolute) UTM
    coordinates in the same zone as the domain.
[4554]6426
6427    If no inundation is found within polygon and time_interval the return value
6428    is None signifying "No Runup" or "Everything is dry".
[4551]6429    """
6430
6431    # We are using nodal values here as that is what is stored in sww files.
6432
6433    # Water depth below which it is considered to be 0 in the model
6434    # FIXME (Ole): Allow this to be specified as a keyword argument as well
[4554]6435
6436    from anuga.utilities.polygon import inside_polygon   
[4551]6437    from anuga.config import minimum_allowed_height
[4595]6438    from Scientific.IO.NetCDF import NetCDFFile
[4551]6439
[4595]6440    dir, base = os.path.split(filename)
6441           
6442    iterate_over = get_all_swwfiles(dir,base)
[4551]6443   
6444    # Read sww file
6445    if verbose: 
[4554]6446        print 'Reading from %s' %filename
[4563]6447        # FIXME: Use general swwstats (when done)
[4551]6448   
[4595]6449    maximal_runup = None
6450    maximal_runup_location = None
[4563]6451   
    for i, swwfile in enumerate(iterate_over):
[4554]6453       
[4595]6454        # Read sww file
6455        filename = join(dir,swwfile+'.sww')
[4554]6456       
[4595]6457        if verbose: 
6458            print 'Reading from %s' %filename
6459            # FIXME: Use general swwstats (when done)
6460               
6461        fid = NetCDFFile(filename)
6462   
6463        # Get geo_reference
6464        # sww files don't have to have a geo_ref
6465        try:
6466            geo_reference = Geo_reference(NetCDFObject=fid)
6467        except AttributeError, e:
6468            geo_reference = Geo_reference() # Default georef object
6469           
6470        xllcorner = geo_reference.get_xllcorner()
6471        yllcorner = geo_reference.get_yllcorner()
6472        zone = geo_reference.get_zone()
[4554]6473       
[4595]6474        # Get extent
6475        volumes = fid.variables['volumes'][:]   
6476        x = fid.variables['x'][:] + xllcorner
6477        y = fid.variables['y'][:] + yllcorner
[4551]6478   
[4595]6479   
[4863]6480        # Get the relevant quantities (Convert from single precison)
6481        elevation = array(fid.variables['elevation'][:], Float) 
6482        stage = array(fid.variables['stage'][:], Float)
[4595]6483   
6484   
        # Here's where one could convert nodal information to centroid
        # information, but that is probably something we need to write in C.
        # The Python approach below is NOT finished!!!
[4567]6489        if use_centroid_values is True:
[4595]6490            x = get_centroid_values(x, volumes)
6491            y = get_centroid_values(y, volumes)   
6492            elevation = get_centroid_values(elevation, volumes)   
6493   
6494   
6495        # Spatial restriction
6496        if polygon is not None:
6497            msg = 'polygon must be a sequence of points.'
6498            assert len(polygon[0]) == 2, msg
[4665]6499            # FIXME (Ole): Make a generic polygon input check in polygon.py
6500            # and call it here
[4595]6501           
6502            points = concatenate((x[:,NewAxis], y[:,NewAxis]), axis=1)
6503   
6504            point_indices = inside_polygon(points, polygon)
6505   
6506            # Restrict quantities to polygon
6507            elevation = take(elevation, point_indices)
6508            stage = take(stage, point_indices, axis=1)
6509   
6510            # Get info for location of maximal runup
6511            points_in_polygon = take(points, point_indices)
6512            x = points_in_polygon[:,0]
6513            y = points_in_polygon[:,1]       
[4567]6514        else:
[4595]6515            # Take all points
6516            point_indices = arange(len(x))
[4567]6517           
[4551]6518   
[4595]6519        # Temporal restriction
6520        time = fid.variables['time'][:]
6521        all_timeindices = arange(len(time))       
6522        if time_interval is not None:
6523           
6524            msg = 'time_interval must be a sequence of length 2.'
6525            assert len(time_interval) == 2, msg
[4665]6526            msg = 'time_interval %s must not be decreasing.' %(time_interval)
[4595]6527            assert time_interval[1] >= time_interval[0], msg
6528           
6529            msg = 'Specified time interval [%.8f:%.8f]' %tuple(time_interval)
            msg += ' does not match model time interval: [%.8f, %.8f]\n'\
6531                   %(time[0], time[-1])
6532            if time_interval[1] < time[0]: raise ValueError(msg)
6533            if time_interval[0] > time[-1]: raise ValueError(msg)
6534   
6535            # Take time indices corresponding to interval (& is bitwise AND)
6536            timesteps = compress((time_interval[0] <= time) & (time <= time_interval[1]),
6537                                 all_timeindices)
6538   
6539   
6540            msg = 'time_interval %s did not include any model timesteps.' %(time_interval)       
6541            assert not alltrue(timesteps == 0), msg
6542   
6543   
6544        else:
6545            # Take them all
6546            timesteps = all_timeindices
6547       
6548   
6549        fid.close()
6550   
6551        # Compute maximal runup for each timestep
6556       
6557        for i in timesteps:
6558            if use_centroid_values is True:
[4665]6559                stage_i = get_centroid_values(stage[i,:], volumes)   
[4595]6560            else:
6561                stage_i = stage[i,:]
6562               
6563            depth = stage_i  - elevation
6564       
6565            # Get wet nodes i.e. nodes with depth>0 within given region and timesteps
6566            wet_nodes = compress(depth > minimum_allowed_height, arange(len(depth)))
6567   
6568            if alltrue(wet_nodes == 0):
6569                runup = None
6570            else:   
6571                # Find maximum elevation among wet nodes
6572                wet_elevation = take(elevation, wet_nodes)
6573   
6574                runup_index = argmax(wet_elevation)
6575                runup = max(wet_elevation)
[4862]6576                assert wet_elevation[runup_index] == runup # Must be True
6577            #print "runup", runup
6578            #print "maximal_runup", maximal_runup
6579           
[4595]6580            if runup > maximal_runup:
                maximal_runup = runup      # This works even if maximal_runup is None
6583   
6584                # Record location
6585                wet_x = take(x, wet_nodes)
6586                wet_y = take(y, wet_nodes)           
6587                maximal_runup_location = [wet_x[runup_index], wet_y[runup_index]]
6588   
[4554]6590    return maximal_runup, maximal_runup_location
[4551]6591
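# A minimal usage sketch for get_maximum_inundation_data and its wrappers
# (illustrative only, not called anywhere). The filename, polygon and time
# interval are hypothetical.
def example_get_maximum_inundation_data():
    polygon = [[0, 0], [1000, 0], [1000, 1000], [0, 1000]]
    runup, location = get_maximum_inundation_data('model_output.sww',
                                                  polygon=polygon,
                                                  time_interval=[0, 3600])
    if runup is None:
        print 'Everything is dry'
    else:
        print 'Maximal runup %f at %s' %(runup, str(location))
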
[4586]6592def get_all_swwfiles(look_in_dir='',base_name='',verbose=False):
6593    '''
    Finds all the sww files in "look_in_dir" whose names contain "base_name".
    Will accept base_name with or without the extension ".sww".
[4586]6596   
6597    Returns: a list of strings
[4595]6598       
6599    Usage:     iterate_over = get_all_swwfiles(dir, name)
6600    then
6601               for swwfile in iterate_over:
6602                   do stuff
6603                   
6604    Check "export_grids" and "get_maximum_inundation_data" for examples
[4586]6605    '''
[4595]6606   
    # Also test the extension
    name, extension = os.path.splitext(base_name)

    if extension != '' and extension != '.sww':
        msg = 'File %s must be a NetCDF sww file!' %base_name
        raise IOError, msg
6613
[4586]6614    if look_in_dir == "":
6615        look_in_dir = "." # Unix compatibility
6616   
6617    dir_ls = os.listdir(look_in_dir)
[4595]6619    iterate_over = [x[:-4] for x in dir_ls if name in x and x[-4:] == '.sww']
[4586]6620    if len(iterate_over) == 0:
        msg = 'No files with the base name %s' %name
[4586]6623        raise IOError, msg
6624    if verbose: print 'iterate over %s' %(iterate_over)
[4554]6625
[4586]6626    return iterate_over
6627
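# A minimal usage sketch for get_all_swwfiles (illustrative only, not
# called anywhere). 'run' is a hypothetical base name.
def example_get_all_swwfiles():
    for swwfile in get_all_swwfiles('.', 'run'):
        print swwfile + '.sww'
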
[4636]6628def get_all_files_with_extension(look_in_dir='',base_name='',extension='.sww',verbose=False):
6629    '''
    Finds all the files with the given extension in "look_in_dir" whose
    names contain "base_name".

    Returns: a list of strings

    Usage:     iterate_over = get_all_files_with_extension(dir, name, '.sww')
6636    then
6637               for swwfile in iterate_over:
6638                   do stuff
6639                   
6640    Check "export_grids" and "get_maximum_inundation_data" for examples
6641    '''
6642   
    # Also test the extension
    name, ext = os.path.splitext(base_name)
[4586]6646
    if ext != '' and ext != extension:
        msg = 'base_name %s must be a file with %s extension!' %(base_name, extension)
6649        raise IOError, msg
[4598]6650
    if look_in_dir == "":
        look_in_dir = "." # Unix compatibility
    dir_ls = os.listdir(look_in_dir)
    iterate_over = [x[:-len(extension)] for x in dir_ls
                    if name in x and x[-len(extension):] == extension]
6657    if len(iterate_over) == 0:
6658        msg = 'No files of the base name %s in %s'\
6659              %(name, look_in_dir)
6660        raise IOError, msg
6661    if verbose: print 'iterate over %s' %(iterate_over)
6662
6663    return iterate_over
6664
6665def get_all_directories_with_name(look_in_dir='',base_name='',verbose=False):
6666    '''
    Finds all the directories in "look_in_dir" whose names contain
    "base_name".

    Returns: a list of strings

    Usage:     iterate_over = get_all_directories_with_name(dir, name)
    then
               for dirname in iterate_over:
                   do stuff

    Check "export_grids" and "get_maximum_inundation_data" for examples
6678    '''
6679   
    if look_in_dir == "":
        look_in_dir = "." # Unix compatibility
    dir_ls = os.listdir(look_in_dir)
6687    iterate_over = [x for x in dir_ls if base_name in x]
6688    if len(iterate_over) == 0:
        msg = 'No files or directories with the base name %s' %base_name
6691        raise IOError, msg
6692    if verbose: print 'iterate over %s' %(iterate_over)
6693
6694    return iterate_over
6695
def points2polygon(points_file,
                   minimum_triangle_angle=3.0):
    """
    WARNING: This function is not fully working.

    Return a polygon derived from an alpha shape, given a points file.

    WARNING: Alpha shape can return multiple polygons, but this function
    only returns one polygon.
    """
6706    from anuga.pmesh.mesh import Mesh, importMeshFromFile
6707    from anuga.shallow_water import Domain   
6708    from anuga.pmesh.mesh_interface import create_mesh_from_regions
6709   
6710    mesh = importMeshFromFile(points_file)
6711    mesh.auto_segment()
6712    mesh.exportASCIIsegmentoutlinefile("outline.tsh")
6713    mesh2 = importMeshFromFile("outline.tsh")
    mesh2.generate_mesh(maximum_triangle_area=1000000000,
                        minimum_triangle_angle=minimum_triangle_angle,
                        verbose=False)
6715    mesh2.export_mesh_file('outline_meshed.tsh')
    domain = Domain("outline_meshed.tsh", use_cache=False)
    polygon = domain.get_boundary_polygon()
6718    return polygon
[4636]6719
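# A minimal usage sketch for points2polygon (illustrative only, not called
# anywhere). 'field_data.pts' is a hypothetical points file. Note the
# warnings in the docstring above: the function is not fully working and
# only returns one of possibly several polygons.
def example_points2polygon():
    polygon = points2polygon('field_data.pts', minimum_triangle_angle=3.0)
    print 'Boundary polygon with %d vertices' %len(polygon)
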
[3292]6720#-------------------------------------------------------------
[4500]6721if __name__ == "__main__":
    # Set umask from config to force the same permissions on all files and
    # directories created. (It was noticed that "mpirun" doesn't honour the
    # umask set in your .bashrc etc file.)
6725    from config import umask
6726    import os 
6727    os.umask(umask)