source: anuga_core/source/anuga/shallow_water/data_manager.py @ 5546

Last change r5546 by ole: Slight update of data_manager (file name in urs2sts)
1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topograhies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.csv: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from anuga.pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST bundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
47TSH + Boundary SWW -> SWW: Simluation using abstract_2d_finite_volumes
48
49"""

# This file was reverted from changeset:5484 to changeset:5470 on 10th July
# by Ole.

import exceptions
class TitleValueError(exceptions.Exception): pass
class DataMissingValuesError(exceptions.Exception): pass
class DataFileNotOpenError(exceptions.Exception): pass
class DataTimeError(exceptions.Exception): pass
class DataDomainError(exceptions.Exception): pass
class NewQuantity(exceptions.Exception): pass



import csv
import os, sys
import shutil
from struct import unpack
import array as p_array
#import time, os
from os import sep, path, remove, mkdir, access, F_OK, W_OK, getcwd


from Numeric import concatenate, array, Float, Int, Int32, resize, \
     sometrue, searchsorted, zeros, allclose, around, reshape, \
     transpose, sort, NewAxis, ArrayType, compress, take, arange, \
     argmax, alltrue, shape, Float32, size

import string

from Scientific.IO.NetCDF import NetCDFFile
#from shutil import copy
from os.path import exists, basename, join
from os import getcwd


from anuga.coordinate_transforms.redfearn import redfearn, \
     convert_from_latlon_to_utm
from anuga.coordinate_transforms.geo_reference import Geo_reference, \
     write_NetCDF_georeference, ensure_geo_reference
from anuga.geospatial_data.geospatial_data import Geospatial_data,\
     ensure_absolute
from anuga.config import minimum_storable_height as default_minimum_storable_height
from anuga.config import max_float
from anuga.utilities.numerical_tools import ensure_numeric, mean
from anuga.caching.caching import myhash
from anuga.utilities.anuga_exceptions import ANUGAError
from anuga.shallow_water import Domain
from anuga.abstract_2d_finite_volumes.pmesh2domain import \
     pmesh_to_domain_instance
from anuga.abstract_2d_finite_volumes.util import get_revision_number, \
     remove_lone_verts, sww2timeseries, get_centroid_values
from anuga.load_mesh.loadASCII import export_mesh_file
from anuga.utilities.polygon import intersection

# Formula mappings

quantity_formula = {'momentum':'(xmomentum**2 + ymomentum**2)**0.5',
                    'depth':'stage-elevation',
                    'speed': \
 '(xmomentum**2 + ymomentum**2)**0.5/(stage-elevation+1.e-6/(stage-elevation))'}
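
# Note on the 'speed' formula above: momentum is divided by a regularised
# depth h + 1.e-6/h (where h = stage-elevation) rather than by h itself,
# which keeps the computed speed bounded as h approaches zero.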



def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s

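# Example (hypothetical input):
#     make_filename(' Model output (stage) ')  returns  'Model_output_stage'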

def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os.path

    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname

    # Need both read and write access (bitwise OR of the access flags)
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            exitcode=os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode=os.system('chmod 775 '+path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:' + os.sep

            print 'Using directory %s instead' %path

    return(path)
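
# Example (hypothetical path; creates the directory if needed):
#     datadir = check_dir('~/anuga_output', verbose=True)
#     # datadir now ends with os.sep, e.g. '/home/user/anuga_output/'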


def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" %X

        os.rmdir(path)


# ANOTHER OPTION, IF NEEDED IN THE FUTURE, Nick B 7/2007
def rmgeneric(path, __func__, verbose=False):
    ERROR_STR= """Error removing %(path)s, %(error)s """

    try:
        __func__(path)
        if verbose: print 'Removed ', path
    except OSError, (errno, strerror):
        print ERROR_STR % {'path' : path, 'error': strerror }

def removeall(path, verbose=False):

    if not os.path.isdir(path):
        return

    files=os.listdir(path)

    for x in files:
        fullpath=os.path.join(path, x)
        if os.path.isfile(fullpath):
            f=os.remove
            rmgeneric(fullpath, f, verbose)
        elif os.path.isdir(fullpath):
            removeall(fullpath, verbose)
            f=os.rmdir
            rmgeneric(fullpath, f, verbose)



def create_filename(datadir, filename, format, size=None, time=None):

    import os
    #from anuga.config import data_dir

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' %size

    if time is not None:
        FN += '_time%.2f' %time

    FN += '.' + format
    return FN
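
# Example (hypothetical names; check_dir ensures the directory exists):
#     create_filename('data', 'run1', 'sww', size=4, time=3.5)
#     returns 'data/run1_size4_time3.50.sww' on unix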


def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob

    import os
    #from anuga.config import data_dir

    dir = check_dir(datadir)

    # Match names as produced by create_filename (check_dir has already
    # appended the trailing separator)
    pattern = dir + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)



#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], '''Mode %s must be either:''' %mode +\
                                        '''   'w' (write)'''+\
                                        '''   'r' (read)''' +\
                                        '''   'a' (append)'''

        #Create filename
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0
        self.domain = domain



        # Exclude ghosts in case this is a parallel domain
        self.number_of_nodes = domain.number_of_full_nodes
        self.number_of_volumes = domain.number_of_full_triangles
        #self.number_of_volumes = len(domain)




        #FIXME: Should we have a general set_precision function?



#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision for quantities
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)

        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            self.minimum_storable_height = default_minimum_storable_height

        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            description = 'Output from anuga.abstract_2d_finite_volumes suitable for plotting'
            self.writer = Write_sww()
            self.writer.store_header(fid,
                                     domain.starttime,
                                     self.number_of_volumes,
                                     self.domain.number_of_full_nodes,
                                     description=description,
                                     smoothing=domain.smooth,
                                     order=domain.default_order,
                                     sww_precision=self.precision)

            # Extra optional information
            if hasattr(domain, 'texture'):
                fid.texture = domain.texture

            if domain.quantities_to_be_monitored is not None:
                fid.createDimension('singleton', 1)
                fid.createDimension('two', 2)

                poly = domain.monitor_polygon
                if poly is not None:
                    N = len(poly)
                    fid.createDimension('polygon_length', N)
                    fid.createVariable('extrema.polygon',
                                       self.precision,
                                       ('polygon_length',
                                        'two'))
                    fid.variables['extrema.polygon'][:] = poly


                interval = domain.monitor_time_interval
                if interval is not None:
                    fid.createVariable('extrema.time_interval',
                                       self.precision,
                                       ('two',))
                    fid.variables['extrema.time_interval'][:] = interval


                for q in domain.quantities_to_be_monitored:
                    #print 'doing', q
                    fid.createVariable(q+'.extrema', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q+'.min_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q+'.max_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q+'.min_time', self.precision,
                                       ('singleton',))
                    fid.createVariable(q+'.max_time', self.precision,
                                       ('singleton',))


        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision=self.precision)

        #
        points = concatenate( (X[:,NewAxis],Y[:,NewAxis]), axis=1 )
        self.writer.store_triangulation(fid,
                                        points,
                                        V.astype(volumes.typecode()),
                                        Z,
                                        points_georeference= \
                                        domain.geo_reference)

        # Close
        fid.close()


    def store_timestep(self, names=None):
        """Store time and named quantities to file
        """

        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        from Numeric import choose


        if names is None:
            # Standard shallow water wave equation quantities in ANUGA
            names = ['stage', 'xmomentum', 'ymomentum']

        # Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') # Open existing file
            except IOError:
                # This could happen if someone was reading the file.
                # In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg



        # Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            # In order to get the file name and start time correct,
            # I change the domain.filename and domain.starttime.
            # This is the only way to do this without changing
            # other modules (I think).

            # Write a filename addon that won't break Swollen's reader
            # (10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')

            # Remember the old filename, then give domain a
            # name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename+filename_ext)


            # Change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            # Build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)
            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            if 'stage' in names and 'xmomentum' in names and \
               'ymomentum' in names:

                # Get stage, elevation, depth and select only those
                # values where minimum_storable_height is exceeded
                Q = domain.quantities['stage']
                A, _ = Q.get_vertex_values(xy = False,
                                           precision = self.precision)
                z = fid.variables['elevation']

                storable_indices = A-z[:] >= self.minimum_storable_height
                stage = choose(storable_indices, (z[:], A))

                # Define a zero vector of same size and type as A
                # for use with momenta
                null = zeros(size(A), A.typecode())

                # Get xmomentum where depth exceeds minimum_storable_height
                Q = domain.quantities['xmomentum']
                xmom, _ = Q.get_vertex_values(xy = False,
                                              precision = self.precision)
                xmomentum = choose(storable_indices, (null, xmom))


                # Get ymomentum where depth exceeds minimum_storable_height
                Q = domain.quantities['ymomentum']
                ymom, _ = Q.get_vertex_values(xy = False,
                                              precision = self.precision)
                ymomentum = choose(storable_indices, (null, ymom))

                # Write quantities to NetCDF
                self.writer.store_quantities(fid,
                                             time=self.domain.time,
                                             sww_precision=self.precision,
                                             stage=stage,
                                             xmomentum=xmomentum,
                                             ymomentum=ymomentum)
            else:
                msg = 'Quantities stored must be: stage, xmomentum, ymomentum.'
                msg += ' Instead I got: ' + str(names)
                raise Exception, msg



            # Update extrema if requested
            domain = self.domain
            if domain.quantities_to_be_monitored is not None:
                for q, info in domain.quantities_to_be_monitored.items():

                    if info['min'] is not None:
                        fid.variables[q + '.extrema'][0] = info['min']
                        fid.variables[q + '.min_location'][:] =\
                                        info['min_location']
                        fid.variables[q + '.min_time'][0] = info['min_time']

                    if info['max'] is not None:
                        fid.variables[q + '.extrema'][1] = info['max']
                        fid.variables[q + '.max_location'][:] =\
                                        info['max_location']
                        fid.variables[q + '.max_time'][0] = info['max_time']



            # Flush and close
            fid.sync()
            fid.close()
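

# Example of how a domain typically drives this writer (an illustrative
# sketch only; 'domain' is assumed to be an existing shallow water Domain
# instance whose format is 'sww'):
#
#     writer = get_dataobject(domain)   # returns a Data_format_sww instance
#     writer.store_connectivity()       # mesh and elevation, stored once
#     for t in domain.evolve(yieldstep=1, finaltime=10):
#         writer.store_timestep()       # stage and momenta, once per step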



# Class for handling checkpoint data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y coordinates and connectivity (volumes) of the
        triangles constituting the mesh.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        # Note: elevation (Z) is stored per timestep along with the other
        # quantities, so it is not written here.

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                  %self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        quantity = fid.variables[name]  # Variable for the named quantity
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                                  precision = self.precision)

        quantity[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()


#### NED is national exposure database (name changed to NEXIS)

LAT_TITLE = 'LATITUDE'
LONG_TITLE = 'LONGITUDE'
X_TITLE = 'x'
Y_TITLE = 'y'
class Exposure_csv:
    def __init__(self,file_name, latitude_title=LAT_TITLE,
                 longitude_title=LONG_TITLE, is_x_y_locations=None,
                 x_title=X_TITLE, y_title=Y_TITLE,
                 refine_polygon=None, title_check_list=None):
        """
        This class is for handling the exposure csv file.
        It reads the file in and converts the lats and longs to a geospatial
        data object.
        Use the methods to read and write columns.

        The format of the csv files it reads is:
           The first row is a title row.
           Commas are the delimiters.
           Each column is a 'set' of data.

        Feel free to use/expand it to read other csv files.

        It is not for adding and deleting rows.

        Can geospatial handle string attributes? It's not made for them.
        Currently it can't load and save string attributes.

        So just use geospatial to hold the x, y and georef? Bad, since
        different attributes are in different structures.  Not so bad, the
        info to write if the .csv file is saved is in attribute_dic.

        The location info is in the geospatial attribute.
        """
        self._file_name = file_name
        self._geospatial = None #

        # self._attribute_dic is a dictionary.
        #The keys are the column titles.
        #The values are lists of column data

        # self._title_index_dic is a dictionary.
        #The keys are the column titles.
        #The values are the index positions of file columns.
        self._attribute_dic, self._title_index_dic = \
            csv2dict(self._file_name, title_check_list=title_check_list)
        try:
            #Have code here that handles caps or lower
            lats = self._attribute_dic[latitude_title]
            longs = self._attribute_dic[longitude_title]

        except KeyError:
            # maybe a warning..
            #Let's see if this works..
            if is_x_y_locations is not False:
                is_x_y_locations = True
            pass
        else:
            self._geospatial = Geospatial_data(latitudes = lats,
                 longitudes = longs)

        if is_x_y_locations is True:
            if self._geospatial is not None:
                pass #fixme throw an error
            try:
                xs = self._attribute_dic[x_title]
                ys = self._attribute_dic[y_title]
                points = [[float(i),float(j)] for i,j in map(None,xs,ys)]
            except KeyError:
                # maybe a warning..
                msg = "Could not find location information."
                raise TitleValueError, msg
            else:
                self._geospatial = Geospatial_data(data_points=points)

        # create a list of points that are in the refining_polygon
        # described by a list of indexes representing the points

    def __cmp__(self, other):
        #print "self._attribute_dic",self._attribute_dic
        #print "other._attribute_dic",other._attribute_dic
        #print "self._title_index_dic", self._title_index_dic
        #print "other._title_index_dic", other._title_index_dic

        #check that a is an instance of this class
        if isinstance(self, type(other)):
            result = cmp(self._attribute_dic, other._attribute_dic)
            if result != 0:
                return result
            # The order of the columns is important. Therefore..
            result = cmp(self._title_index_dic, other._title_index_dic)
            if result != 0:
                return result
            for self_ls, other_ls in map(None,self._attribute_dic, \
                    other._attribute_dic):
                result = cmp(self._attribute_dic[self_ls],
                             other._attribute_dic[other_ls])
                if result != 0:
                    return result
            return 0
        else:
            return 1


    def get_column(self, column_name, use_refind_polygon=False):
        """
        Given a column name return a list of the column values.

        Note, the type of the values will be String!
        Do this to change a list of strings to a list of floats:
        time = [float(x) for x in time]

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        if not self._attribute_dic.has_key(column_name):
            msg = 'There is no column called %s!' %column_name
            raise TitleValueError, msg
        return self._attribute_dic[column_name]


    def get_value(self, value_column_name,
                  known_column_name,
                  known_values,
                  use_refind_polygon=False):
        """
        Do linear interpolation on the known_column_name column, using the
        known_values, to return a value from the value_column_name column.
        """
        pass


    def get_location(self, use_refind_polygon=False):
        """
        Return a geospatial object which describes the
        locations of the location file.

        Note, if there is no location info, this returns None.

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        return self._geospatial

    def set_column(self, column_name, column_values, overwrite=False):
        """
        Add a column to the 'end' (with the right most column being the end)
        of the csv file.

        Set overwrite to True if you want to overwrite a column.

        Note, in column_name white space is removed and case is not checked.
        Precondition:
        The column_name and column_values cannot have commas in them.
        """

        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        if len(column_values) != value_row_count:
            msg = 'The number of column values must equal the number of rows.'
            raise DataMissingValuesError, msg

        if self._attribute_dic.has_key(column_name):
            if not overwrite:
                msg = 'Column name %s already in use!' %column_name
                raise TitleValueError, msg
        else:
            # New title.  Add it to the title index.
            self._title_index_dic[column_name] = len(self._title_index_dic)
        self._attribute_dic[column_name] = column_values
        #print "self._title_index_dic[column_name]",self._title_index_dic

    def save(self, file_name=None):
        """
        Save the exposure csv file
        """
        if file_name is None:
            file_name = self._file_name

        fd = open(file_name,'wb')
        writer = csv.writer(fd)

        #Write the title to a csv file
        line = [None]* len(self._title_index_dic)
        for title in self._title_index_dic.iterkeys():
            line[self._title_index_dic[title]]= title
        writer.writerow(line)

        # Write the values to a csv file
        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        for row_i in range(value_row_count):
            line = [None]* len(self._title_index_dic)
            for title in self._title_index_dic.iterkeys():
                line[self._title_index_dic[title]]= \
                     self._attribute_dic[title][row_i]
            writer.writerow(line)

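# Example usage of Exposure_csv (an illustrative sketch; the file and
# column names, and the depths list, are hypothetical):
#
#     exposure = Exposure_csv('houses.csv')
#     values = [float(x) for x in exposure.get_column('STRUCTURE_VALUE')]
#     exposure.set_column('DEPTH', depths, overwrite=True)
#     exposure.save('houses_with_depth.csv')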


def csv2dict(file_name, title_check_list=None):
    """
    Load in the csv as a dict, title as key and column info as value.
    Also, create a dict, title as key and column index as value,
    to keep track of the column order.

    Two dictionaries are returned.

    WARNING: Values are returned as strings.
    Do this to change a list of strings to a list of floats:
        time = [float(x) for x in time]
    """

    #
    attribute_dic = {}
    title_index_dic = {}
    titles_stripped = [] # list of titles
    reader = csv.reader(file(file_name))

    # Read in and manipulate the title info
    titles = reader.next()
    for i,title in enumerate(titles):
        titles_stripped.append(title.strip())
        title_index_dic[title.strip()] = i
    title_count = len(titles_stripped)
    #print "title_index_dic",title_index_dic
    if title_check_list is not None:
        for title_check in title_check_list:
            #msg = "Reading error.  This column title is not present ", title_check
            #assert title_index_dic.has_key(title_check), msg
            if not title_index_dic.has_key(title_check):
                #reader.close()
                msg = "Reading error.  This column title is not present ", \
                      title_check
                raise IOError, msg



    #create a dict of column values, indexed by column title
    for line in reader:
        if len(line) != title_count:
            raise IOError #FIXME make this nicer
        for i, value in enumerate(line):
            attribute_dic.setdefault(titles_stripped[i],[]).append(value)

    return attribute_dic, title_index_dic
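
# Example (hypothetical file 'gauges.csv' with title row 'time,stage'):
#
#     attributes, title_index = csv2dict('gauges.csv')
#     stage = [float(x) for x in attributes['stage']]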


#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected.

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N==3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))

    outfile.close()
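
# Example: write a single disconnected triangle (illustrative only):
#
#     from Numeric import array, Float
#     x = array([[0.0, 1.0, 0.0]], Float)
#     y = array([[0.0, 0.0, 1.0]], Float)
#     z = array([[0.0, 0.0, 0.0]], Float)
#     write_obj('triangle', x, y, z)    # creates triangle.obj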


#########################################################
#Conversion routines
########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)


def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from anuga.config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    ##i = 0
    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float,tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]

        ##i += 1


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info
    #and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        # filenames from glob already include the data_dir prefix
        lines = open(filename, 'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError, 'Hmmmm'



        ##i = 0
        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float,tokens)

            for j in range(3):
                z[i,j] = values[j]

            ##i += 1

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)


def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()
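
# Example (hypothetical file names): keep every 5th timestep of run1.sww
#
#     filter_netcdf('run1.sww', 'run1_coarse.sww', step=5)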


#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename
    """

    # domain.format is a string such as 'sww', which selects the
    # corresponding Data_format_<format> class defined above
    cls = eval('Data_format_%s' %domain.format)
    return cls(domain, mode)




def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False,):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result
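
# Example (hypothetical base name): convert perth.dem to perth.pts and
# cache the result so repeated calls with unchanged input are fast:
#
#     dem2pts('perth', use_cache=True, verbose=True)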


def _dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    # FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    # Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  # Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  # Easting of lower left corner
    yllcorner = infile.yllcorner[0]  # Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    # Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    # Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    # Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    # Assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    # Compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    # Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min # Easting of lower left corner
    outfile.yllcorner = northing_min # Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    # Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # Calculating number of NODATA_values for each row in clipped region
    # FIXME: use array operations to do faster
    nn = 0
    k = 0
    i1_0 = 0
    j1_0 = 0
    thisj = 0
    thisi = 0
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    index1 = j1_0
    index2 = thisj

    # Dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints-nn

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]

    if verbose:
        print 'There are %d values in the elevation' %totalnopoints
        print 'There are %d values in the clipped elevation' %clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' %nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # Variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1
    # Store data
    global_index = 0
    # for i in range(nrows):
    for i in range(i1_0,thisi+1,1):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index

        v = dem_elevation_r[i,index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA # ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv # ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        #for j in range(ncols):
        for j in range(j1_0,index2+1,1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] != NODATA_value:
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()



def _read_hecras_cross_sections(lines):
    """Return block of surface lines for each cross section
    Starts with SURFACE LINE,
    Ends with END CROSS-SECTION
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if lines[i].strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if lines[i].strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])
1499
1500def hecras_cross_sections2pts(basename_in,
1501                              basename_out=None,
1502                              verbose=False):
1503    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1504
1505    Example:
1506
1507
1508# RAS export file created on Mon 15Aug2005 11:42
1509# by HEC-RAS Version 3.1.1
1510
1511BEGIN HEADER:
1512  UNITS: METRIC
1513  DTM TYPE: TIN
1514  DTM: v:\1\cit\perth_topo\river_tin
1515  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1516  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1517  MAP PROJECTION: UTM
1518  PROJECTION ZONE: 50
1519  DATUM: AGD66
1520  VERTICAL DATUM:
1521  NUMBER OF REACHES:  19
1522  NUMBER OF CROSS-SECTIONS:  14206
1523END HEADER:
1524
1525
1526Only the SURFACE LINE data of the following form will be utilised
1527
1528  CROSS-SECTION:
1529    STREAM ID:Southern-Wungong
1530    REACH ID:Southern-Wungong
1531    STATION:19040.*
1532    CUT LINE:
1533      405548.671603161 , 6438142.7594925
1534      405734.536092045 , 6438326.10404912
1535      405745.130459356 , 6438331.48627354
1536      405813.89633823 , 6438368.6272789
1537    SURFACE LINE:
1538     405548.67,   6438142.76,   35.37
1539     405552.24,   6438146.28,   35.41
1540     405554.78,   6438148.78,   35.44
1541     405555.80,   6438149.79,   35.44
1542     405559.37,   6438153.31,   35.45
1543     405560.88,   6438154.81,   35.44
1544     405562.93,   6438156.83,   35.42
1545     405566.50,   6438160.35,   35.38
1546     405566.99,   6438160.83,   35.37
1547     ...
1548   END CROSS-SECTION
1549
1550    Convert to NetCDF pts format which is
1551
1552    points:  (Nx2) Float array
1553    elevation: N Float array
1554    """
1555
1556    import os
1557    from Scientific.IO.NetCDF import NetCDFFile
1558    from Numeric import Float, zeros, reshape
1559
1560    root = basename_in
1561
1562    #Get ASCII file
1563    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1564
1565    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1566
1567    lines = infile.readlines()
1568    infile.close()
1569
1570    if verbose: print 'Converting to pts format'
1571
1572    i = 0
1573    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1574        i += 1
1575
1576    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1577    i += 1
1578
1579    assert lines[i].strip().upper().startswith('UNITS:')
1580    units = lines[i].strip().split()[1]
1581    i += 1
1582
1583    assert lines[i].strip().upper().startswith('DTM TYPE:')
1584    i += 1
1585
1586    assert lines[i].strip().upper().startswith('DTM:')
1587    i += 1
1588
1589    assert lines[i].strip().upper().startswith('STREAM')
1590    i += 1
1591
1592    assert lines[i].strip().upper().startswith('CROSS')
1593    i += 1
1594
1595    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1596    projection = lines[i].strip().split(':')[1]
1597    i += 1
1598
1599    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1600    zone = int(lines[i].strip().split(':')[1])
1601    i += 1
1602
1603    assert lines[i].strip().upper().startswith('DATUM:')
1604    datum = lines[i].strip().split(':')[1]
1605    i += 1
1606
1607    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1608    i += 1
1609
1610    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1611    i += 1
1612
1613    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1614    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1615    i += 1
1616
1617
1618    #Now read all points
1619    points = []
1620    elevation = []
1621    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1622        for k, entry in enumerate(entries):
1623            points.append(entry[:2])
1624            elevation.append(entry[2])
1625
1626
1627    msg = 'Actual #number_of_cross_sections == %d, Reported as %d'\
1628          %(j+1, number_of_cross_sections)
1629    assert j+1 == number_of_cross_sections, msg
1630
1631    #Get output file
1632    if basename_out == None:
1633        ptsname = root + '.pts'
1634    else:
1635        ptsname = basename_out + '.pts'
1636
1637    geo_ref = Geo_reference(zone, 0, 0, datum, projection, units)
1638    geo = Geospatial_data(points, {"elevation":elevation},
1639                          verbose=verbose, geo_reference=geo_ref)
1640    geo.export_points_file(ptsname)
1641
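# Example usage of the conversion above (a sketch only; the enclosing
# function is assumed to be hecras_cross_sections2pts, and
# 'cross_sections' is a hypothetical basename for cross_sections.sdf):
#
#   hecras_cross_sections2pts('cross_sections', verbose=True)
#   # -> writes cross_sections.pts holding all SURFACE LINE points
#   #    with elevation as an attribute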
1642def export_grid(basename_in, extra_name_out = None,
1643                quantities = None, # defaults to elevation
1644                timestep = None,
1645                reduction = None,
1646                cellsize = 10,
1647                NODATA_value = -9999,
1648                easting_min = None,
1649                easting_max = None,
1650                northing_min = None,
1651                northing_max = None,
1652                verbose = False,
1653                origin = None,
1654                datum = 'WGS84',
1655                format = 'ers'):
1656    """
1657   
1658    Wrapper for sww2dem. - see sww2dem to find out what most of the
1659    parameters do.
1660
1661    Quantities is a list of quantities.  Each quantity will be
1662    calculated for each sww file.
1663
1664    This returns the basenames of the files returned, which is made up
1665    of the dir and all of the file name, except the extension.
1666
1667    This function returns the names of the files produced.
1668
1669    It will also produce as many output files as there are input sww files.
1670    """
1671   
1672    if quantities is None:
1673        quantities = ['elevation']
1674       
1675    if type(quantities) is str:
1676        quantities = [quantities]
1677
1678    # How many sww files are there?
1679    dir, base = os.path.split(basename_in)
1680    #print "basename_in",basename_in
1681    #print "base",base
1682
1683    iterate_over = get_all_swwfiles(dir,base,verbose)
1684   
1685    if dir == "":
1686        dir = "." # Unix compatibility
1687   
1688    files_out = []
1690    for sww_file in iterate_over:
1691        for quantity in quantities:
1692            if extra_name_out is None:
1693                basename_out = sww_file + '_' + quantity
1694            else:
1695                basename_out = sww_file + '_' + quantity + '_' \
1696                               + extra_name_out
1697#            print "basename_out", basename_out
1698       
1699            file_out = sww2dem(dir+sep+sww_file, dir+sep+basename_out,
1700                               quantity, 
1701                               timestep,
1702                               reduction,
1703                               cellsize,
1704                               NODATA_value,
1705                               easting_min,
1706                               easting_max,
1707                               northing_min,
1708                               northing_max,
1709                               verbose,
1710                               origin,
1711                               datum,
1712                               format)
1713            files_out.append(file_out)
1714    #print "basenames_out after",basenames_out
1715    return files_out
1716
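# Example usage of export_grid (a sketch; 'outputs/myrun' is a hypothetical
# basename matching sww files outputs/myrun*.sww):
#
#   names = export_grid('outputs/myrun',
#                       quantities=['elevation', 'stage'],
#                       cellsize=25,
#                       format='asc',
#                       verbose=True)
#   # names holds one basename per (sww file, quantity) combination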
1717
1718def get_timeseries(production_dirs, output_dir, scenario_name, gauges_dir_name,
1719                   plot_quantity, generate_fig = False,
1720                   reportname = None, surface = False, time_min = None,
1721                   time_max = None, title_on = False, verbose = True,
1722                   nodes=None):
1723    """
1724    nodes - number of processes used.
1725
1726    warning - this function has no tests
1727    """
1728    if reportname is None:
1729        report = False
1730    else:
1731        report = True
1732       
1733    if nodes is None:
1734        is_parallel = False
1735    else:
1736        is_parallel = True
1737       
1738    # Generate figures
1739    swwfiles = {}
1740   
1741    if is_parallel is True:   
1742        for i in range(nodes):
1743            print 'Sending node %d of %d' %(i,nodes)
1744            swwfiles = {}
1745            if reportname is not None:
1746                node_reportname = reportname + '_%s' %(i)
1747            else:
1748                node_reportname = None
1747            for label_id in production_dirs.keys():
1748                if label_id == 'boundaries':
1749                    swwfile = best_boundary_sww
1750                else:
1751                    file_loc = output_dir + label_id + sep
1752                    sww_extra = '_P%s_%s' %(i,nodes)
1753                    swwfile = file_loc + scenario_name + sww_extra + '.sww'
1754                    print 'swwfile',swwfile
1755                swwfiles[swwfile] = label_id
1756
1757            # Generate the report for this node once all its sww files
1758            # have been collected
1759            texname, elev_output = sww2timeseries(swwfiles,
1760                                          gauges_dir_name,
1761                                          production_dirs,
1762                                          report = report,
1763                                          reportname = node_reportname,
1764                                          plot_quantity = plot_quantity,
1765                                          generate_fig = generate_fig,
1766                                          surface = surface,
1767                                          time_min = time_min,
1768                                          time_max = time_max,
1769                                          title_on = title_on,
1770                                          verbose = verbose)
1769    else:   
1770        for label_id in production_dirs.keys():       
1771            if label_id == 'boundaries':
1772                print 'boundaries'
1773                file_loc = project.boundaries_in_dir
1774                swwfile = project.boundaries_dir_name3 + '.sww'
1775                #  swwfile = boundary_dir_filename
1776            else:
1777                file_loc = output_dir + label_id + sep
1778                swwfile = file_loc + scenario_name + '.sww'
1779            swwfiles[swwfile] = label_id
1780       
1781        texname, elev_output = sww2timeseries(swwfiles,
1782                                              gauges_dir_name,
1783                                              production_dirs,
1784                                              report = report,
1785                                              reportname = reportname,
1786                                              plot_quantity = plot_quantity,
1787                                              generate_fig = generate_fig,
1788                                              surface = surface,
1789                                              time_min = time_min,
1790                                              time_max = time_max,
1791                                              title_on = title_on,
1792                                              verbose = verbose)
1793                                         
1794
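# Example usage of get_timeseries (a sketch only - the function is untested,
# relies on module level names such as 'project' for the 'boundaries' case,
# and assumes the layout <output_dir>/<label_id>/<scenario_name>.sww;
# all names below are hypothetical):
#
#   production_dirs = {'run_20080101': 'Scenario 1'}
#   get_timeseries(production_dirs, 'outputs/', 'wollongong', 'gauges',
#                  plot_quantity=['stage'], generate_fig=False)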
1795   
1796def sww2dem(basename_in, basename_out = None,
1797            quantity = None, # defaults to elevation
1798            timestep = None,
1799            reduction = None,
1800            cellsize = 10,
1801            NODATA_value = -9999,
1802            easting_min = None,
1803            easting_max = None,
1804            northing_min = None,
1805            northing_max = None,
1806            verbose = False,
1807            origin = None,
1808            datum = 'WGS84',
1809            format = 'ers'):
1810
1811    """Read SWW file and convert to Digitial Elevation model format
1812    (.asc or .ers)
1813
1814    Example (ASC):
1815
1816    ncols         3121
1817    nrows         1800
1818    xllcorner     722000
1819    yllcorner     5893000
1820    cellsize      25
1821    NODATA_value  -9999
1822    138.3698 137.4194 136.5062 135.5558 ..........
1823
1824    Also write accompanying file with same basename_in but extension .prj
1825    used to fix the UTM zone, datum, false northings and eastings.
1826
1827    The prj format is assumed to be as
1828
1829    Projection    UTM
1830    Zone          56
1831    Datum         WGS84
1832    Zunits        NO
1833    Units         METERS
1834    Spheroid      WGS84
1835    Xshift        0.0000000000
1836    Yshift        10000000.0000000000
1837    Parameters
1838
1839
1840    The parameter quantity must be the name of an existing quantity or
1841    an expression involving existing quantities. The default is
1842    'elevation'. It must be a single quantity, not a list.
1843
1844    If timestep (an index) is given, output quantity at that timestep.
1845
1846    If reduction is given, use that to reduce quantity over all timesteps.
1847
1848    datum is the geodetic datum, e.g. 'WGS84' (the default).
1849
1850    format can be either 'asc' or 'ers'.
1851    """
1852
1853    import sys
1854    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, \
1855         sometrue
1856    from Numeric import array2string
1857
1858    from anuga.utilities.polygon import inside_polygon, outside_polygon, \
1859         separate_points_by_polygon
1860    from anuga.abstract_2d_finite_volumes.util import \
1861         apply_expression_to_dictionary
1862
1863    msg = 'Format must be either asc or ers'
1864    assert format.lower() in ['asc', 'ers'], msg
1865
1866
1867    false_easting = 500000
1868    false_northing = 10000000
1869
1870    if quantity is None:
1871        quantity = 'elevation'
1872       
1873    if reduction is None:
1874        reduction = max
1875
1876    if basename_out is None:
1877        basename_out = basename_in + '_%s' %quantity
1878
1879    if quantity_formula.has_key(quantity):
1880        quantity = quantity_formula[quantity]
1881       
1882    swwfile = basename_in + '.sww'
1883    demfile = basename_out + '.' + format
1884    # Note: the use of a .ers extension is optional (write_ermapper_grid
1885    # will deal with either option)
1886   
1888   
1889    # Read sww file
1890    if verbose: 
1891        print 'Reading from %s' %swwfile
1892        print 'Output basename is %s' %basename_out
1893   
1894    from Scientific.IO.NetCDF import NetCDFFile
1895    fid = NetCDFFile(swwfile)
1896
1897    #Get extent and reference
1898    x = fid.variables['x'][:]
1899    y = fid.variables['y'][:]
1900    volumes = fid.variables['volumes'][:]
1901    if timestep is not None:
1902        times = fid.variables['time'][timestep]
1903    else:
1904        times = fid.variables['time'][:]
1905
1906    number_of_timesteps = fid.dimensions['number_of_timesteps']
1907    number_of_points = fid.dimensions['number_of_points']
1908   
1909    if origin is None:
1910
1911        # Get geo_reference
1912        # sww files don't have to have a geo_ref
1913        try:
1914            geo_reference = Geo_reference(NetCDFObject=fid)
1915        except AttributeError, e:
1916            geo_reference = Geo_reference() # Default georef object
1917
1918        xllcorner = geo_reference.get_xllcorner()
1919        yllcorner = geo_reference.get_yllcorner()
1920        zone = geo_reference.get_zone()
1921    else:
1922        zone = origin[0]
1923        xllcorner = origin[1]
1924        yllcorner = origin[2]
1925
1926
1927
1928    # FIXME: Refactor using code from Interpolation_function.statistics
1929    # (in interpolate.py)
1930    # Something like print swwstats(swwname)
1931    if verbose:
1932        print '------------------------------------------------'
1933        print 'Statistics of SWW file:'
1934        print '  Name: %s' %swwfile
1935        print '  Reference:'
1936        print '    Lower left corner: [%f, %f]'\
1937              %(xllcorner, yllcorner)
1938        if timestep is not None:
1939            print '    Time: %f' %(times)
1940        else:
1941            print '    Start time: %f' %fid.starttime[0]
1942        print '  Extent:'
1943        print '    x [m] in [%f, %f], len(x) == %d'\
1944              %(min(x.flat), max(x.flat), len(x.flat))
1945        print '    y [m] in [%f, %f], len(y) == %d'\
1946              %(min(y.flat), max(y.flat), len(y.flat))
1947        if timestep is not None:
1948            print '    t [s] = %f, len(t) == %d' %(times, 1)
1949        else:
1950            print '    t [s] in [%f, %f], len(t) == %d'\
1951              %(min(times), max(times), len(times))
1952        print '  Quantities [SI units]:'
1953        # Comment out for reduced memory consumption
1954        for name in ['stage', 'xmomentum', 'ymomentum']:
1955            q = fid.variables[name][:].flat
1956            if timestep is not None:
1957                q = q[timestep*len(x):(timestep+1)*len(x)]
1958            print '    %s in [%f, %f]' %(name, min(q), max(q))
1959        for name in ['elevation']:
1960            q = fid.variables[name][:].flat
1961            print '    %s in [%f, %f]' %(name, min(q), max(q))
1962           
1963    # Get quantity and reduce if applicable
1964    if verbose: print 'Processing quantity %s' %quantity
1965
1966    # Turn NetCDF objects into Numeric arrays
1967    try:
1968        q = fid.variables[quantity][:]
1969    except:
1972        quantity_dict = {}
1973        for name in fid.variables.keys():
1974            quantity_dict[name] = fid.variables[name][:] 
1975        #Convert quantity expression to quantities found in sww file   
1976        q = apply_expression_to_dictionary(quantity, quantity_dict)
1977    #print "q.shape",q.shape
1978    if len(q.shape) == 2:
1979        #q has a time component and needs to be reduced along
1980        #the temporal dimension
1981        if verbose: print 'Reducing quantity %s' %quantity
1982        q_reduced = zeros( number_of_points, Float )
1983       
1984        if timestep is not None:
1985            for k in range(number_of_points):
1986                q_reduced[k] = q[timestep,k] 
1987        else:
1988            for k in range(number_of_points):
1989                q_reduced[k] = reduction( q[:,k] )
1990
1991        q = q_reduced
1992
1993    #Post condition: Now q has dimension: number_of_points
1994    assert len(q.shape) == 1
1995    assert q.shape[0] == number_of_points
1996
1997    if verbose:
1998        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1999
2000
2001    #Create grid and update xll/yll corner and x,y
2002
2003    #Relative extent
2004    if easting_min is None:
2005        xmin = min(x)
2006    else:
2007        xmin = easting_min - xllcorner
2008
2009    if easting_max is None:
2010        xmax = max(x)
2011    else:
2012        xmax = easting_max - xllcorner
2013
2014    if northing_min is None:
2015        ymin = min(y)
2016    else:
2017        ymin = northing_min - yllcorner
2018
2019    if northing_max is None:
2020        ymax = max(y)
2021    else:
2022        ymax = northing_max - yllcorner
2023
2024
2025
2026    if verbose: print 'Creating grid'
2027    ncols = int((xmax-xmin)/cellsize)+1
2028    nrows = int((ymax-ymin)/cellsize)+1
2029
2030
2031    #New absolute reference and coordinates
2032    newxllcorner = xmin+xllcorner
2033    newyllcorner = ymin+yllcorner
2034
2035    x = x+xllcorner-newxllcorner
2036    y = y+yllcorner-newyllcorner
2037   
2038    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2039    assert len(vertex_points.shape) == 2
2040
2041    grid_points = zeros ( (ncols*nrows, 2), Float )
2042
2043
2044    for i in xrange(nrows):
2045        if format.lower() == 'asc':
2046            yg = i*cellsize
2047        else:
2048            # ERS format requires the y values in reverse order
2049            yg = (nrows-i)*cellsize
2050
2051        for j in xrange(ncols):
2052            xg = j*cellsize
2053            k = i*ncols + j
2054
2055            grid_points[k,0] = xg
2056            grid_points[k,1] = yg
2057
2058    #Interpolate
2059    from anuga.fit_interpolate.interpolate import Interpolate
2060
2061    # Remove loners from vertex_points, volumes here
2062    vertex_points, volumes = remove_lone_verts(vertex_points, volumes)
2065    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2066
2067    #Interpolate using quantity values
2068    if verbose: print 'Interpolating'
2069    grid_values = interp.interpolate(q, grid_points).flat
2070
2071
2072    if verbose:
2073        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
2074                                                      max(grid_values))
2075
2076    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2077    P = interp.mesh.get_boundary_polygon()
2078    outside_indices = outside_polygon(grid_points, P, closed=True)
2079
2080    for i in outside_indices:
2081        grid_values[i] = NODATA_value
2082
2083
2084
2085
2086    if format.lower() == 'ers':
2087        # setup ERS header information
2088        grid_values = reshape(grid_values,(nrows, ncols))
2089        header = {}
2090        header['datum'] = '"' + datum + '"'
2091        # FIXME The use of hardwired UTM and zone number needs to be made optional
2092        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
2093        header['projection'] = '"UTM-' + str(zone) + '"'
2094        header['coordinatetype'] = 'EN'
2095        if header['coordinatetype'] == 'LL':
2096            header['longitude'] = str(newxllcorner)
2097            header['latitude'] = str(newyllcorner)
2098        elif header['coordinatetype'] == 'EN':
2099            header['eastings'] = str(newxllcorner)
2100            header['northings'] = str(newyllcorner)
2101        header['nullcellvalue'] = str(NODATA_value)
2102        header['xdimension'] = str(cellsize)
2103        header['ydimension'] = str(cellsize)
2104        header['value'] = '"' + quantity + '"'
2105        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
2106
2107
2108        #Write
2109        if verbose: print 'Writing %s' %demfile
2110        import ermapper_grids
2111        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
2112
2113        fid.close()
2114        return basename_out
2114    else:
2115        #Write to Ascii format
2116
2117        #Write prj file
2118        prjfile = basename_out + '.prj'
2119
2120        if verbose: print 'Writing %s' %prjfile
2121        prjid = open(prjfile, 'w')
2122        prjid.write('Projection    %s\n' %'UTM')
2123        prjid.write('Zone          %d\n' %zone)
2124        prjid.write('Datum         %s\n' %datum)
2125        prjid.write('Zunits        NO\n')
2126        prjid.write('Units         METERS\n')
2127        prjid.write('Spheroid      %s\n' %datum)
2128        prjid.write('Xshift        %d\n' %false_easting)
2129        prjid.write('Yshift        %d\n' %false_northing)
2130        prjid.write('Parameters\n')
2131        prjid.close()
2132
2133
2134
2135        if verbose: print 'Writing %s' %demfile
2136
2137        ascid = open(demfile, 'w')
2138
2139        ascid.write('ncols         %d\n' %ncols)
2140        ascid.write('nrows         %d\n' %nrows)
2141        ascid.write('xllcorner     %d\n' %newxllcorner)
2142        ascid.write('yllcorner     %d\n' %newyllcorner)
2143        ascid.write('cellsize      %f\n' %cellsize)
2144        ascid.write('NODATA_value  %d\n' %NODATA_value)
2145
2146
2150
2151        for i in range(nrows):
2152            if verbose and i%((nrows+10)/10)==0:
2153                print 'Doing row %d of %d' %(i, nrows)
2154
2155            base_index = (nrows-i-1)*ncols
2156
2157            row = grid_values[base_index:base_index+ncols]
2158            s = array2string(row, max_line_width=sys.maxint)
2159            ascid.write(s[1:-1] + '\n')
2160
2161
2168
2169        #Close
2170        ascid.close()
2171        fid.close()
2172        return basename_out
2173
2174
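# Example usage of sww2dem (a sketch; 'myrun' is a hypothetical basename
# for myrun.sww):
#
#   # Maximum stage over all timesteps, on a 10m grid, as ArcView ASCII
#   sww2dem('myrun', basename_out='myrun_stage_max',
#           quantity='stage', reduction=max, cellsize=10, format='asc')
#
#   # Water depth at timestep 0, using an expression of stored quantities
#   sww2dem('myrun', basename_out='myrun_depth_0',
#           quantity='stage-elevation', timestep=0, format='ers')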
2175#Backwards compatibility
2176def sww2asc(basename_in, basename_out = None,
2177            quantity = None,
2178            timestep = None,
2179            reduction = None,
2180            cellsize = 10,
2181            verbose = False,
2182            origin = None):
2183    print 'sww2asc will soon be obsolete - please use sww2dem'
2184    sww2dem(basename_in,
2185            basename_out = basename_out,
2186            quantity = quantity,
2187            timestep = timestep,
2188            reduction = reduction,
2189            cellsize = cellsize,
2190            verbose = verbose,
2191            origin = origin,
2192            datum = 'WGS84',
2193            format = 'asc')
2194
2195def sww2ers(basename_in, basename_out = None,
2196            quantity = None,
2197            timestep = None,
2198            reduction = None,
2199            cellsize = 10,
2200            verbose = False,
2201            origin = None,
2202            datum = 'WGS84'):
2203    print 'sww2ers will soon be obsolete - please use sww2dem'
2204    sww2dem(basename_in,
2205            basename_out = basename_out,
2206            quantity = quantity,
2207            timestep = timestep,
2208            reduction = reduction,
2209            cellsize = cellsize,
2210            verbose = verbose,
2211            origin = origin,
2212            datum = datum,
2213            format = 'ers')
2214################################# END COMPATIBILITY ##############
2215
2216
2217
2218def sww2pts(basename_in, basename_out=None,
2219            data_points=None,
2220            quantity=None,
2221            timestep=None,
2222            reduction=None,
2223            NODATA_value=-9999,
2224            verbose=False,
2225            origin=None):
2227
2228
2229    """Read SWW file and convert to interpolated values at selected points
2230
2231    The parameter 'quantity' must be the name of an existing quantity or
2232    an expression involving existing quantities. The default is
2233    'elevation'.
2234
2235    If timestep (an index) is given, output quantity at that timestep.
2236
2237    If reduction is given, use that to reduce quantity over all timesteps.
2238
2239    data_points (Nx2 array) gives locations of points where quantity is to be computed.
2240   
2241    """
2242
2243    import sys
2244    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
2245    from Numeric import array2string
2246
2247    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
2248    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
2249
2250    from anuga.geospatial_data.geospatial_data import Geospatial_data
2251
2252    if quantity is None:
2253        quantity = 'elevation'
2254
2255    if reduction is None:
2256        reduction = max
2257
2258    if basename_out is None:
2259        basename_out = basename_in + '_%s' %quantity
2260
2261    swwfile = basename_in + '.sww'
2262    ptsfile = basename_out + '.pts'
2263
2264    # Read sww file
2265    if verbose: print 'Reading from %s' %swwfile
2266    from Scientific.IO.NetCDF import NetCDFFile
2267    fid = NetCDFFile(swwfile)
2268
2269    # Get extent and reference
2270    x = fid.variables['x'][:]
2271    y = fid.variables['y'][:]
2272    volumes = fid.variables['volumes'][:]
2273
2274    number_of_timesteps = fid.dimensions['number_of_timesteps']
2275    number_of_points = fid.dimensions['number_of_points']
2276    if origin is None:
2277
2278        # Get geo_reference
2279        # sww files don't have to have a geo_ref
2280        try:
2281            geo_reference = Geo_reference(NetCDFObject=fid)
2282        except AttributeError, e:
2283            geo_reference = Geo_reference() #Default georef object
2284
2285        xllcorner = geo_reference.get_xllcorner()
2286        yllcorner = geo_reference.get_yllcorner()
2287        zone = geo_reference.get_zone()
2288    else:
2289        zone = origin[0]
2290        xllcorner = origin[1]
2291        yllcorner = origin[2]
2292
2293
2294
2295    # FIXME: Refactor using code from file_function.statistics
2296    # Something like print swwstats(swwname)
2297    if verbose:
2298        x = fid.variables['x'][:]
2299        y = fid.variables['y'][:]
2300        times = fid.variables['time'][:]
2301        print '------------------------------------------------'
2302        print 'Statistics of SWW file:'
2303        print '  Name: %s' %swwfile
2304        print '  Reference:'
2305        print '    Lower left corner: [%f, %f]'\
2306              %(xllcorner, yllcorner)
2307        print '    Start time: %f' %fid.starttime[0]
2308        print '  Extent:'
2309        print '    x [m] in [%f, %f], len(x) == %d'\
2310              %(min(x.flat), max(x.flat), len(x.flat))
2311        print '    y [m] in [%f, %f], len(y) == %d'\
2312              %(min(y.flat), max(y.flat), len(y.flat))
2313        print '    t [s] in [%f, %f], len(t) == %d'\
2314              %(min(times), max(times), len(times))
2315        print '  Quantities [SI units]:'
2316        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2317            q = fid.variables[name][:].flat
2318            print '    %s in [%f, %f]' %(name, min(q), max(q))
2319
2320
2321
2322    # Get quantity and reduce if applicable
2323    if verbose: print 'Processing quantity %s' %quantity
2324
2325    # Turn NetCDF objects into Numeric arrays
2326    quantity_dict = {}
2327    for name in fid.variables.keys():
2328        quantity_dict[name] = fid.variables[name][:]
2329
2330
2331
2332    # Convert quantity expression to quantities found in sww file   
2333    q = apply_expression_to_dictionary(quantity, quantity_dict)
2334
2335
2336
2337    if len(q.shape) == 2:
2338        # q has a time component and needs to be reduced along
2339        # the temporal dimension
2340        if verbose: print 'Reducing quantity %s' %quantity
2341        q_reduced = zeros( number_of_points, Float )
2342
2343        for k in range(number_of_points):
2344            q_reduced[k] = reduction( q[:,k] )
2345
2346        q = q_reduced
2347
2348    # Post condition: Now q has dimension: number_of_points
2349    assert len(q.shape) == 1
2350    assert q.shape[0] == number_of_points
2351
2352
2353    if verbose:
2354        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
2355
2356
2357    # Create grid and update xll/yll corner and x,y
2358    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2359    assert len(vertex_points.shape) == 2
2360
2361    # Interpolate
2362    from anuga.fit_interpolate.interpolate import Interpolate
2363    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2364
2365    # Interpolate using quantity values
2366    if verbose: print 'Interpolating'
2367    interpolated_values = interp.interpolate(q, data_points).flat
2368
2369
2370    if verbose:
2371        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
2372                                                      max(interpolated_values))
2373
2374    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2375    P = interp.mesh.get_boundary_polygon()
2376    outside_indices = outside_polygon(data_points, P, closed=True)
2377
2378    for i in outside_indices:
2379        interpolated_values[i] = NODATA_value
2380
2381
2382    # Store results   
2383    G = Geospatial_data(data_points=data_points,
2384                        attributes=interpolated_values)
2385
2386    G.export_points_file(ptsfile, absolute = True)
2387
2388    fid.close()
2389
2390
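# Example usage of sww2pts (a sketch; all names are hypothetical):
#
#   from Numeric import array
#   gauge_points = array([[354500.0, 6254500.0],
#                         [355000.0, 6255000.0]])
#   sww2pts('myrun', basename_out='myrun_depths',
#           data_points=gauge_points,
#           quantity='stage-elevation', reduction=max)
#   # -> writes myrun_depths.pts with one attribute value per point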
2391def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2392                                  use_cache = False,
2393                                  verbose = False):
2394    """Read Digitial Elevation model from the following ASCII format (.asc)
2395
2396    Example:
2397
2398    ncols         3121
2399    nrows         1800
2400    xllcorner     722000
2401    yllcorner     5893000
2402    cellsize      25
2403    NODATA_value  -9999
2404    138.3698 137.4194 136.5062 135.5558 ..........
2405
2406    Convert basename_in + '.asc' to NetCDF format (.dem)
2407    mimicking the ASCII format closely.
2408
2409
2410    An accompanying file with same basename_in but extension .prj must exist
2411    and is used to fix the UTM zone, datum, false northings and eastings.
2412
2413    The prj format is assumed to be as
2414
2415    Projection    UTM
2416    Zone          56
2417    Datum         WGS84
2418    Zunits        NO
2419    Units         METERS
2420    Spheroid      WGS84
2421    Xshift        0.0000000000
2422    Yshift        10000000.0000000000
2423    Parameters
2424    """
2425
2426
2427
2428    kwargs = {'basename_out': basename_out, 'verbose': verbose}
2429
2430    if use_cache is True:
2431        from caching import cache
2432        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
2433                       dependencies = [basename_in + '.asc',
2434                                       basename_in + '.prj'],
2435                       verbose = verbose)
2436
2437    else:
2438        result = _convert_dem_from_ascii2netcdf(basename_in, **kwargs)
2439
2440    return result
2441
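# Example usage (a sketch; 'topography' is a hypothetical basename for
# topography.asc plus its mandatory companion topography.prj):
#
#   convert_dem_from_ascii2netcdf('topography', use_cache=True, verbose=True)
#   # -> writes topography.dem (NetCDF) mirroring the ASCII grid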
2442
2443
2444
2445
2446
2447def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2448                                  verbose = False):
2449    """Read Digitial Elevation model from the following ASCII format (.asc)
2450
2451    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2452    """
2453
2454    import os
2455    from Scientific.IO.NetCDF import NetCDFFile
2456    from Numeric import Float, array
2457
2459    root = basename_in
2460
2461    ###########################################
2462    # Read Meta data
2463    if verbose: print 'Reading METADATA from %s' %(root + '.prj')
2464    metadatafile = open(root + '.prj')
2465    metalines = metadatafile.readlines()
2466    metadatafile.close()
2467
2468    L = metalines[0].strip().split()
2469    assert L[0].strip().lower() == 'projection'
2470    projection = L[1].strip()                   #TEXT
2471
2472    L = metalines[1].strip().split()
2473    assert L[0].strip().lower() == 'zone'
2474    zone = int(L[1].strip())
2475
2476    L = metalines[2].strip().split()
2477    assert L[0].strip().lower() == 'datum'
2478    datum = L[1].strip()                        #TEXT
2479
2480    L = metalines[3].strip().split()
2481    assert L[0].strip().lower() == 'zunits'     #IGNORE
2482    zunits = L[1].strip()                       #TEXT
2483
2484    L = metalines[4].strip().split()
2485    assert L[0].strip().lower() == 'units'
2486    units = L[1].strip()                        #TEXT
2487
2488    L = metalines[5].strip().split()
2489    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2490    spheroid = L[1].strip()                     #TEXT
2491
2492    L = metalines[6].strip().split()
2493    assert L[0].strip().lower() == 'xshift'
2494    false_easting = float(L[1].strip())
2495
2496    L = metalines[7].strip().split()
2497    assert L[0].strip().lower() == 'yshift'
2498    false_northing = float(L[1].strip())
2499
2501
2502
2503    ###########################################
2504    #Read DEM data
2505
2506    datafile = open(basename_in + '.asc')
2507
2508    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2509    lines = datafile.readlines()
2510    datafile.close()
2511
2512    if verbose: print 'Got %d lines' %len(lines)
2513
2514    ncols = int(lines[0].split()[1].strip())
2515    nrows = int(lines[1].split()[1].strip())
2516
2517    # Do cellsize (line 4) before line 2 and 3
2518    cellsize = float(lines[4].split()[1].strip())   
2519
2520    # Checks suggested by Joaquim Luis
2521    # Our internal representation of xllcorner
2522    # and yllcorner is non-standard.
2523    xref = lines[2].split()
2524    if xref[0].strip() == 'xllcorner':
2525        xllcorner = float(xref[1].strip()) # + 0.5*cellsize # Correct offset
2526    elif xref[0].strip() == 'xllcenter':
2527        xllcorner = float(xref[1].strip())
2528    else:
2529        msg = 'Unknown keyword: %s' %xref[0].strip()
2530        raise Exception, msg
2531
2532    yref = lines[3].split()
2533    if yref[0].strip() == 'yllcorner':
2534        yllcorner = float(yref[1].strip()) # + 0.5*cellsize # Correct offset
2535    elif yref[0].strip() == 'yllcenter':
2536        yllcorner = float(yref[1].strip())
2537    else:
2538        msg = 'Unknown keyword: %s' %yref[0].strip()
2539        raise Exception, msg
2540       
2541
2542    NODATA_value = int(lines[5].split()[1].strip())
2543
2544    assert len(lines) == nrows + 6
2545
2546
2547    ##########################################
2548
2549
2550    if basename_out is None:
2551        netcdfname = root + '.dem'
2552    else:
2553        netcdfname = basename_out + '.dem'
2554
2555    if verbose: print 'Store to NetCDF file %s' %netcdfname
2556    # NetCDF file definition
2557    fid = NetCDFFile(netcdfname, 'w')
2558
2559    #Create new file
2560    fid.institution = 'Geoscience Australia'
2561    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2562                      'of spatial point data'
2563
2564    fid.ncols = ncols
2565    fid.nrows = nrows
2566    fid.xllcorner = xllcorner
2567    fid.yllcorner = yllcorner
2568    fid.cellsize = cellsize
2569    fid.NODATA_value = NODATA_value
2570
2571    fid.zone = zone
2572    fid.false_easting = false_easting
2573    fid.false_northing = false_northing
2574    fid.projection = projection
2575    fid.datum = datum
2576    fid.units = units
2577
2578
2579    # dimension definitions
2580    fid.createDimension('number_of_rows', nrows)
2581    fid.createDimension('number_of_columns', ncols)
2582
2583    # variable definitions
2584    fid.createVariable('elevation', Float, ('number_of_rows',
2585                                            'number_of_columns'))
2586
2587    # Get handles to the variables
2588    elevation = fid.variables['elevation']
2589
2590    #Store data
2591    n = len(lines[6:])
2592    for i, line in enumerate(lines[6:]):
2593        fields = line.split()
2594        if verbose and i%((n+10)/10)==0:
2595            print 'Processing row %d of %d' %(i, nrows)
2596
2597        elevation[i, :] = array([float(x) for x in fields])
2598
2599    fid.close()
2600
2601
2602
2603
2604
2605def ferret2sww(basename_in, basename_out = None,
2606               verbose = False,
2607               minlat = None, maxlat = None,
2608               minlon = None, maxlon = None,
2609               mint = None, maxt = None, mean_stage = 0,
2610               origin = None, zscale = 1,
2611               fail_on_NaN = True,
2612               NaN_filler = 0,
2613               elevation = None,
2614               inverted_bathymetry = True
2615               ): #FIXME: Bathymetry should be obtained
2616                                  #from MOST somehow.
2617                                  #Alternatively from elsewhere
2618                                  #or, as a last resort,
2619                                  #specified here.
2620                                  #The value of -100 will work
2621                                  #for the Wollongong tsunami
2622                                  #scenario but is very hacky
2623    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
2624    sww format native to abstract_2d_finite_volumes.
2625
2626    Specify only basename_in and read files of the form
2627    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2628    relative height, x-velocity and y-velocity, respectively.
2629
2630    Also convert latitude and longitude to UTM. All coordinates are
2631    assumed to be given in the GDA94 datum.
2632
2633    min's and max's: If omitted, the full extent is used.
2634    To include a value, min may equal it, while max must exceed it.
2635    Lat and lon are assumed to be in decimal degrees.
2636
2637    origin is a 3-tuple with geo referenced
2638    UTM coordinates (zone, easting, northing)
2639
2640    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2641    which means that longitude is the fastest
2642    varying dimension (row major order, so to speak)
2643
2644    ferret2sww uses grid points as vertices in a triangular grid
2645    counting vertices from lower left corner upwards, then right
2646    """
2647
2648    import os
2649    from Scientific.IO.NetCDF import NetCDFFile
2650    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2651    from Numeric import allclose, around
2652
2653    precision = Float
2654
2655    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2656
2657    if minlat is not None:
2658        assert -90 < minlat < 90 , msg
2659    if maxlat is not None:
2660        assert -90 < maxlat < 90 , msg
2661        if minlat is not None:
2662            assert maxlat > minlat
2663    if minlon is not None:
2664        assert -180 < minlon < 180 , msg
2665    if maxlon is not None:
2666        assert -180 < maxlon < 180 , msg
2667        if minlon is not None:
2668            assert maxlon > minlon
2669       
2670
2671
2672    #Get NetCDF data
2673    if verbose: print 'Reading files %s_*.nc' %basename_in
2674    #print "basename_in + '_ha.nc'",basename_in + '_ha.nc'
2675    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2676    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2677    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2678    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2679
2680    if basename_out is None:
2681        swwname = basename_in + '.sww'
2682    else:
2683        swwname = basename_out + '.sww'
2684
2685    # Get dimensions of file_h
2686    for dimension in file_h.dimensions.keys():
2687        if dimension[:3] == 'LON':
2688            dim_h_longitude = dimension
2689        if dimension[:3] == 'LAT':
2690            dim_h_latitude = dimension
2691        if dimension[:4] == 'TIME':
2692            dim_h_time = dimension
2693
2697
2698    times = file_h.variables[dim_h_time]
2699    latitudes = file_h.variables[dim_h_latitude]
2700    longitudes = file_h.variables[dim_h_longitude]
2701
2706    # get dimensions for file_e
2707    for dimension in file_e.dimensions.keys():
2708        if dimension[:3] == 'LON':
2709            dim_e_longitude = dimension
2710        if dimension[:3] == 'LAT':
2711            dim_e_latitude = dimension
2712
2713    # get dimensions for file_u
2714    for dimension in file_u.dimensions.keys():
2715        if dimension[:3] == 'LON':
2716            dim_u_longitude = dimension
2717        if dimension[:3] == 'LAT':
2718            dim_u_latitude = dimension
2719        if dimension[:4] == 'TIME':
2720            dim_u_time = dimension
2721           
2722    # get dimensions for file_v
2723    for dimension in file_v.dimensions.keys():
2724        if dimension[:3] == 'LON':
2725            dim_v_longitude = dimension
2726        if dimension[:3] == 'LAT':
2727            dim_v_latitude = dimension
2728        if dimension[:4] == 'TIME':
2729            dim_v_time = dimension
2730
2731
2732    # Precision used by MOST for lat/lon is 4 or 5 decimals
2733    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2734    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2735
2736    # Check that files are compatible
2737    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2738    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2739    assert allclose(latitudes, e_lat)
2740
2741    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2742    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2743    assert allclose(longitudes, e_lon)
2744
2745    if mint is None:
2746        jmin = 0
2747        mint = times[0]       
2748    else:
2749        jmin = searchsorted(times, mint)
2750       
2751    if maxt is None:
2752        jmax = len(times)
2753        maxt = times[-1]
2754    else:
2755        jmax = searchsorted(times, maxt)
2756
2757    #print "latitudes[:]",latitudes[:]
2758    #print "longitudes[:]",longitudes [:]
2759    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],
2760                                                  longitudes[:],
2761                                                  minlat, maxlat,
2762                                                  minlon, maxlon)
2763
2764
2765    times = times[jmin:jmax]
2766    latitudes = latitudes[kmin:kmax]
2767    longitudes = longitudes[lmin:lmax]
2768
2769    #print "latitudes[:]",latitudes[:]
2770    #print "longitudes[:]",longitudes [:]
2771
2772    if verbose: print 'cropping'
2773    zname = 'ELEVATION'
2774
2775    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2776    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2777    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2778    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2779
2793
2794
2795
2796    #Get missing values
2797    nan_ha = file_h.variables['HA'].missing_value[0]
2798    nan_ua = file_u.variables['UA'].missing_value[0]
2799    nan_va = file_v.variables['VA'].missing_value[0]
2800    if hasattr(file_e.variables[zname],'missing_value'):
2801        nan_e  = file_e.variables[zname].missing_value[0]
2802    else:
2803        nan_e = None
2804
2805    #Cleanup
2806    from Numeric import sometrue
2807
2808    missing = (amplitudes == nan_ha)
2809    if sometrue (missing):
2810        if fail_on_NaN:
2811            msg = 'NetCDFFile %s contains missing values'\
2812                  %(basename_in+'_ha.nc')
2813            raise DataMissingValuesError, msg
2814        else:
2815            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2816
2817    missing = (uspeed == nan_ua)
2818    if sometrue (missing):
2819        if fail_on_NaN:
2820            msg = 'NetCDFFile %s contains missing values'\
2821                  %(basename_in+'_ua.nc')
2822            raise DataMissingValuesError, msg
2823        else:
2824            uspeed = uspeed*(missing==0) + missing*NaN_filler
2825
2826    missing = (vspeed == nan_va)
2827    if sometrue (missing):
2828        if fail_on_NaN:
2829            msg = 'NetCDFFile %s contains missing values'\
2830                  %(basename_in+'_va.nc')
2831            raise DataMissingValuesError, msg
2832        else:
2833            vspeed = vspeed*(missing==0) + missing*NaN_filler
2834
2835
2836    missing = (elevations == nan_e)
2837    if sometrue (missing):
2838        if fail_on_NaN:
2839            msg = 'NetCDFFile %s contains missing values'\
2840                  %(basename_in+'_e.nc')
2841            raise DataMissingValuesError, msg
2842        else:
2843            elevations = elevations*(missing==0) + missing*NaN_filler
2844
2845    #######
2846
2847
2848
2849    number_of_times = times.shape[0]
2850    number_of_latitudes = latitudes.shape[0]
2851    number_of_longitudes = longitudes.shape[0]
2852
2853    assert amplitudes.shape[0] == number_of_times
2854    assert amplitudes.shape[1] == number_of_latitudes
2855    assert amplitudes.shape[2] == number_of_longitudes
2856
2857    if verbose:
2858        print '------------------------------------------------'
2859        print 'Statistics:'
2860        print '  Extent (lat/lon):'
2861        print '    lat in [%f, %f], len(lat) == %d'\
2862              %(min(latitudes.flat), max(latitudes.flat),
2863                len(latitudes.flat))
2864        print '    lon in [%f, %f], len(lon) == %d'\
2865              %(min(longitudes.flat), max(longitudes.flat),
2866                len(longitudes.flat))
2867        print '    t in [%f, %f], len(t) == %d'\
2868              %(min(times.flat), max(times.flat), len(times.flat))
2869
2870        q = amplitudes.flat
2871        name = 'Amplitudes (ha) [cm]'
2872        print '    %s in [%f, %f]' %(name, min(q), max(q))
2873
2874        q = uspeed.flat
2875        name = 'Speeds (ua) [cm/s]'
2876        print '    %s in [%f, %f]' %(name, min(q), max(q))
2877
2878        q = vspeed.flat
2879        name = 'Speeds (va) [cm/s]'
2880        print '    %s in [%f, %f]' %(name, min(q), max(q))
2881
2882        q = elevations.flat
2883        name = 'Elevations (e) [m]'
2884        print '    %s in [%f, %f]' %(name, min(q), max(q))
2885
2886
2888    number_of_points = number_of_latitudes*number_of_longitudes
2889    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2890
2891
2892    file_h.close()
2893    file_u.close()
2894    file_v.close()
2895    file_e.close()
2896
2897
2898    # NetCDF file definition
2899    outfile = NetCDFFile(swwname, 'w')
2900
2901    description = 'Converted from Ferret files: %s, %s, %s, %s'\
2902                  %(basename_in + '_ha.nc',
2903                    basename_in + '_ua.nc',
2904                    basename_in + '_va.nc',
2905                    basename_in + '_e.nc')
2906   
2907    # Create new file
2908    starttime = times[0]
2909   
2910    sww = Write_sww()
2911    sww.store_header(outfile, times, number_of_volumes,
2912                     number_of_points, description=description,
2913                     verbose=verbose,sww_precision=Float)
2914
2915    # Store
2916    from anuga.coordinate_transforms.redfearn import redfearn
2917    x = zeros(number_of_points, Float)  #Easting
2918    y = zeros(number_of_points, Float)  #Northing
2919
2920
2921    if verbose: print 'Making triangular grid'
2922
2923    # Check zone boundaries
2924    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2925
2926    vertices = {}
2927    i = 0
2928    for k, lat in enumerate(latitudes):       #Y direction
2929        for l, lon in enumerate(longitudes):  #X direction
2930
2931            vertices[l,k] = i
2932
2933            zone, easting, northing = redfearn(lat,lon)
2934
2935            msg = 'Zone boundary crossed at longitude = %f' %lon
2936            #assert zone == refzone, msg
2938            x[i] = easting
2939            y[i] = northing
2940            i += 1
2941
2942    #Construct 2 triangles per 'rectangular' element
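    # Vertex layout for one grid cell, with k indexing latitude (Y) and
    # l indexing longitude (X). The cell is split along the v2-v3
    # diagonal into an upper and a lower triangle:
    #
    #   v1=(l,k+1)      v3=(l+1,k+1)
    #       +--------------+
    #       |  upper     / |
    #       |          /   |
    #       |        /     |
    #       |      /       |
    #       |    /  lower  |
    #       +--------------+
    #   v2=(l,k)        v4=(l+1,k)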
2943    volumes = []
2944    for l in range(number_of_longitudes-1):    #X direction
2945        for k in range(number_of_latitudes-1): #Y direction
2946            v1 = vertices[l,k+1]
2947            v2 = vertices[l,k]
2948            v3 = vertices[l+1,k+1]
2949            v4 = vertices[l+1,k]
2950
2951            volumes.append([v1,v2,v3]) #Upper element
2952            volumes.append([v4,v3,v2]) #Lower element
2953
2954    volumes = array(volumes)
2955
2956    if origin is None:
2957        origin = Geo_reference(refzone,min(x),min(y))
2958    geo_ref = write_NetCDF_georeference(origin, outfile)
2959   
2960    if elevation is not None:
2961        z = elevation
2962    else:
2963        if inverted_bathymetry:
2964            z = -1*elevations
2965        else:
2966            z = elevations
2967    #FIXME: z should be obtained from MOST and passed in here
2968
2969    #FIXME use the Write_sww instance(sww) to write this info
2970    from Numeric import resize
2971    z = resize(z,outfile.variables['z'][:].shape)
2972    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
2973    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
2974    outfile.variables['z'][:] = z             #FIXME HACK for backwards compat.
2975    outfile.variables['elevation'][:] = z
2976    outfile.variables['volumes'][:] = volumes.astype(Int32) #For Opteron 64
2977
2978
2979
2980    #Time stepping
2981    stage = outfile.variables['stage']
2982    xmomentum = outfile.variables['xmomentum']
2983    ymomentum = outfile.variables['ymomentum']
2984
2985    if verbose: print 'Converting quantities'
2986    n = len(times)
2987    for j in range(n):
2988        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2989        i = 0
2990        for k in range(number_of_latitudes):      #Y direction
2991            for l in range(number_of_longitudes): #X direction
2992                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2993                stage[j,i] = w
2994                h = w - z[i]
2995                xmomentum[j,i] = uspeed[j,k,l]/100*h
2996                ymomentum[j,i] = vspeed[j,k,l]/100*h
2997                i += 1
2998
3000
3001    #FIXME: Refactor using code from file_function.statistics
3002    #Something like print swwstats(swwname)
3003    if verbose:
3004        x = outfile.variables['x'][:]
3005        y = outfile.variables['y'][:]
3006        print '------------------------------------------------'
3007        print 'Statistics of output file:'
3008        print '  Name: %s' %swwname
3009        print '  Reference:'
3010        print '    Lower left corner: [%f, %f]'\
3011              %(geo_ref.get_xllcorner(), geo_ref.get_yllcorner())
3012        print '    Start time: %f' %starttime
3013        print '    Min time: %f' %mint
3014        print '    Max time: %f' %maxt
3015        print '  Extent:'
3016        print '    x [m] in [%f, %f], len(x) == %d'\
3017              %(min(x.flat), max(x.flat), len(x.flat))
3018        print '    y [m] in [%f, %f], len(y) == %d'\
3019              %(min(y.flat), max(y.flat), len(y.flat))
3020        print '    t [s] in [%f, %f], len(t) == %d'\
3021              %(min(times), max(times), len(times))
3022        print '  Quantities [SI units]:'
3023        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
3024            q = outfile.variables[name][:].flat
3025            print '    %s in [%f, %f]' %(name, min(q), max(q))
3026
3027
3028
3029    outfile.close()
3030
3031
3032
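# Example usage of ferret2sww (a sketch; 'most_output' is a hypothetical
# basename for most_output_ha.nc, _ua.nc, _va.nc and _e.nc):
#
#   ferret2sww('most_output', basename_out='boundary',
#              minlat=-34.5, maxlat=-33.5, minlon=150.5, maxlon=151.5,
#              mean_stage=0.0, zscale=1, verbose=True)
#   # -> writes boundary.sww suitable for use as a boundary condition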
3033
3034
3035def timefile2netcdf(filename, quantity_names=None, time_as_seconds=False):
3036    """Template for converting typical text files with time series to
3037    NetCDF tms file.
3038
3039
3040    The file format is assumed to be either two fields separated by a comma:
3041
3042        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
3043
3044    E.g
3045
3046      31/08/04 00:00:00, 1.328223 0 0
3047      31/08/04 00:15:00, 1.292912 0 0
3048
3049    or time (seconds), value0 value1 value2 ...
3050   
3051      0.0, 1.328223 0 0
3052      0.1, 1.292912 0 0
3053
3054    will provide a time dependent function f(t) with three attributes
3055
3056    filename is assumed to be the rootname, with extension .txt for the
3057    input and .tms for the NetCDF output
3057    """
3058
3059    import time, calendar
3060    from Numeric import array
3061    from anuga.config import time_format
3062    from anuga.utilities.numerical_tools import ensure_numeric
3063
3064
3065
3066    fid = open(filename + '.txt')
3067    line = fid.readline()
3068    fid.close()
3069
3070    fields = line.split(',')
3071    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
3072    assert len(fields) == 2, msg
3073
3074    if not time_as_seconds:
3075        try:
3076            starttime = calendar.timegm(time.strptime(fields[0], time_format))
3077        except ValueError:
3078            msg = 'First field in file %s must be' %filename
3079            msg += ' date-time with format %s.\n' %time_format
3080            msg += 'I got %s instead.' %fields[0]
3081            raise DataTimeError, msg
3082    else:
3083        try:
3084            starttime = float(fields[0])
3085        except ValueError:
3086            msg = 'Bad time format: %s' %fields[0]
3087            raise DataTimeError, msg
3088
3089
3090    #Split values
3091    values = []
3092    for value in fields[1].split():
3093        values.append(float(value))
3094
3095    q = ensure_numeric(values)
3096
3097    msg = 'ERROR: File must contain at least one independent value'
3098    assert len(q.shape) == 1, msg
3099
3100
3101
3102    #Read times proper
3103    from Numeric import zeros, Float, alltrue
3104    from anuga.config import time_format
3105    import time, calendar
3106
3107    fid = open(filename + '.txt')
3108    lines = fid.readlines()
3109    fid.close()
3110
3111    N = len(lines)
3112    d = len(q)
3113
3114    T = zeros(N, Float)       #Time
3115    Q = zeros((N, d), Float)  #Values
3116
3117    for i, line in enumerate(lines):
3118        fields = line.split(',')
3119        if not time_as_seconds:
3120            realtime = calendar.timegm(time.strptime(fields[0], time_format))
3121        else:
3122            realtime = float(fields[0])
3123        T[i] = realtime - starttime
3124
3125        for j, value in enumerate(fields[1].split()):
3126            Q[i, j] = float(value)
3127
3128    msg = 'File %s must list time as a monotonically ' %filename
3129    msg += 'increasing sequence'
3130    assert alltrue( T[1:] - T[:-1] > 0 ), msg
3131
3132    #Create NetCDF file
3133    from Scientific.IO.NetCDF import NetCDFFile
3134
3135    fid = NetCDFFile(filename + '.tms', 'w')
3136
3137
3138    fid.institution = 'Geoscience Australia'
3139    fid.description = 'Time series'
3140
3141
3142    #Reference point
3143    #Start time in seconds since the epoch (midnight 1/1/1970)
3144    #FIXME: Use Georef
3145    fid.starttime = starttime
3146
3147    # dimension definitions
3150
3151
3152    fid.createDimension('number_of_timesteps', len(T))
3153
3154    fid.createVariable('time', Float, ('number_of_timesteps',))
3155
3156    fid.variables['time'][:] = T
3157
3158    for i in range(Q.shape[1]):
3159        try:
3160            name = quantity_names[i]
3161        except (TypeError, IndexError):
3162            name = 'Attribute%d' %i
3163
3164        fid.createVariable(name, Float, ('number_of_timesteps',))
3165        fid.variables[name][:] = Q[:,i]
3166
3167    fid.close()
3168
3169
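# Example usage of timefile2netcdf (a sketch; 'gauge' is a hypothetical
# rootname for gauge.txt):
#
#   fid = open('gauge.txt', 'w')
#   fid.write('0.0, 1.328223 0 0\n900.0, 1.292912 0 0\n')
#   fid.close()
#   timefile2netcdf('gauge',
#                   quantity_names=['stage', 'xmomentum', 'ymomentum'],
#                   time_as_seconds=True)
#   # -> writes gauge.tms with a 'time' variable and one variable
#   #    per quantity name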
3170def extent_sww(file_name):
3171    """
3172    Read in an sww file.
3173
3174    Input;
3175    file_name - the sww file
3176
3177    Output;
3178    z - Vector of bed elevation
3179    volumes - Array.  Each row has 3 values, representing
3180    the vertices that define the volume
3181    time - Vector of the times where there is stage information
3182    stage - array with respect to time and vertices (x,y)
3183    """
3184
3185
3186    from Scientific.IO.NetCDF import NetCDFFile
3187
3188    #Check contents
3189    #Get NetCDF
3190    fid = NetCDFFile(file_name, 'r')
3191
3192    # Get the variables
3193    x = fid.variables['x'][:]
3194    y = fid.variables['y'][:]
3195    stage = fid.variables['stage'][:]
3196    #print "stage",stage
3197    #print "stage.shap",stage.shape
3198    #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
3199    #print "min(stage)",min(stage)
3200
3201    fid.close()
3202
3203    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
3204
3205
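# Example usage (a sketch; 'myrun.sww' is a hypothetical file):
#
#   xmin, xmax, ymin, ymax, stage_min, stage_max = extent_sww('myrun.sww')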
3206def sww2domain(filename, boundary=None, t=None,
3207               fail_if_NaN=True ,NaN_filler=0,
3208               verbose = False, very_verbose = False):
3209    """
3210    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
3211
3212    Boundary is not recommended if domain.smooth is not selected, as it
3213    uses unique coordinates, but not unique boundaries. This means that
3214    the boundary file will not be compatible with the coordinates, and will
3215    give a different final boundary, or crash.
3216    """
3217    NaN=9.969209968386869e+036
3218    #initialise NaN.
3219
3220    from Scientific.IO.NetCDF import NetCDFFile
3221    from shallow_water import Domain
3222    from Numeric import asarray, transpose, resize
3223
3224    if verbose: print 'Reading from ', filename
3225    fid = NetCDFFile(filename, 'r')    #Open existing file for read
3226    time = fid.variables['time']       #Timesteps
3227    if t is None:
3228        t = time[-1]
3229    time_interp = get_time_interp(time,t)
3230
3231    # Get the variables as Numeric arrays
3232    x = fid.variables['x'][:]             #x-coordinates of vertices
3233    y = fid.variables['y'][:]             #y-coordinates of vertices
3234    elevation = fid.variables['elevation']     #Elevation
3235    stage = fid.variables['stage']     #Water level
3236    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
3237    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
3238
3239    starttime = fid.starttime[0]
3240    volumes = fid.variables['volumes'][:] #Connectivity
3241    coordinates = transpose(asarray([x.tolist(),y.tolist()]))
3242    #FIXME (Ole): Something like this might be better: concatenate( (x, y), axis=1 )
3243    # or concatenate( (x[:,NewAxis],x[:,NewAxis]), axis=1 )
3244
3245    conserved_quantities = []
3246    interpolated_quantities = {}
3247    other_quantities = []
3248
3249    # get geo_reference
3250    #sww files don't have to have a geo_ref
3251    try:
3252        geo_reference = Geo_reference(NetCDFObject=fid)
3253    except: #AttributeError, e:
3254        geo_reference = None
3255
3256    if verbose: print '    getting quantities'
3257    for quantity in fid.variables.keys():
3258        dimensions = fid.variables[quantity].dimensions
3259        if 'number_of_timesteps' in dimensions:
3260            conserved_quantities.append(quantity)
3261            interpolated_quantities[quantity]=\
3262                  interpolated_quantity(fid.variables[quantity][:],time_interp)
3263        else: other_quantities.append(quantity)
3264
3265    other_quantities.remove('x')
3266    other_quantities.remove('y')
3267    other_quantities.remove('z')
3268    other_quantities.remove('volumes')
3269    try:
3270        other_quantities.remove('stage_range')
3271        other_quantities.remove('xmomentum_range')
3272        other_quantities.remove('ymomentum_range')
3273        other_quantities.remove('elevation_range')
3274    except:
3275        pass
3276       
3277
3278    conserved_quantities.remove('time')
3279
3280    if verbose: print '    building domain'
3281    #    From domain.Domain:
3282    #    domain = Domain(coordinates, volumes,\
3283    #                    conserved_quantities = conserved_quantities,\
3284    #                    other_quantities = other_quantities,zone=zone,\
3285    #                    xllcorner=xllcorner, yllcorner=yllcorner)
3286
3287    #   From shallow_water.Domain:
3288    coordinates=coordinates.tolist()
3289    volumes=volumes.tolist()
3290    #FIXME:should this be in mesh?(peter row)
3291    if fid.smoothing == 'Yes': unique = False
3292    else: unique = True
3293    if unique:
3294        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
3295
3296
3297    try:
3298        domain = Domain(coordinates, volumes, boundary)
3299    except AssertionError, e:
3300        fid.close()
3301        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
3302        raise DataDomainError, msg
3303
3304    if not boundary is None:
3305        domain.boundary = boundary
3306
3307    domain.geo_reference = geo_reference
3308
3309    domain.starttime=float(starttime)+float(t)
3310    domain.time=0.0
3311
3312    for quantity in other_quantities:
3313        try:
3314            NaN = fid.variables[quantity].missing_value
3315        except:
3316            pass #quantity has no missing_value number
3317        X = fid.variables[quantity][:]
3318        if very_verbose:
3319            print '       ',quantity
3320            print '        NaN =',NaN
3321            print '        max(X)'
3322            print '       ',max(X)
3323            print '        max(X)==NaN'
3324            print '       ',max(X)==NaN
3325            print ''
3326        if (max(X)==NaN) or (min(X)==NaN):
3327            if fail_if_NaN:
3328                msg = 'quantity "%s" contains no_data entry'%quantity
3329                raise DataMissingValuesError, msg
3330            else:
3331                data = (X != NaN)
3332                X = (X*data)+(data==0)*NaN_filler
3333        if unique:
3334            X = resize(X,(len(X)/3,3))
3335        domain.set_quantity(quantity,X)
3336    #
3337    for quantity in conserved_quantities:
3338        try:
3339            NaN = fid.variables[quantity].missing_value
3340        except:
3341            pass #quantity has no missing_value number
3342        X = interpolated_quantities[quantity]
3343        if very_verbose:
3344            print '       ',quantity
3345            print '        NaN =',NaN
3346            print '        max(X)'
3347            print '       ',max(X)
3348            print '        max(X)==NaN'
3349            print '       ',max(X)==NaN
3350            print ''
3351        if (max(X)==NaN) or (min(X)==NaN):
3352            if fail_if_NaN:
3353                msg = 'quantity "%s" contains no_data entry'%quantity
3354                raise DataMissingValuesError, msg
3355            else:
3356                data = (X != NaN)
3357                X = (X*data)+(data==0)*NaN_filler
3358        if unique:
3359            X = resize(X,(X.shape[0]/3,3))
3360        domain.set_quantity(quantity,X)
3361
3362    fid.close()
3363    return domain
3364
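# Illustrative usage sketch ('model.sww' is a hypothetical file name, and
# the accessors below assume the usual Domain/Quantity interface):
def _example_sww2domain():
    domain = sww2domain('model.sww')   # t defaults to the last stored time
    stage = domain.get_quantity('stage')
    print 'stage at centroids:', stage.get_values(location='centroids')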
3365
3366def interpolated_quantity(saved_quantity,time_interp):
3367
3368    #given an index and ratio, interpolate quantity with respect to time.
3369    index,ratio = time_interp
3370    Q = saved_quantity
3371    if ratio > 0:
3372        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
3373    else:
3374        q = Q[index]
3375    #Return vector of interpolated values
3376    return q
3377
3378
3379def get_time_interp(time,t=None):
3380    #Finds the ratio and index for time interpolation.
3381    #It is borrowed from previous abstract_2d_finite_volumes code.
3382    if t is None:
3383        t=time[-1]
3384        index = -1
3385        ratio = 0.
3386    else:
3387        T = time
3388        tau = t
3389        index=0
3390        msg = 'Time interval derived from file %s [%s:%s]'\
3391            %('FIXMEfilename', T[0], T[-1])
3392        msg += ' does not match model time: %s' %tau
3393        if tau < time[0]: raise DataTimeError, msg
3394        if tau > time[-1]: raise DataTimeError, msg
3395        while tau > time[index]: index += 1
3396        while tau < time[index]: index -= 1
3397        if tau == time[index]:
3398            #Protect against case where tau == time[-1] (last time)
3399            # - also works in general when tau == time[i]
3400            ratio = 0
3401        else:
3402            #t is now between index and index+1
3403            ratio = (tau - time[index])/(time[index+1] - time[index])
3404    return (index,ratio)
3405
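# Worked example (values chosen for illustration): get_time_interp returns
# (index, ratio) such that t lies between time[index] and time[index+1];
# interpolated_quantity then blends the two stored slices linearly.
def _example_time_interpolation():
    from Numeric import array, Float

    time = array([0.0, 10.0, 20.0], Float)
    Q = array([[0.0, 0.0], [4.0, 8.0], [6.0, 12.0]], Float)  # f(t, point)

    index, ratio = get_time_interp(time, 15.0)    # (1, 0.5)
    q = interpolated_quantity(Q, (index, ratio))  # 0.5*Q[1] + 0.5*Q[2] = [5., 10.]
    print 'index =', index, 'ratio =', ratio, 'q =', q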
3406
3407def weed(coordinates,volumes,boundary = None):
3408    if type(coordinates)==ArrayType:
3409        coordinates = coordinates.tolist()
3410    if type(volumes)==ArrayType:
3411        volumes = volumes.tolist()
3412
3413    unique = False
3414    point_dict = {}
3415    same_point = {}
3416    for i in range(len(coordinates)):
3417        point = tuple(coordinates[i])
3418        if point_dict.has_key(point):
3419            unique = True
3420            same_point[i]=point
3421            #to change all point i references to point j
3422        else:
3423            point_dict[point]=i
3424            same_point[i]=point
3425
3426    coordinates = []
3427    i = 0
3428    for point in point_dict.keys():
3429        point = tuple(point)
3430        coordinates.append(list(point))
3431        point_dict[point]=i
3432        i+=1
3433
3434
3435    for volume in volumes:
3436        for i in range(len(volume)):
3437            index = volume[i]
3438            if index>-1:
3439                volume[i]=point_dict[same_point[index]]
3440
3441    new_boundary = {}
3442    if not boundary is None:
3443        for segment in boundary.keys():
3444            point0 = point_dict[same_point[segment[0]]]
3445            point1 = point_dict[same_point[segment[1]]]
3446            label = boundary[segment]
3447            #FIXME should the boundary attributes be concatenated
3448            #('exterior, pond') or replaced ('pond')(peter row)
3449
3450            if new_boundary.has_key((point0,point1)):
3451                new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
3452                                              #+','+label
3453
3454            elif new_boundary.has_key((point1,point0)):
3455                new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
3456                                              #+','+label
3457            else: new_boundary[(point0,point1)]=label
3458
3459        boundary = new_boundary
3460
3461    return coordinates,volumes,boundary
3462
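# Illustrative sketch: weed() merges duplicated vertex coordinates (as stored
# by non-smoothed sww files, where every triangle keeps its own copy of each
# vertex) and remaps the volume indices accordingly.
def _example_weed():
    coordinates = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0],
                   [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]]  # two triangles sharing an edge
    volumes = [[0, 1, 2], [3, 4, 5]]

    coordinates, volumes, _ = weed(coordinates, volumes)
    print len(coordinates), 'unique points (down from 6)'
    print 'remapped volumes:', volumes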
3463
3464def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
3465                 verbose=False):
3466    """Read Digitial Elevation model from the following NetCDF format (.dem)
3467
3468    Example:
3469
3470    ncols         3121
3471    nrows         1800
3472    xllcorner     722000
3473    yllcorner     5893000
3474    cellsize      25
3475    NODATA_value  -9999
3476    138.3698 137.4194 136.5062 135.5558 ..........
3477
3478    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3479    """
3480
3481    import os
3482    from Scientific.IO.NetCDF import NetCDFFile
3483    from Numeric import Float, zeros, sum, reshape, equal
3484
3485    root = basename_in
3486    inname = root + '.dem'
3487
3488    #Open existing netcdf file to read
3489    infile = NetCDFFile(inname, 'r')
3490    if verbose: print 'Reading DEM from %s' %inname
3491
3492    #Read metadata
3493    ncols = infile.ncols[0]
3494    nrows = infile.nrows[0]
3495    xllcorner = infile.xllcorner[0]
3496    yllcorner = infile.yllcorner[0]
3497    cellsize = infile.cellsize[0]
3498    NODATA_value = infile.NODATA_value[0]
3499    zone = infile.zone[0]
3500    false_easting = infile.false_easting[0]
3501    false_northing = infile.false_northing[0]
3502    projection = infile.projection
3503    datum = infile.datum
3504    units = infile.units
3505
3506    dem_elevation = infile.variables['elevation']
3507
3508    #Get output file name
3509    if basename_out == None:
3510        outname = root + '_' + repr(cellsize_new) + '.dem'
3511    else:
3512        outname = basename_out + '.dem'
3513
3514    if verbose: print 'Write decimated NetCDF file to %s' %outname
3515
3516    #Determine some dimensions for decimated grid
3517    (nrows_stencil, ncols_stencil) = stencil.shape
3518    x_offset = ncols_stencil / 2
3519    y_offset = nrows_stencil / 2
3520    cellsize_ratio = int(cellsize_new / cellsize)
3521    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3522    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3523
3524    #Open netcdf file for output
3525    outfile = NetCDFFile(outname, 'w')
3526
3527    #Create new file
3528    outfile.institution = 'Geoscience Australia'
3529    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3530                           'of spatial point data'
3531    #Georeferencing
3532    outfile.zone = zone
3533    outfile.projection = projection
3534    outfile.datum = datum
3535    outfile.units = units
3536
3537    outfile.cellsize = cellsize_new
3538    outfile.NODATA_value = NODATA_value
3539    outfile.false_easting = false_easting
3540    outfile.false_northing = false_northing
3541
3542    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3543    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3544    outfile.ncols = ncols_new
3545    outfile.nrows = nrows_new
3546
3547    # dimension definition
3548    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3549
3550    # variable definition
3551    outfile.createVariable('elevation', Float, ('number_of_points',))
3552
3553    # Get handle to the variable
3554    elevation = outfile.variables['elevation']
3555
3556    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3557
3558    #Store data
3559    global_index = 0
3560    for i in range(nrows_new):
3561        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3562        lower_index = global_index
3563        telev =  zeros(ncols_new, Float)
3564        local_index = 0
3565        trow = i * cellsize_ratio
3566
3567        for j in range(ncols_new):
3568            tcol = j * cellsize_ratio
3569            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3570
3571            #if dem contains 1 or more NODATA_values set value in
3572            #decimated dem to NODATA_value, else compute decimated
3573            #value using stencil
3574            if sum(sum(equal(tmp, NODATA_value))) > 0:
3575                telev[local_index] = NODATA_value
3576            else:
3577                telev[local_index] = sum(sum(tmp * stencil))
3578
3579            global_index += 1
3580            local_index += 1
3581
3582        upper_index = global_index
3583
3584        elevation[lower_index:upper_index] = telev
3585
3586    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3587
3588    infile.close()
3589    outfile.close()
3590
3591
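# Illustrative usage sketch ('topo' is a hypothetical basename): decimate a
# 25m DEM to 100m using a 4x4 averaging stencil.  The stencil weights should
# sum to 1 so that each decimated cell is the mean of the cells it covers.
def _example_decimate_dem():
    from Numeric import ones, Float

    stencil = ones((4, 4), Float)/16.0   # 4x4 block average
    decimate_dem('topo', stencil, cellsize_new=100,
                 basename_out='topo_100', verbose=True)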
3592
3593
3594def tsh2sww(filename, verbose=False): 
3595    """
3596    Convert a tsh/msh file to sww format, as a quick check that the mesh 'looks' good.
3597    """
3598
3599
3600    if verbose == True:print 'Creating domain from', filename
3601    domain = pmesh_to_domain_instance(filename, Domain)
3602    if verbose == True:print "Number of triangles = ", len(domain)
3603
3604    domain.smooth = True
3605    domain.format = 'sww'   #Native netcdf visualisation format
3606    file_path, filename = path.split(filename)
3607    filename, ext = path.splitext(filename)
3608    domain.set_name(filename)   
3609    domain.reduction = mean
3610    if verbose == True:print "file_path",file_path
3611    if file_path == "":file_path = "."
3612    domain.set_datadir(file_path)
3613
3614    if verbose == True:
3615        print "Output written to " + domain.get_datadir() + sep + \
3616              domain.get_name() + "." + domain.format
3617    sww = get_dataobject(domain)
3618    sww.store_connectivity()
3619    sww.store_timestep()
3620
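# Illustrative usage sketch ('mesh.tsh' is a hypothetical file name): writes
# 'mesh.sww' next to the input file as a quick sanity check of the mesh.
def _example_tsh2sww():
    tsh2sww('mesh.tsh', verbose=True)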
3621
3622def asc_csiro2sww(bath_dir,
3623                  elevation_dir,
3624                  ucur_dir,
3625                  vcur_dir,
3626                  sww_file,
3627                  minlat = None, maxlat = None,
3628                  minlon = None, maxlon = None,
3629                  zscale=1,
3630                  mean_stage = 0,
3631                  fail_on_NaN = True,
3632                  elevation_NaN_filler = 0,
3633                  bath_prefix='ba',
3634                  elevation_prefix='el',
3635                  verbose=False):
3636    """
3637    Produce an sww boundary file, from esri ascii data from CSIRO.
3638
3639    Also convert latitude and longitude to UTM. All coordinates are
3640    assumed to be given in the GDA94 datum.
3641
3642    assume:
3643    All files are in esri ascii format
3644
3645    4 types of information
3646    bathymetry
3647    elevation
3648    u velocity
3649    v velocity
3650
3651    Assumptions
3652    The metadata of all the files is the same
3653    Each type is in a separate directory
3654    One bath file with extension .000
3655    The time period is less than 24hrs and uniform.
3656    """
3657    from Scientific.IO.NetCDF import NetCDFFile
3658
3659    from anuga.coordinate_transforms.redfearn import redfearn
3660
3661    precision = Float # So if we want to change the precision it's done here
3662
3663    # go into the bath dir and load the only file
3664    bath_files = os.listdir(bath_dir)
3665
3666    bath_file = bath_files[0]
3667    bath_dir_file =  bath_dir + os.sep + bath_file
3668    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3669
3670    #Use the date.time of the bath file as a basis for
3671    #the start time for other files
3672    base_start = bath_file[-12:]
3673
3674    #go into the elevation dir and load the 000 file
3675    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3676                         + base_start
3677
3678    elevation_files = os.listdir(elevation_dir)
3679    ucur_files = os.listdir(ucur_dir)
3680    vcur_files = os.listdir(vcur_dir)
3681    elevation_files.sort()
3682    # the first elevation file should be the
3683    # file with the same base name as the bath data
3684    assert elevation_files[0] == elevation_prefix + base_start
3685
3686    number_of_latitudes = bath_grid.shape[0]
3687    number_of_longitudes = bath_grid.shape[1]
3688    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3689
3690    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3691                  for x in range(number_of_longitudes)]
3692    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3693                 for y in range(number_of_latitudes)]
3694
3695    # reverse order of lat, so the first lat represents the first grid row
3696    latitudes.reverse()
3697
3698    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],longitudes[:],
3699                                                 minlat=minlat, maxlat=maxlat,
3700                                                 minlon=minlon, maxlon=maxlon)
3701
3702
3703    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3704    latitudes = latitudes[kmin:kmax]
3705    longitudes = longitudes[lmin:lmax]
3706    number_of_latitudes = len(latitudes)
3707    number_of_longitudes = len(longitudes)
3708    number_of_times = len(os.listdir(elevation_dir))
3709    number_of_points = number_of_latitudes*number_of_longitudes
3710    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3711
3712    #Work out the times
3713    if len(elevation_files) > 1:
3714        # Assume: The time period is less than 24hrs.
3715        time_period = (int(elevation_files[1][-3:]) - \
3716                      int(elevation_files[0][-3:]))*60*60
3717        times = [x*time_period for x in range(len(elevation_files))]
3718    else:
3719        times = [0.0]
3720
3721
3722    if verbose:
3723        print '------------------------------------------------'
3724        print 'Statistics:'
3725        print '  Extent (lat/lon):'
3726        print '    lat in [%f, %f], len(lat) == %d'\
3727              %(min(latitudes), max(latitudes),
3728                len(latitudes))
3729        print '    lon in [%f, %f], len(lon) == %d'\
3730              %(min(longitudes), max(longitudes),
3731                len(longitudes))
3732        print '    t in [%f, %f], len(t) == %d'\
3733              %(min(times), max(times), len(times))
3734
3735    ######### WRITE THE SWW FILE #############
3736    # NetCDF file definition
3737    outfile = NetCDFFile(sww_file, 'w')
3738
3739    #Create new file
3740    outfile.institution = 'Geoscience Australia'
3741    outfile.description = 'Converted from XXX'
3742
3743
3744    #For sww compatibility
3745    outfile.smoothing = 'Yes'
3746    outfile.order = 1
3747
3748    #Start time in seconds since the epoch (midnight 1/1/1970)
3749    outfile.starttime = starttime = times[0]
3750
3751
3752    # dimension definitions
3753    outfile.createDimension('number_of_volumes', number_of_volumes)
3754
3755    outfile.createDimension('number_of_vertices', 3)
3756    outfile.createDimension('number_of_points', number_of_points)
3757    outfile.createDimension('number_of_timesteps', number_of_times)
3758
3759    # variable definitions
3760    outfile.createVariable('x', precision, ('number_of_points',))
3761    outfile.createVariable('y', precision, ('number_of_points',))
3762    outfile.createVariable('elevation', precision, ('number_of_points',))
3763
3764    #FIXME: Backwards compatibility
3765    outfile.createVariable('z', precision, ('number_of_points',))
3766    #################################
3767
3768    outfile.createVariable('volumes', Int, ('number_of_volumes',
3769                                            'number_of_vertices'))
3770
3771    outfile.createVariable('time', precision,
3772                           ('number_of_timesteps',))
3773
3774    outfile.createVariable('stage', precision,
3775                           ('number_of_timesteps',
3776                            'number_of_points'))
3777
3778    outfile.createVariable('xmomentum', precision,
3779                           ('number_of_timesteps',
3780                            'number_of_points'))
3781
3782    outfile.createVariable('ymomentum', precision,
3783                           ('number_of_timesteps',
3784                            'number_of_points'))
3785
3786    #Store
3787    from anuga.coordinate_transforms.redfearn import redfearn
3788    x = zeros(number_of_points, Float)  #Easting
3789    y = zeros(number_of_points, Float)  #Northing
3790
3791    if verbose: print 'Making triangular grid'
3792    #Get zone of 1st point.
3793    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
3794
3795    vertices = {}
3796    i = 0
3797    for k, lat in enumerate(latitudes):
3798        for l, lon in enumerate(longitudes):
3799
3800            vertices[l,k] = i
3801
3802            zone, easting, northing = redfearn(lat,lon)
3803
3804            msg = 'Zone boundary crossed at longitude = %f' %lon
3805            #assert zone == refzone, msg
3806            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
3807            x[i] = easting
3808            y[i] = northing
3809            i += 1
3810
3811
3812    #Construct 2 triangles per 'rectangular' element
3813    volumes = []
3814    for l in range(number_of_longitudes-1):    #X direction
3815        for k in range(number_of_latitudes-1): #Y direction
3816            v1 = vertices[l,k+1]
3817            v2 = vertices[l,k]
3818            v3 = vertices[l+1,k+1]
3819            v4 = vertices[l+1,k]
3820
3821            #Note, this is different to the ferret2sww code
3822            #since the order of the lats is reversed.
3823            volumes.append([v1,v3,v2]) #Upper element
3824            volumes.append([v4,v2,v3]) #Lower element
3825
3826    volumes = array(volumes)
3827
3828    geo_ref = Geo_reference(refzone,min(x),min(y))
3829    geo_ref.write_NetCDF(outfile)
3830
3831    # This will put the geo ref in the middle
3832    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
3833
3834
3835    if verbose:
3836        print '------------------------------------------------'
3837        print 'More Statistics:'
3838        print '  Extent (x/y):'
3839        print '    x in [%f, %f], len(x) == %d'\
3840              %(min(x), max(x),
3841                len(x))
3842        print '    y in [%f, %f], len(y) == %d'\
3843              %(min(y), max(y),
3844                len(y))
3845        print 'geo_ref: ',geo_ref
3846
3847    z = resize(bath_grid,outfile.variables['z'][:].shape)
3848    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
3849    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
3850    outfile.variables['z'][:] = z
3851    outfile.variables['elevation'][:] = z  #FIXME HACK
3852    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
3853
3854    stage = outfile.variables['stage']
3855    xmomentum = outfile.variables['xmomentum']
3856    ymomentum = outfile.variables['ymomentum']
3857
3858    outfile.variables['time'][:] = times   #Store time relative
3859
3860    if verbose: print 'Converting quantities'
3861    n = number_of_times
3862    for j in range(number_of_times):
3863        # load in files
3864        elevation_meta, elevation_grid = \
3865           _read_asc(elevation_dir + os.sep + elevation_files[j])
3866
3867        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
3868        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])
3869
3870        #cut matrix to desired size
3871        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
3872        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
3873        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
3874       
3875        # handle missing values
3876        missing = (elevation_grid == elevation_meta['NODATA_value'])
3877        if sometrue (missing):
3878            if fail_on_NaN:
3879                msg = 'File %s contains missing values'\
3880                      %(elevation_files[j])
3881                raise DataMissingValuesError, msg
3882            else:
3883                elevation_grid = elevation_grid*(missing==0) + \
3884                                 missing*elevation_NaN_filler
3885
3886
3887        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
3888        i = 0
3889        for k in range(number_of_latitudes):      #Y direction
3890            for l in range(number_of_longitudes): #X direction
3891                w = zscale*elevation_grid[k,l] + mean_stage
3892                stage[j,i] = w
3893                h = w - z[i]
3894                xmomentum[j,i] = u_momentum_grid[k,l]*h
3895                ymomentum[j,i] = v_momentum_grid[k,l]*h
3896                i += 1
3897    outfile.close()
3898
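# Illustrative usage sketch (directory and file names are hypothetical):
def _example_asc_csiro2sww():
    asc_csiro2sww('bathymetry_dir', 'elevation_dir', 'ucur_dir', 'vcur_dir',
                  'boundary.sww',
                  minlat=-35.0, maxlat=-34.0,
                  minlon=150.0, maxlon=151.0,
                  verbose=True)
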
3899def _get_min_max_indexes(latitudes_ref,longitudes_ref,
3900                        minlat=None, maxlat=None,
3901                        minlon=None, maxlon=None):
3902    """
3903    Return min and max indexes (for slicing) of the lat and long arrays,
3904    covering the area specified with min/max lat/long.
3905
3906    Think of the latitudes and longitudes as describing a 2d surface.
3907    The area returned is, if possible, just big enough to cover the
3908    requested max/min area. (This will not be possible if the max/min area
3909    has a section outside of the latitudes/longitudes area.)
3910
3911    Assumes longitudes are sorted
3912    from low to high (west to east, eg 148 - 151),
3913    and latitudes are sorted, ascending or descending.
3914    """
3915    latitudes = latitudes_ref[:]
3916    longitudes = longitudes_ref[:]
3917
3918    latitudes = ensure_numeric(latitudes)
3919    longitudes = ensure_numeric(longitudes)
3920
3921    assert allclose(sort(longitudes), longitudes)
3922
3923    #print latitudes[0],longitudes[0]
3924    #print len(latitudes),len(longitudes)
3925    #print latitudes[len(latitudes)-1],longitudes[len(longitudes)-1]
3926   
3927    lat_ascending = True
3928    if not allclose(sort(latitudes), latitudes):
3929        lat_ascending = False
3930        # reverse order of lat, so it's in ascending order         
3931        latitudes = latitudes[::-1]
3932        assert allclose(sort(latitudes), latitudes)
3933    #print "latitudes  in funct", latitudes
3934   
3935    largest_lat_index = len(latitudes)-1
3936    #Cut out a smaller extent.
3937    if minlat == None:
3938        lat_min_index = 0
3939    else:
3940        lat_min_index = searchsorted(latitudes, minlat)-1
3941        if lat_min_index <0:
3942            lat_min_index = 0
3943
3944
3945    if maxlat == None:
3946        lat_max_index = largest_lat_index #len(latitudes)
3947    else:
3948        lat_max_index = searchsorted(latitudes, maxlat)
3949        if lat_max_index > largest_lat_index:
3950            lat_max_index = largest_lat_index
3951
3952    if minlon == None:
3953        lon_min_index = 0
3954    else:
3955        lon_min_index = searchsorted(longitudes, minlon)-1
3956        if lon_min_index <0:
3957            lon_min_index = 0
3958
3959    if maxlon == None:
3960        lon_max_index = len(longitudes)
3961    else:
3962        lon_max_index = searchsorted(longitudes, maxlon)
3963
3964    # Reversing the indexes, if the lat array is descending
3965    if lat_ascending is False:
3966        lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
3967                                       largest_lat_index - lat_min_index
3968    lat_max_index = lat_max_index + 1 # taking into account how slicing works
3969    lon_max_index = lon_max_index + 1 # taking into account how slicing works
3970
3971    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
3972
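# Worked example (values chosen for illustration): the returned indexes
# slice out the smallest sub-grid covering the requested extent.
def _example_get_min_max_indexes():
    latitudes = [-35.0, -34.0, -33.0, -32.0]
    longitudes = [148.0, 149.0, 150.0, 151.0]

    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes, longitudes,
                                                  minlat=-33.9, maxlat=-32.9,
                                                  minlon=149.5, maxlon=150.5)

    # Expect (1, 4, 1, 4) so that latitudes[1:4] == [-34.0, -33.0, -32.0]
    # and longitudes[1:4] == [149.0, 150.0, 151.0] just cover the area.
    print latitudes[kmin:kmax], longitudes[lmin:lmax]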
3973
3974def _read_asc(filename, verbose=False):
3975    """Read esri file from the following ASCII format (.asc)
3976
3977    Example:
3978
3979    ncols         3121
3980    nrows         1800
3981    xllcorner     722000
3982    yllcorner     5893000
3983    cellsize      25
3984    NODATA_value  -9999
3985    138.3698 137.4194 136.5062 135.5558 ..........
3986
3987    """
3988
3989    datafile = open(filename)
3990
3991    if verbose: print 'Reading DEM from %s' %(filename)
3992    lines = datafile.readlines()
3993    datafile.close()
3994
3995    if verbose: print 'Got', len(lines), ' lines'
3996
3997    ncols = int(lines.pop(0).split()[1].strip())
3998    nrows = int(lines.pop(0).split()[1].strip())
3999    xllcorner = float(lines.pop(0).split()[1].strip())
4000    yllcorner = float(lines.pop(0).split()[1].strip())
4001    cellsize = float(lines.pop(0).split()[1].strip())
4002    NODATA_value = float(lines.pop(0).split()[1].strip())
4003
4004    assert len(lines) == nrows
4005
4006    #Store data
4007    grid = []
4008
4009    n = len(lines)
4010    for i, line in enumerate(lines):
4011        cells = line.split()
4012        assert len(cells) == ncols
4013        grid.append(array([float(x) for x in cells]))
4014    grid = array(grid)
4015
4016    return {'xllcorner':xllcorner,
4017            'yllcorner':yllcorner,
4018            'cellsize':cellsize,
4019            'NODATA_value':NODATA_value}, grid
4020
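# Illustrative sketch: write a minimal 2x3 esri ascii grid and read it back
# ('example.asc' is a hypothetical file name).
def _example_read_asc():
    fid = open('example.asc', 'w')
    fid.write('ncols 3\n'
              'nrows 2\n'
              'xllcorner 722000\n'
              'yllcorner 5893000\n'
              'cellsize 25\n'
              'NODATA_value -9999\n'
              '1.0 2.0 3.0\n'
              '4.0 5.0 6.0\n')
    fid.close()

    metadata, grid = _read_asc('example.asc')
    print metadata['cellsize'], grid.shape   # 25.0 (2, 3)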
4021
4022
4023    ####  URS 2 SWW  ###
4024
4025lon_name = 'LON'
4026lat_name = 'LAT'
4027time_name = 'TIME'
4028precision = Float # So if we want to change the precision it's done here
4029class Write_nc:
4030    """
4031    Write an nc file.
4032   
4033    Note, this should be checked to meet cdc netcdf conventions for gridded
4034    data. http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml
4035   
4036    """
4037    def __init__(self,
4038                 quantity_name,
4039                 file_name,
4040                 time_step_count,
4041                 time_step,
4042                 lon,
4043                 lat):
4044        """
4045        time_step_count is the number of time steps.
4046        time_step is the time step size
4047       
4048        pre-condition: quantity_name must be 'HA', 'UA' or 'VA'.
4049        """
4050        self.quantity_name = quantity_name
4051        quantity_units = {'HA':'CENTIMETERS',
4052                              'UA':'CENTIMETERS/SECOND',
4053                              'VA':'CENTIMETERS/SECOND'}       
4054       
4055        multiplier_dic = {'HA':100.0, # To convert from m to cm
4056                              'UA':100.0,  #  m/s to cm/sec
4057                              'VA':-100.0}  # MUX files have positive x in the
4058        # Southern direction.  This corrects for it, when writing nc files.
4059       
4060        self.quantity_multiplier =  multiplier_dic[self.quantity_name]
4061       
4062        #self.file_name = file_name
4063        self.time_step_count = time_step_count
4064        self.time_step = time_step
4065
4066        # NetCDF file definition
4067        self.outfile = NetCDFFile(file_name, 'w')
4068        outfile = self.outfile       
4069
4070        #Create new file
4071        nc_lon_lat_header(outfile, lon, lat)
4072   
4073        # TIME
4074        outfile.createDimension(time_name, None)
4075        outfile.createVariable(time_name, precision, (time_name,))
4076
4077        #QUANTITY
4078        outfile.createVariable(self.quantity_name, precision,
4079                               (time_name, lat_name, lon_name))
4080        outfile.variables[self.quantity_name].missing_value=-1.e+034
4081        outfile.variables[self.quantity_name].units= \
4082                                 quantity_units[self.quantity_name]
4083        outfile.variables[lon_name][:]= ensure_numeric(lon)
4084        outfile.variables[lat_name][:]= ensure_numeric(lat)
4085
4086        #Assume no one will be wanting to read this, while we are writing
4087        #outfile.close()
4088       
4089    def store_timestep(self, quantity_slice):
4090        """
4091        Write a time slice of quantity info
4092        quantity_slice is the data to be stored at this time step
4093        """
4094       
4095        outfile = self.outfile
4096       
4097        # Get the variables
4098        time = outfile.variables[time_name]
4099        quantity = outfile.variables[self.quantity_name]
4100           
4101        i = len(time)
4102
4103        #Store time
4104        time[i] = i*self.time_step #self.domain.time
4105        quantity[i,:] = quantity_slice* self.quantity_multiplier
4106       
4107    def close(self):
4108        self.outfile.close()
4109
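# Illustrative usage sketch (file name and values are hypothetical): write
# two time slices of wave height, which Write_nc converts from m to cm.
def _example_write_nc():
    from Numeric import array, Float

    lon = [150.0, 150.5, 151.0]
    lat = [-34.0, -33.5]
    nc = Write_nc('HA', 'ha_example.nc', time_step_count=2,
                  time_step=60.0, lon=lon, lat=lat)
    for i in range(2):
        # One (lat, lon) slice per time step, in metres
        nc.store_timestep(array([[0.1*i]*3]*2, Float))
    nc.close()
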
4110def urs2sww(basename_in='o', basename_out=None, verbose=False,
4111            remove_nc_files=True,
4112            minlat=None, maxlat=None,
4113            minlon= None, maxlon=None,
4114            mint=None, maxt=None,
4115            mean_stage=0,
4116            origin = None,
4117            zscale=1,
4118            fail_on_NaN=True,
4119            NaN_filler=0,
4120            elevation=None):
4121    """
4122    Convert URS C binary format for wave propagation to
4123    sww format native to abstract_2d_finite_volumes.
4124
4125    Specify only basename_in and read files of the form
4126    basefilename-z-mux2, basefilename-e-mux2 and
4127    basefilename-n-mux2 containing relative height,
4128    x-velocity and y-velocity, respectively.
4129
4130    Also convert latitude and longitude to UTM. All coordinates are
4131    assumed to be given in the GDA94 datum. The latitude and longitude
4132    information is for  a grid.
4133
4134    min's and max's: If omitted - full extent is used.
4135    To include a value min may equal it, while max must exceed it.
4136    Lat and lon are assumed to be in decimal degrees.
4137    NOTE: minlon is the eastern-most boundary.
4138   
4139    origin is a 3-tuple with geo referenced
4140    UTM coordinates (zone, easting, northing)
4141    It will be the origin of the sww file. This shouldn't be used,
4142    since all of anuga should be able to handle an arbitrary origin.
4143
4144
4145    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
4146    which means that latitude is the fastest
4147    varying dimension (row major order, so to speak)
4148
4149    In URS C binary the latitudes and longitudes are in ascending order.
4150    """
4151    if basename_out == None:
4152        basename_out = basename_in
4153    files_out = urs2nc(basename_in, basename_out)
4154    ferret2sww(basename_out,
4155               minlat=minlat,
4156               maxlat=maxlat,
4157               minlon=minlon,
4158               maxlon=maxlon,
4159               mint=mint,
4160               maxt=maxt,
4161               mean_stage=mean_stage,
4162               origin=origin,
4163               zscale=zscale,
4164               fail_on_NaN=fail_on_NaN,
4165               NaN_filler=NaN_filler,
4166               inverted_bathymetry=True,
4167               verbose=verbose)
4168    #print "files_out",files_out
4169    if remove_nc_files:
4170        for file_out in files_out:
4171            os.remove(file_out)
4172   
4173def urs2nc(basename_in = 'o', basename_out = 'urs'):
4174    """
4175    Convert the 3 urs files to 4 nc files.
4176
4177    The urs file names must be:
4178    [basename_in]-z-mux
4179    [basename_in]-e-mux
4180    [basename_in]-n-mux
4181   
4182    """
4183   
4184    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
4185                basename_in + EAST_VELOCITY_LABEL,
4186                basename_in + NORTH_VELOCITY_LABEL]
4187    files_out = [basename_out+'_ha.nc',
4188                 basename_out+'_ua.nc',
4189                 basename_out+'_va.nc']
4190    quantities = ['HA','UA','VA']
4191
4192    #if os.access(files_in[0]+'.mux', os.F_OK) == 0 :
4193    for i, file_name in enumerate(files_in):
4194        if os.access(file_name, os.F_OK) == 0:
4195            if os.access(file_name+'.mux', os.F_OK) == 0 :
4196                msg = 'File %s does not exist or is not accessible' %file_name
4197                raise IOError, msg
4198            else:
4199               files_in[i] += '.mux'
4200               print "file_name", file_name
4201    hashed_elevation = None
4202    for file_in, file_out, quantity in map(None, files_in,
4203                                           files_out,
4204                                           quantities):
4205        lonlatdep, lon, lat, depth = _binary_c2nc(file_in,
4206                                         file_out,
4207                                         quantity)
4208        #print "lonlatdep", lonlatdep
4209        if hashed_elevation == None:
4210            elevation_file = basename_out+'_e.nc'
4211            write_elevation_nc(elevation_file,
4212                                lon,
4213                                lat,
4214                                depth)
4215            hashed_elevation = myhash(lonlatdep)
4216        else:
4217            msg = "The elevation information in the mux files is inconsistent"
4218            assert hashed_elevation == myhash(lonlatdep), msg
4219    files_out.append(elevation_file)
4220    return files_out
4221   
4222def _binary_c2nc(file_in, file_out, quantity):
4223    """
4224    Reads in a quantity urs file and writes a quantity nc file.
4225    additionally, returns the depth and lat, long info,
4226    so it can be written to a file.
4227    """
4228    columns = 3 # long, lat , depth
4229    mux_file = open(file_in, 'rb')
4230   
4231    # Number of points/stations
4232    (points_num,)= unpack('i',mux_file.read(4))
4233
4234    # nt, int - Number of time steps
4235    (time_step_count,)= unpack('i',mux_file.read(4))
4236
4237    #dt, float - time step, seconds
4238    (time_step,) = unpack('f', mux_file.read(4))
4239   
4240    msg = "Bad data in the mux file."
4241    if points_num < 0:
4242        mux_file.close()
4243        raise ANUGAError, msg
4244    if time_step_count < 0:
4245        mux_file.close()
4246        raise ANUGAError, msg
4247    if time_step < 0:
4248        mux_file.close()
4249        raise ANUGAError, msg
4250   
4251    lonlatdep = p_array.array('f')
4252    lonlatdep.read(mux_file, columns * points_num)
4253    lonlatdep = array(lonlatdep, typecode=Float)   
4254    lonlatdep = reshape(lonlatdep, (points_num, columns))
4255   
4256    lon, lat, depth = lon_lat2grid(lonlatdep)
4257    lon_sorted = list(lon)
4258    lon_sorted.sort()
4259
4260    if list(lon) != lon_sorted:
4261        msg = "Longitudes in mux file are not in ascending order"
4262        raise IOError, msg
4263    lat_sorted = list(lat)
4264    lat_sorted.sort()
4265
4266    if list(lat) != lat_sorted:
4267        msg = "Latitudes in mux file are not in ascending order"
4268        raise IOError, msg
4269    nc_file = Write_nc(quantity,
4270                       file_out,
4271                       time_step_count,
4272                       time_step,
4273                       lon,
4274                       lat)
4275
4276    for i in range(time_step_count):
4277        #Read in a time slice  from mux file 
4278        hz_p_array = p_array.array('f')
4279        hz_p_array.read(mux_file, points_num)
4280        hz_p = array(hz_p_array, typecode=Float)
4281        hz_p = reshape(hz_p, (len(lon), len(lat)))
4282        hz_p = transpose(hz_p) #mux has lat varying fastest, nc has long v.f.
4283
4284        #write time slice to nc file
4285        nc_file.store_timestep(hz_p)
4286    mux_file.close()
4287    nc_file.close()
4288
4289    return lonlatdep, lon, lat, depth
4290   
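# Illustrative sketch of the mux header layout read above: the number of
# points (int), number of time steps (int) and time step size (float) are
# each stored in 4 bytes at the start of the file.
def _example_mux_header():
    from struct import pack, unpack

    header = pack('i', 10) + pack('i', 5) + pack('f', 0.5)  # fabricated header

    (points_num,) = unpack('i', header[0:4])
    (time_step_count,) = unpack('i', header[4:8])
    (time_step,) = unpack('f', header[8:12])
    print points_num, time_step_count, time_step   # 10 5 0.5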
4291
4292def write_elevation_nc(file_out, lon, lat, depth_vector):
4293    """
4294    Write an nc elevation file.
4295    """
4296   
4297    # NetCDF file definition
4298    outfile = NetCDFFile(file_out, 'w')
4299
4300    #Create new file
4301    nc_lon_lat_header(outfile, lon, lat)
4302   
4303    # ELEVATION
4304    zname = 'ELEVATION'
4305    outfile.createVariable(zname, precision, (lat_name, lon_name))
4306    outfile.variables[zname].units='CENTIMETERS'
4307    outfile.variables[zname].missing_value=-1.e+034
4308
4309    outfile.variables[lon_name][:]= ensure_numeric(lon)
4310    outfile.variables[lat_name][:]= ensure_numeric(lat)
4311
4312    depth = reshape(depth_vector, ( len(lat), len(lon)))
4313    outfile.variables[zname][:]= depth
4314   
4315    outfile.close()
4316   
4317def nc_lon_lat_header(outfile, lon, lat):
4318    """
4319    outfile is the netcdf file handle.
4320    lon - a list/array of the longitudes
4321    lat - a list/array of the latitudes
4322    """
4323   
4324    outfile.institution = 'Geoscience Australia'
4325    outfile.description = 'Converted from URS binary C'
4326   
4327    # Longitude
4328    outfile.createDimension(lon_name, len(lon))
4329    outfile.createVariable(lon_name, precision, (lon_name,))
4330    outfile.variables[lon_name].point_spacing='uneven'
4331    outfile.variables[lon_name].units='degrees_east'
4332    outfile.variables[lon_name].assignValue(lon)
4333
4334
4335    # Latitude
4336    outfile.createDimension(lat_name, len(lat))
4337    outfile.createVariable(lat_name, precision, (lat_name,))
4338    outfile.variables[lat_name].point_spacing='uneven'
4339    outfile.variables[lat_name].units='degrees_north'
4340    outfile.variables[lat_name].assignValue(lat)
4341
4342
4343   
4344def lon_lat2grid(long_lat_dep):
4345    """
4346    given a list of points that are assumed to be on a grid,
4347    return the long's and lat's of the grid.
4348    long_lat_dep is an array where each row is a position.
4349    The first column is longitudes.
4350    The second column is latitudes.
4351
4352    The latitude is the fastest varying dimension - in mux files
4353    """
4354    LONG = 0
4355    LAT = 1
4356    QUANTITY = 2
4357
4358    long_lat_dep = ensure_numeric(long_lat_dep, Float)
4359   
4360    num_points = long_lat_dep.shape[0]
4361    this_rows_long = long_lat_dep[0,LONG]
4362
4363    # Count the length of unique latitudes
4364    i = 0
4365    while i < num_points and long_lat_dep[i,LONG] == this_rows_long:
4366        i += 1
4367    # determine the lats and longs from the grid
4368    lat = long_lat_dep[:i, LAT]       
4369    long = long_lat_dep[::i, LONG]
4370   
4371    lenlong = len(long)
4372    lenlat = len(lat)
4373    #print 'len lat', lat, len(lat)
4374    #print 'len long', long, len(long)
4375         
4376    msg = 'Input data is not gridded'     
4377    assert num_points % lenlat == 0, msg
4378    assert num_points % lenlong == 0, msg
4379         
4380    # Test that data is gridded       
4381    for i in range(lenlong):
4382        msg = 'Data is not gridded.  It must be for this operation'
4383        first = i*lenlat
4384        last = first + lenlat
4385               
4386        assert allclose(long_lat_dep[first:last,LAT], lat), msg
4387        assert allclose(long_lat_dep[first:last,LONG], long[i]), msg
4388   
4389   
4390#    print 'range long', min(long), max(long)
4391#    print 'range lat', min(lat), max(lat)
4392#    print 'ref long', min(long_lat_dep[:,0]), max(long_lat_dep[:,0])
4393#    print 'ref lat', min(long_lat_dep[:,1]), max(long_lat_dep[:,1])
4394   
4395   
4396   
4397    msg = 'Out of range latitudes/longitudes'
4398    for l in lat:assert -90 < l < 90 , msg
4399    for l in long:assert -180 < l < 180 , msg
4400
4401    # Changing quantity from lat being the fastest varying dimension to
4402    # long being the fastest varying dimension
4403    # FIXME - make this faster/do this a better way
4404    # use numeric transpose, after reshaping the quantity vector
4405#    quantity = zeros(len(long_lat_dep), Float)
4406    quantity = zeros(num_points, Float)
4407   
4408#    print 'num',num_points
4409    for lat_i, _ in enumerate(lat):
4410        for long_i, _ in enumerate(long):
4411            q_index = lat_i*lenlong+long_i
4412            lld_index = long_i*lenlat+lat_i
4413#            print 'lat_i', lat_i, 'long_i',long_i, 'q_index', q_index, 'lld_index', lld_index
4414            temp = long_lat_dep[lld_index, QUANTITY]
4415            quantity[q_index] = temp
4416           
4417    return long, lat, quantity
4418
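# Worked example (values chosen for illustration): four gridded points with
# latitude varying fastest, as in mux files.  The returned quantity vector
# is reordered so that longitude varies fastest.
def _example_lon_lat2grid():
    from Numeric import array, Float

    # columns: longitude, latitude, quantity
    long_lat_dep = array([[0.0, 10.0, 1.0],
                          [0.0, 11.0, 2.0],
                          [1.0, 10.0, 3.0],
                          [1.0, 11.0, 4.0]], Float)

    long, lat, quantity = lon_lat2grid(long_lat_dep)
    print long       # longitudes 0., 1.
    print lat        # latitudes 10., 11.
    print quantity   # reordered to 1., 3., 2., 4.
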
4419####  END URS 2 SWW  ###
4420
4421#### URS UNGRIDDED 2 SWW ###
4422
4423### PRODUCING THE POINTS NEEDED FILE ###
4424
4425# Ones used for FESA 2007 results
4426#LL_LAT = -50.0
4427#LL_LONG = 80.0
4428#GRID_SPACING = 1.0/60.0
4429#LAT_AMOUNT = 4800
4430#LONG_AMOUNT = 3600
4431
4432def URS_points_needed_to_file(file_name, boundary_polygon, zone,
4433                              ll_lat, ll_long,
4434                              grid_spacing, 
4435                              lat_amount, long_amount,
4436                              isSouthernHemisphere=True,
4437                              export_csv=False, use_cache=False,
4438                              verbose=False):
4439    """
4440    Given the info to replicate the URS grid and a polygon output
4441    a file that specifies the cloud of boundary points for URS.
4442
4443    This creates a .urs file.  This is in the format used by URS;
4444    1st line is the number of points,
4445    each line after represents a point,in lats and longs.
4446   
4447    Note: The polygon cannot cross zones or hemispheres.
4448
4449    A workaround for different zones or hemispheres is to run this twice,
4450    once for each zone, and then combine the output.
4451   
4452    file_name - name of the urs file produced for David.
4453    boundary_polygon - a list of points that describes a polygon.
4454                      The last point is assumed to join the first point.
4455                      This is in UTM (lat long would be better though)
4456
4457     This is info about the URS model that needs to be inputted.
4458     
4459    ll_lat - lower left latitude, in decimal degrees
4460    ll_long - lower left longitude, in decimal degrees
4461    grid_spacing - in decimal degrees
4462    lat_amount - number of latitudes
4463    long_amount- number of longs
4464
4465
4466    Don't add the file extension.  It will be added.
4467    """
4468    geo = URS_points_needed(boundary_polygon, zone, ll_lat, ll_long,
4469                            grid_spacing, 
4470                            lat_amount, long_amount, isSouthernHemisphere,
4471                            use_cache, verbose)
4472    if not file_name[-4:] == ".urs":
4473        file_name += ".urs"
4474    geo.export_points_file(file_name, isSouthHemisphere=isSouthernHemisphere)
4475    if export_csv:
4476        if file_name[-4:] == ".urs":
4477            file_name = file_name[:-4] + ".csv"
4478        geo.export_points_file(file_name)
4479    return geo
4480
4481def URS_points_needed(boundary_polygon, zone, ll_lat,
4482                      ll_long, grid_spacing, 
4483                      lat_amount, long_amount, isSouthHemisphere=True,
4484                      use_cache=False, verbose=False):
4485    args = (boundary_polygon,
4486            zone, ll_lat,
4487            ll_long, grid_spacing, 
4488            lat_amount, long_amount, isSouthHemisphere)
4489    kwargs = {} 
4490    if use_cache is True:
4491        try:
4492            from anuga.caching import cache
4493        except:
4494            msg = 'Caching was requested, but caching module '+\
4495                  'could not be imported'
4496            raise ImportError, msg
4497
4498
4499        geo = cache(_URS_points_needed,
4500                  args, kwargs,
4501                  verbose=verbose,
4502                  compression=False)
4503    else:
4504        geo = _URS_points_needed(*args, **kwargs)
4505
4506    return geo
4507
4508def _URS_points_needed(boundary_polygon,
4509                      zone, ll_lat,
4510                      ll_long, grid_spacing, 
4511                      lat_amount, long_amount,
4512                       isSouthHemisphere):
4513    """
4514    boundary_polygon - a list of points that describes a polygon.
4515                      The last point is assumed to join the first point.
4516                      This is in UTM (lat long would be better though)
4517
4518    ll_lat - lower left latitude, in decimal degrees
4519    ll_long - lower left longitude, in decimal degrees
4520    grid_spacing - in decimal degrees
4521
4522    """
4523   
4524    from sets import ImmutableSet
4525   
4526    msg = "grid_spacing can not be zero"
4527    assert not grid_spacing == 0, msg
4528    a = boundary_polygon
4529    # List of segments.  Each segment is two points.
4530    segs = [i and [a[i-1], a[i]] or [a[len(a)-1], a[0]] for i in range(len(a))]
4531    # convert the segs to Lat's and longs.
4532   
4533    # Don't assume the zone of the segments is the same as the lower left
4534    # corner of the lat long data!!  They can easily be in different zones
4535   
4536    lat_long_set = ImmutableSet()
4537    for seg in segs:
4538        points_lat_long = points_needed(seg, ll_lat, ll_long, grid_spacing, 
4539                      lat_amount, long_amount, zone, isSouthHemisphere)
4540        lat_long_set |= ImmutableSet(points_lat_long)
4541    if lat_long_set == ImmutableSet([]):
4542        msg = """URS region specified and polygon does not overlap."""
4543        raise ValueError, msg
4544
4545    # Warning there is no info in geospatial saying the hemisphere of
4546    # these points.  There should be.
4547    geo = Geospatial_data(data_points=list(lat_long_set),
4548                              points_are_lats_longs=True)
4549    return geo
4550   
4551def points_needed(seg, ll_lat, ll_long, grid_spacing, 
4552                  lat_amount, long_amount, zone,
4553                  isSouthHemisphere):
4554    """
4555    seg is two points, in UTM
4556    return a list of the points, in lats and longs that are needed to
4557    interpolate any point on the segment.
4558    """
4559    from math import sqrt
4560    #print "zone",zone
4561    geo_reference = Geo_reference(zone=zone)
4562    #print "seg", seg
4563    geo = Geospatial_data(seg,geo_reference=geo_reference)
4564    seg_lat_long = geo.get_data_points(as_lat_long=True,
4565                                       isSouthHemisphere=isSouthHemisphere)
4566    #print "seg_lat_long", seg_lat_long
4567    # 1.415 = 2^0.5, rounded up....
4568    sqrt_2_rounded_up = 1.415
4569    buffer = sqrt_2_rounded_up * grid_spacing
4570   
4571    max_lat = max(seg_lat_long[0][0], seg_lat_long[1][0]) + buffer
4572    max_long = max(seg_lat_long[0][1], seg_lat_long[1][1]) + buffer
4573    min_lat = min(seg_lat_long[0][0], seg_lat_long[1][0]) - buffer
4574    min_long = min(seg_lat_long[0][1], seg_lat_long[1][1]) - buffer
4575
4576    #print "min_long", min_long
4577    #print "ll_long", ll_long
4578    #print "grid_spacing", grid_spacing
4579    first_row = (min_long - ll_long)/grid_spacing
4580    # To round up
4581    first_row_long = int(round(first_row + 0.5))
4582    #print "first_row", first_row_long
4583
4584    last_row = (max_long - ll_long)/grid_spacing # round down
4585    last_row_long = int(round(last_row))
4586    #print "last_row",last_row _long
4587   
4588    first_row = (min_lat - ll_lat)/grid_spacing
4589    # To round up
4590    first_row_lat = int(round(first_row + 0.5))
4591    #print "first_row", first_row_lat
4592
4593    last_row = (max_lat - ll_lat)/grid_spacing # round down
4594    last_row_lat = int(round(last_row))
4595    #print "last_row",last_row_lat
4596
4597    # to work out the max distance -
4598    # 111120 m - distance spanned by 1 degree of latitude.
4599    #max_distance = sqrt_2_rounded_up * 111120 * grid_spacing
4600    max_distance = 157147.4112 * grid_spacing
4601    #print "max_distance", max_distance #2619.12 m for 1 minute
4602    points_lat_long = []
4603    # Create a list of the lat long points to include.
4604    for index_lat in range(first_row_lat, last_row_lat + 1):
4605        for index_long in range(first_row_long, last_row_long + 1):
4606            #print "index_lat", index_lat
4607            #print "index_long", index_long
4608            lat = ll_lat + index_lat*grid_spacing
4609            long = ll_long + index_long*grid_spacing
4610
4611            #filter here to keep good points
4612            if keep_point(lat, long, seg, max_distance):
4613                points_lat_long.append((lat, long)) #must be hashable
4614    #print "points_lat_long", points_lat_long
4615
4616    # Points too far from the segment have already been filtered out by keep_point above.
4617    return points_lat_long
4618
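# Worked example (hypothetical values) of the row rounding used above to
# pick the first and last grid rows covering a buffered segment.
def _example_row_rounding():
    ll_long = 80.0
    grid_spacing = 0.01
    min_long = 80.015
    max_long = 80.044

    first_row = (min_long - ll_long)/grid_spacing   # 1.5
    first_row_long = int(round(first_row + 0.5))    # rounded up to 2
    last_row = (max_long - ll_long)/grid_spacing    # 4.4
    last_row_long = int(round(last_row))            # rounded to nearest, 4
    print first_row_long, last_row_long             # 2 4
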
4619def keep_point(lat, long, seg, max_distance):
4620    """
4621    seg is two points, UTM
4622    """
4623    from math import sqrt
4624    _ , x0, y0 = redfearn(lat, long)
4625    x1 = seg[0][0]
4626    y1 = seg[0][1]
4627    x2 = seg[1][0]
4628    y2 = seg[1][1]
4629    x2_1 = x2-x1
4630    y2_1 = y2-y1
4631    try:
4632        d = abs((x2_1)*(y1-y0)-(x1-x0)*(y2_1))/sqrt( \
4633            (x2_1)*(x2_1)+(y2_1)*(y2_1))
4634    except ZeroDivisionError:
4635        #print "seg", seg
4636        #print "x0", x0
4637        #print "y0", y0
4638        if sqrt((x2_1)*(x2_1)+(y2_1)*(y2_1)) == 0 and \
4639           abs((x2_1)*(y1-y0)-(x1-x0)*(y2_1)) == 0:
4640            return True
4641        else:
4642            return False
4643   
4644    if d <= max_distance:
4645        return True
4646    else:
4647        return False
4648   
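# Worked example of the perpendicular-distance test used in keep_point
# (pure geometry; coordinates chosen for illustration).  The distance from
# (x0, y0) to the line through (x1, y1) and (x2, y2) is
# |(x2-x1)*(y1-y0) - (x1-x0)*(y2-y1)| / sqrt((x2-x1)**2 + (y2-y1)**2).
def _example_point_to_segment_distance():
    from math import sqrt

    x0, y0 = 5.0, 3.0                              # the grid point
    (x1, y1), (x2, y2) = (0.0, 0.0), (10.0, 0.0)   # the segment

    d = abs((x2-x1)*(y1-y0) - (x1-x0)*(y2-y1)) / \
        sqrt((x2-x1)*(x2-x1) + (y2-y1)*(y2-y1))
    print d   # 3.0, so the point is kept whenever max_distance >= 3.0
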
4649    #### CONVERTING UNGRIDDED URS DATA TO AN SWW FILE ####
4650   
4651WAVEHEIGHT_MUX_LABEL = '-z-mux'
4652EAST_VELOCITY_LABEL =  '-e-mux'
4653NORTH_VELOCITY_LABEL =  '-n-mux' 
4654def urs_ungridded2sww(basename_in='o', basename_out=None, verbose=False,
4655                      mint=None, maxt=None,
4656                      mean_stage=0,
4657                      origin=None,
4658                      hole_points_UTM=None,
4659                      zscale=1):
4660    """   
4661    Convert URS C binary format for wave propagation to
4662    sww format native to abstract_2d_finite_volumes.
4663
4664
4665    Specify only basename_in and read files of the form
4666    basefilename-z-mux, basefilename-e-mux and
4667    basefilename-n-mux containing relative height,
4668    x-velocity and y-velocity, respectively.
4669
4670    Also convert latitude and longitude to UTM. All coordinates are
4671    assumed to be given in the GDA94 datum. The latitude and longitude
4672    information is assumed to be ungridded.
4673
4674    min's and max's: If omitted - full extent is used.
4675    To include a value min and max may equal it.
4676    Lat and lon are assumed to be in decimal degrees.
4677   
4678    origin is a 3-tuple with geo referenced
4679    UTM coordinates (zone, easting, northing)
4680    It will be the origin of the sww file. This shouldn't be used,
4681    since all of anuga should be able to handle an arbitrary origin.
4682    The mux point info is NOT relative to this origin.
4683
4684
4685    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
4686    which means that latitude is the fastest
4687    varying dimension (row major order, so to speak)
4688
4689    In URS C binary the latitudes and longitudes are in ascending order.
4690
4691    Note, interpolations of the resulting sww file will be different
4692    from results of urs2sww.  This is due to the interpolation
4693    function used, and the different grid structure between urs2sww
4694    and this function.
4695   
4696    Interpolating data that has an underlying gridded source can
4697    easily end up with different values, depending on the underlying
4698    mesh.
4699
4700    consider these 4 points
4701    50  -50
4702
4703    0     0
4704
4705    The grid can be
4706     -
4707    |\|    A
4708     -
4709     or;
4710      -
4711     |/|   B
4712      -
4713      If a point is just below the midpoint of the square, it will have a
4714      +ve value in grid A and a -ve value in grid B.
4715    """ 
4716    from anuga.mesh_engine.mesh_engine import NoTrianglesError
4717    from anuga.pmesh.mesh import Mesh
4718
4719    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
4720                basename_in + EAST_VELOCITY_LABEL,
4721                basename_in + NORTH_VELOCITY_LABEL]
4722    quantities = ['HA','UA','VA']
4723
    # Instantiate Urs_points for each of the three mux files.
4725    mux = {}
4726    for quantity, file in map(None, quantities, files_in):
4727        mux[quantity] = Urs_points(file)
4728       
4729    # Could check that the depth is the same. (hashing)
4730
4731    # handle to a mux file to do depth stuff
4732    a_mux = mux[quantities[0]]
4733   
4734    # Convert to utm
4735    lat = a_mux.lonlatdep[:,1]
4736    long = a_mux.lonlatdep[:,0]
4737    points_utm, zone = convert_from_latlon_to_utm( \
4738        latitudes=lat, longitudes=long)
4739    #print "points_utm", points_utm
4740    #print "zone", zone
4741
    elevation = a_mux.lonlatdep[:,2] * -1  # Invert depth to get elevation
4743   
4744    # grid ( create a mesh from the selected points)
    # This mesh has a problem.  Triangles are stretched over ungridded areas.
4746    #  If these areas could be described as holes in pmesh, that would be great
4747
    # I can't just get the user to select a point in the middle.
4749    # A boundary is needed around these points.
4750    # But if the zone of points is obvious enough auto-segment should do
4751    # a good boundary.
4752    mesh = Mesh()
4753    mesh.add_vertices(points_utm)
4754    mesh.auto_segment(smooth_indents=True, expand_pinch=True)
4755    # To try and avoid alpha shape 'hugging' too much
4756    mesh.auto_segment( mesh.shape.get_alpha()*1.1 )
4757    if hole_points_UTM is not None:
4758        point = ensure_absolute(hole_points_UTM)
4759        mesh.add_hole(point[0], point[1])
4760    try:
4761        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
4762    except NoTrianglesError:
4763        # This is a bit of a hack, going in and changing the
4764        # data structure.
4765        mesh.holes = []
4766        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
4767    mesh_dic = mesh.Mesh2MeshList()
4768
4769    #mesh.export_mesh_file(basename_in + '_168.tsh')
4770    #import sys; sys.exit()
4771    # These are the times of the mux file
4772    mux_times = []
4773    for i in range(a_mux.time_step_count):
4774        mux_times.append(a_mux.time_step * i) 
4775    mux_times_start_i, mux_times_fin_i = mux2sww_time(mux_times, mint, maxt)
4776    times = mux_times[mux_times_start_i:mux_times_fin_i]
4777   
4778    if mux_times_start_i == mux_times_fin_i:
4779        # Close the mux files
4780        for quantity, file in map(None, quantities, files_in):
4781            mux[quantity].close()
4782        msg="Due to mint and maxt there's no time info in the boundary SWW."
4783        raise Exception, msg
4784       
    # If this raise is removed there are currently no downstream errors
4786           
4787    points_utm=ensure_numeric(points_utm)
    assert allclose(ensure_numeric(mesh_dic['generatedpointlist']),
                    ensure_numeric(points_utm))
4790   
4791    volumes = mesh_dic['generatedtrianglelist']
4792   
4793    # write sww intro and grid stuff.   
4794    if basename_out is None:
4795        swwname = basename_in + '.sww'
4796    else:
4797        swwname = basename_out + '.sww'
4798
4799    if verbose: print 'Output to ', swwname
4800    outfile = NetCDFFile(swwname, 'w')
4801    # For a different way of doing this, check out tsh2sww
4802    # work out sww_times and the index range this covers
4803    sww = Write_sww()
4804    sww.store_header(outfile, times, len(volumes), len(points_utm),
4805                     verbose=verbose,sww_precision=Float)
4806    outfile.mean_stage = mean_stage
4807    outfile.zscale = zscale
4808
4809    sww.store_triangulation(outfile, points_utm, volumes,
4810                            elevation, zone,  new_origin=origin,
4811                            verbose=verbose)
4812   
4813    if verbose: print 'Converting quantities'
4814    j = 0
4815    # Read in a time slice from each mux file and write it to the sww file
4816    for ha, ua, va in map(None, mux['HA'], mux['UA'], mux['VA']):
4817        if j >= mux_times_start_i and j < mux_times_fin_i:
4818            stage = zscale*ha + mean_stage
4819            h = stage - elevation
4820            xmomentum = ua*h
4821            ymomentum = -1*va*h # -1 since in mux files south is positive.
4822            sww.store_quantities(outfile, 
4823                                 slice_index=j - mux_times_start_i,
4824                                 verbose=verbose,
4825                                 stage=stage,
4826                                 xmomentum=xmomentum,
4827                                 ymomentum=ymomentum,
4828                                 sww_precision=Float)
4829        j += 1
4830    if verbose: sww.verbose_quantities(outfile)
4831    outfile.close()
4832    #
4833    # Do some conversions while writing the sww file
4834
4835    ##################################
4836    # READ MUX2 FILES line of points #
4837    ##################################
4838
4839WAVEHEIGHT_MUX2_LABEL = '-z-mux2'
4840EAST_VELOCITY_MUX2_LABEL =  '-e-mux2'
4841NORTH_VELOCITY_MUX2_LABEL =  '-n-mux2'   
4842
4843def read_mux2_py(filenames,
4844                 weights,
4845                 permutation=None,
4846                 verbose=False):
4847    """Access the mux files specified in the filenames list. Combine the
4848       data found therin as a weighted linear sum as specifed by the weights.
4849       If permutation is None or empty extract timeseries data for all gauges within the
4850       files.
4851         
4852       Input:
4853           filenames:   List of filenames specifiying the file containing the
4854                        timeseries data (mux2 format) for each source
4855           weights:     Weights associated with each source
4856           permutation: Specifies the gauge numbers that for which data is to be
4857                        extracted
4858    """ 
4859
4860    from Numeric import ones,Float,compress,zeros,arange
4861    from urs_ext import read_mux2
4862
4863    numSrc=len(filenames)
4864
4865    file_params=-1*ones(3,Float)#[nsta,dt,nt]
4866   
4867    # Convert verbose to int C flag
4868    if verbose:
4869        verbose=1
4870    else:
4871        verbose=0
4872       
4873       
4874    #msg = 'I got the permutation vector:' + str(permutation)
4875    #assert permutation is not None, msg
4876    if permutation is None:
4877        permutation = ensure_numeric([], Float)   
4878
4879    # Call underlying C implementation urs2sts_ext.c   
4880    data=read_mux2(numSrc,filenames,weights,file_params,permutation,verbose)
4881
4882    msg='File parameter values were not read in correctly from c file'
4883    assert len(compress(file_params>0,file_params))!=0,msg
4884   
    msg='The number of stations specified in the c array and in the file are inconsistent'
    assert file_params[0] >= len(permutation), msg
4887   
4888    msg='The number of stations returned is inconsistent with the requested number'   
4889    assert len(permutation) == 0 or len(permutation) == data.shape[0], msg
4890   
4891    nsta=int(file_params[0])
4892    msg='Must have at least one station'
4893    assert nsta>0,msg
4894   
4895    dt=file_params[1]
    msg='Must have a positive timestep'
4897    assert dt>0,msg
4898   
4899    nt=int(file_params[2])
4900    msg='Must have at least one gauge value'
4901    assert nt>0,msg
4902   
4903    OFFSET=5 #number of site parameters p passed back with data
4904    #p=[geolat,geolon,depth,start_tstep,finish_tstep]
4905
4906    times=dt*arange(1,(data.shape[1]-OFFSET)+1)
4907    latitudes=zeros(data.shape[0],Float)
4908    longitudes=zeros(data.shape[0],Float)
4909    elevation=zeros(data.shape[0],Float)
4910    quantity=zeros((data.shape[0],data.shape[1]-OFFSET),Float)
4911    starttime=1e16
4912    for i in range(0,data.shape[0]):
4913        latitudes[i]=data[i][data.shape[1]-OFFSET]
4914        longitudes[i]=data[i][data.shape[1]-OFFSET+1]
4915        elevation[i]=-data[i][data.shape[1]-OFFSET+2]
4916        quantity[i]=data[i][:-OFFSET]
4917        starttime=min(dt*data[i][data.shape[1]-OFFSET+3],starttime)
4918       
4919    return times, latitudes, longitudes, elevation, quantity, starttime
4920
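# Hedged sketch (not part of the original module) of how read_mux2_py is
# typically driven; the file names are hypothetical. Each returned array is
# indexed by station, and quantity has shape (stations, timesteps).
def _example_read_mux2_py():
    from Numeric import ones, Float
    filenames = ['sourceA-z-mux2', 'sourceB-z-mux2']
    weights = ones(len(filenames), Float)/len(filenames)  # Equal weighting
    times, lats, lons, elev, quantity, starttime = \
          read_mux2_py(filenames, weights, permutation=None, verbose=False)
    print 'stations: %d, timesteps: %d' %(quantity.shape[0], quantity.shape[1])
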
4921def mux2sww_time(mux_times, mint, maxt):
4922    """
4923    """
4924
    if mint is None:
4926        mux_times_start_i = 0
4927    else:
4928        mux_times_start_i = searchsorted(mux_times, mint)
4929       
    if maxt is None:
4931        mux_times_fin_i = len(mux_times)
4932    else:
4933        maxt += 0.5 # so if you specify a time where there is
4934                    # data that time will be included
4935        mux_times_fin_i = searchsorted(mux_times, maxt)
4936
4937    return mux_times_start_i, mux_times_fin_i
4938
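# Self-contained example (not part of the original module): with mux_times
# [0, 1, 2, 3], mint=0.5 and maxt=2.0, searchsorted yields the slice [1:3],
# i.e. the timesteps at t=1 and t=2 (maxt is padded by 0.5 inside
# mux2sww_time so an exact match is included).
def _example_mux2sww_time():
    mux_times = [0.0, 1.0, 2.0, 3.0]
    start_i, fin_i = mux2sww_time(mux_times, 0.5, 2.0)
    assert (start_i, fin_i) == (1, 3)
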
4939
4940def urs2sts(basename_in, basename_out=None, 
4941            weights=None,
4942            verbose=False, 
4943            origin=None,
4944            mean_stage=0.0, 
4945            zscale=1.0,
4946            ordering_filename=None):
4947    """Convert URS mux2 format for wave propagation to sts format
4948
4949    Also convert latitude and longitude to UTM. All coordinates are
4950    assumed to be given in the GDA94 datum
4951
4952    origin is a 3-tuple with geo referenced
4953    UTM coordinates (zone, easting, northing)
4954   
4955    inputs:
4956       
4957    basename_in: list of source file prefixes
4958   
4959        These are combined with the extensions:
4960        WAVEHEIGHT_MUX2_LABEL = '-z-mux2' for stage
4961        EAST_VELOCITY_MUX2_LABEL =  '-e-mux2' xmomentum
4962        NORTH_VELOCITY_MUX2_LABEL =  '-n-mux2' and ymomentum
4963   
        to create a 2D list of mux2 files. The rows are associated with each
        quantity and must have the above extensions;
        the columns are the list of file prefixes.
4967   
    ordering_filename: a .txt file name specifying which mux2 gauge points
              are to be stored. This is indicated by the index of the gauge
              in the ordering file.

              ordering file format:
              1st line:    'index,longitude,latitude\n'
              other lines: index,longitude,latitude

              If ordering_filename is None or the ordering file is empty
              then all points are taken in the order they
              appear in the mux2 file.
4979   
4980   
4981    output:
4982      basename_out: name of sts file in which mux2 data is stored.
4983   
4984   
4985    """
4986    import os
4987    from Scientific.IO.NetCDF import NetCDFFile
4988    from Numeric import Float, Int, Int32, searchsorted, zeros, array
4989    from Numeric import allclose, around,ones,Float
4990    from types import ListType,StringType
4991    from operator import __and__
4992   
4993    if not isinstance(basename_in, ListType):
4994        if verbose: print 'Reading single source'
4995        basename_in=[basename_in]
4996
4997    # Check that basename is a list of strings
4998    if not reduce(__and__, map(lambda z:isinstance(z,StringType), basename_in)):
4999        msg= 'basename_in must be a string or list of strings'
5000        raise Exception, msg
5001
5002    # Find the number of sources to be used
5003    numSrc=len(basename_in)
5004
5005    # A weight must be specified for each source
5006    if weights is None:
5007        # Default is equal weighting
5008        weights=ones(numSrc,Float)/numSrc
5009    else:
5010        weights = ensure_numeric(weights)
        msg = 'When combining multiple sources must specify a weight for '+\
              'each mux2 source file'
        assert len(weights) == numSrc, msg
5014
5015    # Check output filename   
5016    if basename_out is None:
5017        msg = 'STS filename must be specified as basename_out'
5018        msg += ' in function urs2sts'
5019        raise Exception, msg
5020   
5021    if basename_out.endswith('.sts'):
5022        stsname = basename_out
5023    else:   
5024        stsname = basename_out + '.sts'       
5025
5026    # Create input filenames from basenames and check their existence
5027    files_in=[[],[],[]]
5028    for files in basename_in:
        files_in[0].append(files + WAVEHEIGHT_MUX2_LABEL)
5030        files_in[1].append(files + EAST_VELOCITY_MUX2_LABEL)
5031        files_in[2].append(files + NORTH_VELOCITY_MUX2_LABEL)
5032   
5033    quantities = ['HA','UA','VA'] # Quantity names used in the MUX2 format
5034    for i in range(len(quantities)): 
5035        for file_in in files_in[i]:
            if not os.access(file_in, os.F_OK):
5037                msg = 'File %s does not exist or is not accessible' %file_in
5038                raise IOError, msg
5039
5040    # Establish permutation array
5041    if ordering_filename is not None:
5042   
5043        # Read ordering file
5044        try:
5045            fid=open(ordering_filename, 'r')
5046            file_header=fid.readline().split(',')
5047            ordering_lines=fid.readlines()
5048            fid.close()
5049        except:
5050            msg = 'Cannot open %s'%ordering_filename
5051            raise Exception, msg
5052
5053        reference_header = 'index, longitude, latitude\n'
5054        reference_header_split = reference_header.split(',')
5055        for i in range(3):
5056            if not file_header[i] == reference_header_split[i]:
5057                msg = 'File must contain header: '+reference_header+'\n'
5058                raise Exception, msg
5059
5060        if len(ordering_lines)<2:
5061            msg = 'File must contain at least two points'
5062            raise Exception, msg
5063
5064        permutation = ensure_numeric([int(line.split(',')[0]) for line in ordering_lines])
5065    else:
5066        permutation = None
5067
5068
5069    #print 'permutation', permutation
5070    # Read MUX2 files
5071    if (verbose): print 'reading mux2 file'
5072    mux={}
5073    for i, quantity in enumerate(quantities):
5074   
5075        # For each quantity read the associated list of source mux2 file with
5076        # extention associated with that quantity
5077        times,\
5078        latitudes,\
5079        longitudes,\
5080        elevation,\
5081        mux[quantity],\
5082        starttime = read_mux2_py(files_in[i], weights, permutation, verbose=verbose)
5083   
5084        # Check that all quantities have consistent time and space information     
5085        if quantity!=quantities[0]:
5086            msg='%s, %s and %s have inconsistent gauge data'\
5087                %(files_in[0], files_in[1], files_in[2])
5088            assert allclose(times, times_old), msg
5089            assert allclose(latitudes, latitudes_old), msg
5090            assert allclose(longitudes, longitudes_old), msg
5091            assert allclose(elevation, elevation_old), msg
5092            assert allclose(starttime, starttime_old), msg
5093        times_old=times
5094        latitudes_old=latitudes
5095        longitudes_old=longitudes
5096        elevation_old=elevation
5097        starttime_old=starttime
5098
5099
5100       
5101       
5102       
5103        # Self check - can be removed to improve speed
5104
5105        #ref_longitudes = [float(line.split(',')[1]) for line in ordering_lines]               
5106        #ref_latitudes = [float(line.split(',')[2]) for line in ordering_lines]       
5107        #
5108        #msg = 'Longitudes specified in ordering file do not match those found in mux files'
5109        #msg += 'I got %s instead of %s (only beginning shown)'\
5110        #    %(str(longitudes[:10]) + '...',
5111        #      str(ref_longitudes[:10]) + '...')       
5112        #assert allclose(longitudes, ref_longitudes), msg       
5113        #
5114        #msg = 'Latitudes specified in ordering file do not match those found in mux files. '
5115        #msg += 'I got %s instead of %s (only beginning shown)'\
5116        #    %(str(latitudes[:10]) + '...',
5117        #      str(ref_latitudes[:10]) + '...')               
5118        #assert allclose(latitudes, ref_latitudes), msg
5119       
5120               
5121       
5122    # Store timeseries in NetCDF STS file   
    msg='File is empty and/or clipped region not in file region'
    assert len(latitudes) > 0, msg
5125
5126    number_of_points = latitudes.shape[0]      # Number of stations retrieved
5127    number_of_times = times.shape[0]           # Number of timesteps
5128    number_of_latitudes = latitudes.shape[0]   # Number latitudes
5129    number_of_longitudes = longitudes.shape[0] # Number longitudes
5130
5131    # NetCDF file definition
5132    outfile = NetCDFFile(stsname, 'w')
5133
5134    description = 'Converted from URS mux2 files: %s'\
5135                  %(basename_in)
5136   
5137    # Create new file
5138    sts = Write_sts()
5139    sts.store_header(outfile, 
5140                     times+starttime,
5141                     number_of_points, 
5142                     description=description,
5143                     verbose=verbose,
5144                     sts_precision=Float)
5145   
5146    # Store
5147    from anuga.coordinate_transforms.redfearn import redfearn
5148    x = zeros(number_of_points, Float)  # Easting
5149    y = zeros(number_of_points, Float)  # Northing
5150
5151    # Check zone boundaries
5152    refzone, _, _ = redfearn(latitudes[0],longitudes[0]) 
5153
5154    for i in range(number_of_points):
5155        zone, easting, northing = redfearn(latitudes[i],longitudes[i])
5156        x[i] = easting
5157        y[i] = northing
5158        msg='all sts gauges need to be in the same zone'
5159        assert zone==refzone,msg
5160
5161    if origin is None:
5162        origin = Geo_reference(refzone,min(x),min(y))
5163    geo_ref = write_NetCDF_georeference(origin, outfile)
5164
5165
5166   
5167    #print geo_ref.get_xllcorner()
5168    #print geo_ref.get_yllcorner()
5169
5170    elevation = resize(elevation,outfile.variables['elevation'][:].shape)
5171    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
5172    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
5173    outfile.variables['elevation'][:] = elevation
5174
5175    stage = outfile.variables['stage']
5176    xmomentum = outfile.variables['xmomentum']
5177    ymomentum = outfile.variables['ymomentum']
5178
5179    if verbose: print 'Converting quantities'
5180    for j in range(len(times)):
5181        for i in range(number_of_points):
5182            w = zscale*mux['HA'][i,j] + mean_stage
5183            h=w-elevation[i]
5184            stage[j,i] = w
5185
5186            xmomentum[j,i] = mux['UA'][i,j]*h
5187            ymomentum[j,i] = mux['VA'][i,j]*h
5188
5189    outfile.close()
5190
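# Hedged usage sketch for urs2sts (not part of the original module). The
# source basenames, weights and ordering file are hypothetical; this would
# read e.g. sourceA-z-mux2 etc. and write event.sts.
def _example_urs2sts():
    urs2sts(['sourceA', 'sourceB'],
            basename_out='event',
            weights=[0.75, 0.25],
            ordering_filename='ordering.txt',
            mean_stage=0.0, zscale=1.0,
            verbose=True)
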
5191def create_sts_boundary(stsname):
5192    """
5193        Create boundary segments from .sts file. Points can be stored in
5194    arbitrary order within the .sts file. The order in which the .sts points
5195    make up the boundary are given in order.txt file
5196   
5197    FIXME:
5198    Point coordinates are stored in relative eastings and northings.
5199    But boundary is produced in absolute coordinates
5200   
5201    """
5202    try:
5203        fid = NetCDFFile(stsname+'.sts', 'r')
    except:
        msg = 'Cannot open %s.sts' %stsname
        raise Exception, msg
5206
5207
5208    xllcorner = fid.xllcorner[0]
5209    yllcorner = fid.yllcorner[0]
5210    #Points stored in sts file are normalised to [xllcorner,yllcorner] but
5211    #we cannot assume that boundary polygon will be. At least the
5212    #additional points specified by the user after this function is called
5213    x = fid.variables['x'][:]+xllcorner
5214    y = fid.variables['y'][:]+yllcorner
5215
5216    x = reshape(x, (len(x),1))
5217    y = reshape(y, (len(y),1))
5218    sts_points = concatenate((x,y), axis=1)
5219
5220    return sts_points.tolist()
5221
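# Hedged sketch (not part of the original module): create_sts_boundary
# returns absolute [x, y] pairs suitable for building a boundary polygon.
# 'event' is a hypothetical basename for an existing event.sts file.
def _example_create_sts_boundary():
    boundary_points = create_sts_boundary('event')  # Opens event.sts
    print 'First boundary point: %s' %str(boundary_points[0])
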
5222class Write_sww:
5223    from anuga.shallow_water.shallow_water_domain import Domain
5224
5225    # FIXME (Ole): Hardwiring the conserved quantities like
5226    # this could be a problem. I would prefer taking them from
5227    # the instantiation of Domain.
5228    #
5229    # (DSG) There is not always a Domain instance when Write_sww is used.
    # Check to see if this is the same level of hardwiring as is in
    # the shallow water domain.
5232   
5233    sww_quantities = Domain.conserved_quantities
5234
5235
5236    RANGE = '_range'
5237    EXTREMA = ':extrema'
5238
5239    def __init__(self):
5240        pass
5241   
5242    def store_header(self,
5243                     outfile,
5244                     times,
5245                     number_of_volumes,
5246                     number_of_points,
5247                     description='Converted from XXX',
5248                     smoothing=True,
5249                     order=1,
5250                     sww_precision=Float32,
5251                     verbose=False):
5252        """
        outfile - the open NetCDF file object to be written to
5254        times - A list of the time slice times OR a start time
5255        Note, if a list is given the info will be made relative.
5256        number_of_volumes - the number of triangles
5257        """
5258   
5259        outfile.institution = 'Geoscience Australia'
5260        outfile.description = description
5261
5262        # For sww compatibility
5263        if smoothing is True:
            # Smoothing to be deprecated
5265            outfile.smoothing = 'Yes'
5266            outfile.vertices_are_stored_uniquely = 'False'
5267        else:
            # Smoothing to be deprecated
5269            outfile.smoothing = 'No'
5270            outfile.vertices_are_stored_uniquely = 'True'
5271        outfile.order = order
5272
5273        try:
5274            revision_number = get_revision_number()
5275        except:
5276            revision_number = None
5277        # Allow None to be stored as a string               
5278        outfile.revision_number = str(revision_number) 
5279
5280
5281       
5282        # times - A list or array of the time slice times OR a start time
5283        # times = ensure_numeric(times)
5284        # Start time in seconds since the epoch (midnight 1/1/1970)
5285
        # This is used to separate a single start time from a list of times;
        # i.e. it distinguishes lists and numeric arrays from a scalar.
5288        if type(times) is list or type(times) is ArrayType: 
5289            number_of_times = len(times)
5290            times = ensure_numeric(times) 
5291            if number_of_times == 0:
5292                starttime = 0
5293            else:
5294                starttime = times[0]
5295                times = times - starttime  #Store relative times
5296        else:
5297            number_of_times = 0
5298            starttime = times
5299            #times = ensure_numeric([])
5300        outfile.starttime = starttime
5301        # dimension definitions
5302        outfile.createDimension('number_of_volumes', number_of_volumes)
5303        outfile.createDimension('number_of_vertices', 3)
5304        outfile.createDimension('numbers_in_range', 2)
5305   
5306        if smoothing is True:
5307            outfile.createDimension('number_of_points', number_of_points)
5308       
            # FIXME(Ole): This will cause sww files for parallel domains to
5310            # have ghost nodes stored (but not used by triangles).
5311            # To clean this up, we have to change get_vertex_values and
5312            # friends in quantity.py (but I can't be bothered right now)
5313        else:
5314            outfile.createDimension('number_of_points', 3*number_of_volumes)
5315        outfile.createDimension('number_of_timesteps', number_of_times)
5316
5317        # variable definitions
5318        outfile.createVariable('x', sww_precision, ('number_of_points',))
5319        outfile.createVariable('y', sww_precision, ('number_of_points',))
5320        outfile.createVariable('elevation', sww_precision, ('number_of_points',))
5321        q = 'elevation'
5322        outfile.createVariable(q+Write_sww.RANGE, sww_precision,
5323                               ('numbers_in_range',))
5324
5325
5326        # Initialise ranges with small and large sentinels.
5327        # If this was in pure Python we could have used None sensibly
5328        outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
5329        outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
5330
5331        # FIXME: Backwards compatibility
5332        outfile.createVariable('z', sww_precision, ('number_of_points',))
5333        #################################
5334
5335        outfile.createVariable('volumes', Int, ('number_of_volumes',
5336                                                'number_of_vertices'))
5337        # Doing sww_precision instead of Float gives cast errors.
5338        outfile.createVariable('time', Float,
5339                               ('number_of_timesteps',))
5340       
5341        for q in Write_sww.sww_quantities:
5342            outfile.createVariable(q, sww_precision,
5343                                   ('number_of_timesteps',
5344                                    'number_of_points')) 
5345            outfile.createVariable(q+Write_sww.RANGE, sww_precision,
5346                                   ('numbers_in_range',))
5347
5348            # Initialise ranges with small and large sentinels.
5349            # If this was in pure Python we could have used None sensibly
5350            outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
5351            outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
5352           
5353        if type(times) is list or type(times) is ArrayType: 
5354            outfile.variables['time'][:] = times    #Store time relative
5355           
5356        if verbose:
5357            print '------------------------------------------------'
5358            print 'Statistics:'
5359            print '    t in [%f, %f], len(t) == %d'\
5360                  %(min(times.flat), max(times.flat), len(times.flat))
5361
5362       
5363    def store_triangulation(self,
5364                            outfile,
5365                            points_utm,
5366                            volumes,
5367                            elevation, zone=None, new_origin=None, 
5368                            points_georeference=None, verbose=False):
5369        """
5370       
        new_origin - a georeference that the points can be set to. (Maybe
        do this before calling this function.)
5373       
5374        points_utm - currently a list or array of the points in UTM.
5375        points_georeference - the georeference of the points_utm
5376       
5377        How about passing new_origin and current_origin.
        If you get both, do a conversion from the old to the new.
5379       
5380        If you only get new_origin, the points are absolute,
5381        convert to relative
5382       
5383        if you only get the current_origin the points are relative, store
5384        as relative.
5385       
5386        if you get no georefs create a new georef based on the minimums of
5387        points_utm.  (Another option would be to default to absolute)
5388       
5389        Yes, and this is done in another part of the code.
5390        Probably geospatial.
5391       
5392        If you don't supply either geo_refs, then supply a zone. If not
5393        the default zone will be used.
5394       
5395       
        precondition:
            header has been called.
        """
5400       
5401        number_of_points = len(points_utm)   
5402        volumes = array(volumes) 
5403        points_utm = array(points_utm)
5404
5405        # given the two geo_refs and the points, do the stuff
5406        # described in the method header
5407        # if this is needed else where, pull out as a function
5408        points_georeference = ensure_geo_reference(points_georeference)
5409        new_origin = ensure_geo_reference(new_origin)
5410        if new_origin is None and points_georeference is not None:
5411            points = points_utm
5412            geo_ref = points_georeference
5413        else:
5414            if new_origin is None:
5415                new_origin = Geo_reference(zone,min(points_utm[:,0]),
5416                                           min(points_utm[:,1]))
5417            points = new_origin.change_points_geo_ref(points_utm,
5418                                                      points_georeference)
5419            geo_ref = new_origin
5420
5421        # At this stage I need a georef and points
5422        # the points are relative to the georef
5423        geo_ref.write_NetCDF(outfile)
5424   
5425        # This will put the geo ref in the middle
5426        #geo_ref=Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
5427       
5428        x =  points[:,0]
5429        y =  points[:,1]
5430        z = outfile.variables['z'][:]
5431   
5432        if verbose:
5433            print '------------------------------------------------'
5434            print 'More Statistics:'
            print '  Extent (x/y):'
            print '    x in [%f, %f], len(x) == %d'\
                  %(min(x), max(x),
                    len(x))
            print '    y in [%f, %f], len(y) == %d'\
                  %(min(y), max(y),
                    len(y))
5442            print '    z in [%f, %f], len(z) == %d'\
5443                  %(min(elevation), max(elevation),
5444                    len(elevation))
5445            print 'geo_ref: ',geo_ref
5446            print '------------------------------------------------'
5447           
5448        #z = resize(bath_grid,outfile.variables['z'][:].shape)
5449        #print "points[:,0]", points[:,0]
5450        outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner()
5451        outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner()
5452        outfile.variables['z'][:] = elevation
5453        outfile.variables['elevation'][:] = elevation  #FIXME HACK
5454        outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
5455
5456        q = 'elevation'
5457        # This updates the _range values
5458        outfile.variables[q+Write_sww.RANGE][0] = min(elevation)
5459        outfile.variables[q+Write_sww.RANGE][1] = max(elevation)
5460
5461
5462    def store_quantities(self, outfile, sww_precision=Float32,
5463                         slice_index=None, time=None,
5464                         verbose=False, **quant):
5465        """
5466        Write the quantity info.
5467
5468        **quant is extra keyword arguments passed in. These must be
5469          the sww quantities, currently; stage, xmomentum, ymomentum.
5470       
        If the time array has already been built, use the slice_index
        to specify the index.

        Otherwise, use time to increase the time dimension.

        Maybe make this general, but the viewer assumes these quantities,
        so maybe we don't want it general - unless the viewer is general.

        precondition:
            triangulation and header have been called.
5482        """
5483
5484        if time is not None:
5485            file_time = outfile.variables['time']
5486            slice_index = len(file_time)
5487            file_time[slice_index] = time   
5488
5489        # Write the conserved quantities from Domain.
5490        # Typically stage,  xmomentum, ymomentum
5491        # other quantities will be ignored, silently.
5492        # Also write the ranges: stage_range,
5493        # xmomentum_range and ymomentum_range
5494        for q in Write_sww.sww_quantities:
5495            if not quant.has_key(q):
5496                msg = 'SWW file can not write quantity %s' %q
5497                raise NewQuantity, msg
5498            else:
5499                q_values = quant[q]
5500                outfile.variables[q][slice_index] = \
5501                                q_values.astype(sww_precision)
5502
5503                # This updates the _range values
5504                q_range = outfile.variables[q+Write_sww.RANGE][:]
5505                q_values_min = min(q_values)
5506                if q_values_min < q_range[0]:
5507                    outfile.variables[q+Write_sww.RANGE][0] = q_values_min
5508                q_values_max = max(q_values)
5509                if q_values_max > q_range[1]:
5510                    outfile.variables[q+Write_sww.RANGE][1] = q_values_max
5511
5512    def verbose_quantities(self, outfile):
5513        print '------------------------------------------------'
5514        print 'More Statistics:'
5515        for q in Write_sww.sww_quantities:
            print '    %s in [%f, %f]' %(q,
5517                                       outfile.variables[q+Write_sww.RANGE][0],
5518                                       outfile.variables[q+Write_sww.RANGE][1])
5519        print '------------------------------------------------'
5520
5521
5522       
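# Hedged sketch of the Write_sww calling protocol (not part of the original
# module): store_header, then store_triangulation, then one store_quantities
# call per timestep. All arguments are assumed to be prepared by the caller;
# outfile is an open NetCDF file and the quantity values are Numeric arrays.
def _example_write_sww(outfile, times, points_utm, volumes, elevation,
                       stages, xmomenta, ymomenta):
    sww = Write_sww()
    sww.store_header(outfile, times, len(volumes), len(points_utm))
    sww.store_triangulation(outfile, points_utm, volumes, elevation, zone=56)
    for j in range(len(times)):
        sww.store_quantities(outfile, slice_index=j,
                             stage=stages[j],
                             xmomentum=xmomenta[j],
                             ymomentum=ymomenta[j])
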
5523def obsolete_write_sww_time_slices(outfile, has, uas, vas, elevation,
5524                         mean_stage=0, zscale=1,
5525                         verbose=False):   
5526    #Time stepping
5527    stage = outfile.variables['stage']
5528    xmomentum = outfile.variables['xmomentum']
5529    ymomentum = outfile.variables['ymomentum']
5530
5531    n = len(has)
5532    j=0
5533    for ha, ua, va in map(None, has, uas, vas):
5534        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
5535        w = zscale*ha + mean_stage
5536        stage[j] = w
5537        h = w - elevation
5538        xmomentum[j] = ua*h
        ymomentum[j] = -1*va*h  # -1 since in mux files south is positive.
5540        j += 1
5541   
5542def urs2txt(basename_in, location_index=None):
5543    """
5544    Not finished or tested
5545    """
5546   
5547    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
5548                basename_in + EAST_VELOCITY_LABEL,
5549                basename_in + NORTH_VELOCITY_LABEL]
5550    quantities = ['HA','UA','VA']
5551
5552    d = ","
5553   
    # Instantiate Urs_points for each of the three mux files.
5555    mux = {}
5556    for quantity, file in map(None, quantities, files_in):
5557        mux[quantity] = Urs_points(file)
5558       
5559    # Could check that the depth is the same. (hashing)
5560
5561    # handle to a mux file to do depth stuff
5562    a_mux = mux[quantities[0]]
5563   
5564    # Convert to utm
5565    latitudes = a_mux.lonlatdep[:,1]
5566    longitudes = a_mux.lonlatdep[:,0]
5567    points_utm, zone = convert_from_latlon_to_utm( \
5568        latitudes=latitudes, longitudes=longitudes)
5569    #print "points_utm", points_utm
5570    #print "zone", zone
    depths = a_mux.lonlatdep[:,2]  # Depth in metres
5572   
5573    fid = open(basename_in + '.txt', 'w')
5574
5575    fid.write("zone: " + str(zone) + "\n")
5576
5577    if location_index is not None:
5578        #Title
5579        li = location_index
5580        fid.write('location_index'+d+'lat'+d+ 'long' +d+ 'Easting' +d+ \
5581                  'Northing' + "\n")
5582        fid.write(str(li) +d+ str(latitudes[li])+d+ \
5583              str(longitudes[li]) +d+ str(points_utm[li][0]) +d+ \
              str(points_utm[li][1]) + "\n")
5585
5586    # the non-time dependent stuff
5587    #Title
5588    fid.write('location_index'+d+'lat'+d+ 'long' +d+ 'Easting' +d+ \
5589              'Northing' +d+ 'depth m' + "\n")
5590    i = 0
5591    for depth, point_utm, lat, long in map(None, depths,
5592                                               points_utm, latitudes,
5593                                               longitudes):
5594       
5595        fid.write(str(i) +d+ str(lat)+d+ str(long) +d+ str(point_utm[0]) +d+ \
                  str(point_utm[1]) +d+ str(depth) + "\n")
5597        i +=1
5598    #Time dependent
5599    if location_index is not None:
5600        time_step = a_mux.time_step
5601        i = 0
5602        #Title
5603        fid.write('time' +d+ 'HA depth m'+d+ \
5604                 'UA momentum East x m/sec' +d+ 'VA momentum North y m/sec' \
5605                      + "\n")
5606        for HA, UA, VA in map(None, mux['HA'], mux['UA'], mux['VA']):
5607            fid.write(str(i*time_step) +d+ str(HA[location_index])+d+ \
5608                      str(UA[location_index]) +d+ str(VA[location_index]) \
5609                      + "\n")
5610           
5611            i +=1
5612
5613class Write_sts:
5614
5615    sts_quantities = ['stage','xmomentum','ymomentum']
5616
5617
5618    RANGE = '_range'
5619    EXTREMA = ':extrema'
5620   
5621    def __init__(self):
5622        pass
5623
5624    def store_header(self,
5625                     outfile,
5626                     times,
5627                     number_of_points,
5628                     description='Converted from URS mux2 format',
5629                     sts_precision=Float32,
5630                     verbose=False):
5631        """
        outfile - the open NetCDF file object to be written to
        times - A list of the time slice times OR a start time
        Note, if a list is given the info will be made relative.
        number_of_points - the number of urs gauge sites
5636        """
5637
5638        outfile.institution = 'Geoscience Australia'
5639        outfile.description = description
5640       
5641        try:
5642            revision_number = get_revision_number()
5643        except:
5644            revision_number = None
5645        # Allow None to be stored as a string               
5646        outfile.revision_number = str(revision_number) 
5647       
5648        # times - A list or array of the time slice times OR a start time
5649        # Start time in seconds since the epoch (midnight 1/1/1970)
5650
        # This is used to separate a single start time from a list of times;
        # i.e. it distinguishes lists and numeric arrays from a scalar.
5653        if type(times) is list or type(times) is ArrayType: 
5654            number_of_times = len(times)
5655            times = ensure_numeric(times) 
5656            if number_of_times == 0:
5657                starttime = 0
5658            else:
5659                starttime = times[0]
5660                times = times - starttime  #Store relative times
5661        else:
5662            number_of_times = 0
5663            starttime = times
5664
5665        outfile.starttime = starttime
5666
5667        # Dimension definitions
5668        outfile.createDimension('number_of_points', number_of_points)
5669        outfile.createDimension('number_of_timesteps', number_of_times)
5670        outfile.createDimension('numbers_in_range', 2)
5671
5672        # Variable definitions
5673        outfile.createVariable('x', sts_precision, ('number_of_points',))
5674        outfile.createVariable('y', sts_precision, ('number_of_points',))
5675        outfile.createVariable('elevation', sts_precision,('number_of_points',))
5676
5677        q = 'elevation'
5678        outfile.createVariable(q+Write_sts.RANGE, sts_precision,
5679                               ('numbers_in_range',))
5680
5681        # Initialise ranges with small and large sentinels.
5682        # If this was in pure Python we could have used None sensibly
5683        outfile.variables[q+Write_sts.RANGE][0] = max_float  # Min
5684        outfile.variables[q+Write_sts.RANGE][1] = -max_float # Max
5685
5686        # Doing sts_precision instead of Float gives cast errors.
5687        outfile.createVariable('time', Float, ('number_of_timesteps',))
5688
5689        for q in Write_sts.sts_quantities:
5690            outfile.createVariable(q, sts_precision,
5691                                   ('number_of_timesteps',
5692                                    'number_of_points'))
5693            outfile.createVariable(q+Write_sts.RANGE, sts_precision,
5694                                   ('numbers_in_range',))
5695            # Initialise ranges with small and large sentinels.
5696            # If this was in pure Python we could have used None sensibly
5697            outfile.variables[q+Write_sts.RANGE][0] = max_float  # Min
5698            outfile.variables[q+Write_sts.RANGE][1] = -max_float # Max
5699
5700        if type(times) is list or type(times) is ArrayType: 
5701            outfile.variables['time'][:] = times    #Store time relative
5702
5703        if verbose:
5704            print '------------------------------------------------'
5705            print 'Statistics:'
5706            print '    t in [%f, %f], len(t) == %d'\
5707                  %(min(times.flat), max(times.flat), len(times.flat))
5708
5709    def store_points(self,
5710                     outfile,
5711                     points_utm,
5712                     elevation, zone=None, new_origin=None, 
5713                     points_georeference=None, verbose=False):
5714
5715        """
5716        points_utm - currently a list or array of the points in UTM.
5717        points_georeference - the georeference of the points_utm
5718
5719        How about passing new_origin and current_origin.
        If you get both, do a conversion from the old to the new.
5721       
5722        If you only get new_origin, the points are absolute,
5723        convert to relative
5724       
5725        if you only get the current_origin the points are relative, store
5726        as relative.
5727       
5728        if you get no georefs create a new georef based on the minimums of
5729        points_utm.  (Another option would be to default to absolute)
5730       
5731        Yes, and this is done in another part of the code.
5732        Probably geospatial.
5733       
5734        If you don't supply either geo_refs, then supply a zone. If not
5735        the default zone will be used.
5736
5737        precondition:
5738             header has been called.
5739        """
5740
5741        number_of_points = len(points_utm)
5742        points_utm = array(points_utm)
5743
5744        # given the two geo_refs and the points, do the stuff
5745        # described in the method header
5746        points_georeference = ensure_geo_reference(points_georeference)
5747        new_origin = ensure_geo_reference(new_origin)
5748       
5749        if new_origin is None and points_georeference is not None:
5750            points = points_utm
5751            geo_ref = points_georeference
5752        else:
5753            if new_origin is None:
5754                new_origin = Geo_reference(zone,min(points_utm[:,0]),
5755                                           min(points_utm[:,1]))
5756            points = new_origin.change_points_geo_ref(points_utm,
5757                                                      points_georeference)
5758            geo_ref = new_origin
5759
5760        # At this stage I need a georef and points
5761        # the points are relative to the georef
5762        geo_ref.write_NetCDF(outfile)
5763
5764        x =  points[:,0]
5765        y =  points[:,1]
5766        z = outfile.variables['z'][:]
5767   
5768        if verbose:
5769            print '------------------------------------------------'
5770            print 'More Statistics:'
            print '  Extent (x/y):'
            print '    x in [%f, %f], len(x) == %d'\
                  %(min(x), max(x),
                    len(x))
            print '    y in [%f, %f], len(y) == %d'\
                  %(min(y), max(y),
                    len(y))
5778            print '    z in [%f, %f], len(z) == %d'\
5779                  %(min(elevation), max(elevation),
5780                    len(elevation))
5781            print 'geo_ref: ',geo_ref
5782            print '------------------------------------------------'
5783           
5784        #z = resize(bath_grid,outfile.variables['z'][:].shape)
5785        #print "points[:,0]", points[:,0]
5786        outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner()
5787        outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner()
5788        outfile.variables['z'][:] = elevation
5789        outfile.variables['elevation'][:] = elevation  #FIXME HACK4
5790
5791        q = 'elevation'
5792        # This updates the _range values
5793        outfile.variables[q+Write_sts.RANGE][0] = min(elevation)
5794        outfile.variables[q+Write_sts.RANGE][1] = max(elevation)
5795
5796    def store_quantities(self, outfile, sts_precision=Float32,
5797                         slice_index=None, time=None,
5798                         verbose=False, **quant):
5799       
5800        """
5801        Write the quantity info.
5802
5803        **quant is extra keyword arguments passed in. These must be
5804          the sts quantities, currently; stage.
5805       
        If the time array has already been built, use the slice_index
5807        to specify the index.
5808       
5809        Otherwise, use time to increase the time dimension
5810
5811        Maybe make this general, but the viewer assumes these quantities,
5812        so maybe we don't want it general - unless the viewer is general
5813       
5814        precondition:
5815            triangulation and
5816            header have been called.
5817        """
5818        if time is not None:
5819            file_time = outfile.variables['time']
5820            slice_index = len(file_time)
5821            file_time[slice_index] = time   
5822
5823        # Write the conserved quantities from Domain.
5824        # Typically stage,  xmomentum, ymomentum
5825        # other quantities will be ignored, silently.
5826        # Also write the ranges: stage_range
5827        for q in Write_sts.sts_quantities:
5828            if not quant.has_key(q):
5829                msg = 'STS file can not write quantity %s' %q
5830                raise NewQuantity, msg
5831            else:
5832                q_values = quant[q]
5833                outfile.variables[q][slice_index] = \
5834                                q_values.astype(sts_precision)
5835
5836                # This updates the _range values
5837                q_range = outfile.variables[q+Write_sts.RANGE][:]
5838                q_values_min = min(q_values)
5839                if q_values_min < q_range[0]:
5840                    outfile.variables[q+Write_sts.RANGE][0] = q_values_min
5841                q_values_max = max(q_values)
5842                if q_values_max > q_range[1]:
5843                    outfile.variables[q+Write_sts.RANGE][1] = q_values_max
5844
5845
5846   
5847class Urs_points:
5848    """
5849    Read the info in URS mux files.
5850
    For the quantities, here is a correlation between the file names and
    what they mean:
    z-mux is height above sea level, m
    e-mux is velocity in the Eastern direction, m/s
    n-mux is velocity in the Northern direction, m/s
5856    """
5857    def __init__(self,urs_file):
5858        self.iterated = False
5859        columns = 3 # long, lat , depth
5860        mux_file = open(urs_file, 'rb')
5861       
5862        # Number of points/stations
5863        (self.points_num,)= unpack('i',mux_file.read(4))
5864       
5865        # nt, int - Number of time steps
5866        (self.time_step_count,)= unpack('i',mux_file.read(4))
5867        #print "self.time_step_count", self.time_step_count
5868        #dt, float - time step, seconds
5869        (self.time_step,) = unpack('f', mux_file.read(4))
5870        #print "self.time_step", self.time_step
5871        msg = "Bad data in the urs file."
5872        if self.points_num < 0:
5873            mux_file.close()
5874            raise ANUGAError, msg
5875        if self.time_step_count < 0:
5876            mux_file.close()
5877            raise ANUGAError, msg
5878        if self.time_step < 0:
5879            mux_file.close()
5880            raise ANUGAError, msg
5881
        # The depth is in metres; it is the distance from the ocean
        # surface down to the sea bottom.
5884        lonlatdep = p_array.array('f')
5885        lonlatdep.read(mux_file, columns * self.points_num)
5886        lonlatdep = array(lonlatdep, typecode=Float)   
5887        lonlatdep = reshape(lonlatdep, (self.points_num, columns))
5888        #print 'lonlatdep',lonlatdep
5889        self.lonlatdep = lonlatdep
5890       
5891        self.mux_file = mux_file
5892        # check this array
5893
5894    def __iter__(self):
5895        """
5896        iterate over quantity data which is with respect to time.
5897
5898        Note: You can only interate once over an object
5899       
5900        returns quantity infomation for each time slice
5901        """
5902        msg =  "You can only interate once over a urs file."
5903        assert not self.iterated, msg
5904        self.iter_time_step = 0
5905        self.iterated = True
5906        return self
5907   
5908    def next(self):
5909        if self.time_step_count == self.iter_time_step:
5910            self.close()
5911            raise StopIteration
5912        #Read in a time slice  from mux file 
5913        hz_p_array = p_array.array('f')
5914        hz_p_array.read(self.mux_file, self.points_num)
5915        hz_p = array(hz_p_array, typecode=Float)
5916        self.iter_time_step += 1
5917       
5918        return hz_p
5919
5920    def close(self):
5921        self.mux_file.close()
5922       
5923    #### END URS UNGRIDDED 2 SWW ###
5924
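# Hedged usage sketch for Urs_points (not part of the original module);
# 'project-z-mux' is a hypothetical file. Each iteration yields one time
# slice holding a value per point, and iteration is only possible once.
def _example_urs_points():
    urs_points = Urs_points('project' + WAVEHEIGHT_MUX_LABEL)
    print 'points: %d, timesteps: %d' %(urs_points.points_num,
                                        urs_points.time_step_count)
    for hz_p in urs_points:
        print 'First point value in this time slice: %f' %hz_p[0]
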
5925       
5926def start_screen_catcher(dir_name=None, myid='', numprocs='', extra_info='',
5927                         verbose=True):
5928    """
    Used to store screen output and errors to file. If run on multiple
    processes each processor will have its own output and error file.

    extra_info - a string that can be used to identify outputs with another
    string, eg. '_other'

    FIXME: Would be good if you could suppress all the screen output and
    only save it to file... however it seems a bit tricky as this capture
    technique responds to sys.stdout and by this time it is already printed out.
5938    """
5939   
5940    import sys
5941#    dir_name = dir_name
    if dir_name is None:
5943        dir_name=getcwd()
5944       
5945    if access(dir_name,W_OK) == 0:
5946        if verbose: print 'Making directory %s' %dir_name
5947      #  if verbose: print "myid", myid
5948        mkdir (dir_name,0777)
5949
    if myid != '':
        myid = '_'+str(myid)
    if numprocs != '':
        numprocs = '_'+str(numprocs)
    if extra_info != '':
        extra_info = '_'+str(extra_info)
5956#    print 'hello1'
5957    screen_output_name = join(dir_name, "screen_output%s%s%s.txt" %(myid,
5958                                                                numprocs,
5959                                                                extra_info))
5960    screen_error_name = join(dir_name,  "screen_error%s%s%s.txt" %(myid,
5961                                                              numprocs,
5962                                                              extra_info))
5963
5964    if verbose: print 'Starting ScreenCatcher, all output will be stored in %s' \
5965                                     %(screen_output_name)
5966    #used to catch screen output to file
5967    sys.stdout = Screen_Catcher(screen_output_name)
5968    sys.stderr = Screen_Catcher(screen_error_name)
5969
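# Hedged usage sketch (not part of the original module): redirect subsequent
# screen output and errors into files under a hypothetical directory.
# With myid=0 and numprocs=4 the output file becomes
# screen_output/screen_output_0_4.txt.
def _example_start_screen_catcher():
    start_screen_catcher('screen_output'+sep, myid=0, numprocs=4)
    print 'This line is stored in the screen output file'
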
5970class Screen_Catcher:
5971    """this simply catches the screen output and stores it to file defined by
5972    start_screen_catcher (above)
5973    """
5974   
5975    def __init__(self, filename):
5976        self.filename = filename
5977#        print 'init'
        if exists(self.filename):
            print 'Old existing file "%s" has been deleted' %(self.filename)
5980            remove(self.filename)
5981
5982    def write(self, stuff):
5983        fid = open(self.filename, 'a')
5984        fid.write(stuff)
5985        fid.close()
5986       
5987# FIXME (DSG): Add unit test, make general, not just 2 files,
5988# but any number of files.
5989def copy_code_files(dir_name, filename1, filename2=None):
5990    """Copies "filename1" and "filename2" to "dir_name". Very useful for
    information management.
    filename1 and filename2 are both absolute pathnames.
5993    """
5994
5995    if access(dir_name,F_OK) == 0:
5996        print 'Make directory %s' %dir_name
5997        mkdir (dir_name,0777)
5998    shutil.copy(filename1, dir_name + sep + basename(filename1))
    if filename2 is not None:
6000        shutil.copy(filename2, dir_name + sep + basename(filename2))
6001        print 'Files %s and %s copied' %(filename1, filename2)
6002    else:
6003        print 'File %s copied' %(filename1)
6004
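# Hedged usage sketch for copy_code_files (not part of the original module);
# the directory and script paths are hypothetical.
def _example_copy_code_files():
    copy_code_files('run1_output', '/home/user/project/run_model.py',
                    '/home/user/project/project.py')
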
6005def get_data_from_file(filename,separator_value = ','):
6006    """
    Read in data information from a file.

    Returns:
        header_fields, a list of the fields in the first line, separated
        by the 'separator_value'

        data, an array (M lines x N data columns) of the values in the file,
        excluding the header

    NOTE: won't deal with columns of different lengths and there must be
    no blank lines at the end.
6018    """
6019   
6020    fid = open(filename)
6021    lines = fid.readlines()
6022   
6023    fid.close()
6024   
6025    header_line = lines[0]
6026    header_fields = header_line.split(separator_value)
6027
    # Array to store the data; initialised empty, then resized to
    # (number of lines - 1) x (number of fields) to hold floats.
6030    data=array([],typecode=Float)
6031    data=resize(data,((len(lines)-1),len(header_fields)))
6032#    print 'number of fields',range(len(header_fields))
6033#    print 'number of lines',len(lines), shape(data)
6034#    print'data',data[1,1],header_line
6035
    array_number = 0
    line_number = 1
    while line_number < len(lines):
        # These are the lines below the header, explaining the +1;
        # array_number == line_number - 1
        fields = lines[line_number].split(separator_value)
        for i in range(len(header_fields)):
            # Assign to array
            data[array_number,i] = float(fields[i])

        line_number = line_number + 1
        array_number = array_number + 1
6048       
6049    return header_fields, data
6050
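# Self-contained sketch of get_data_from_file (not part of the original
# module) using a small temporary csv file; note there is no blank line
# at the end of the written file.
def _example_get_data_from_file():
    fid = open('example_data.csv', 'w')
    fid.write('time,stage\n0.0,1.5\n10.0,1.7')
    fid.close()
    header_fields, data = get_data_from_file('example_data.csv')
    print header_fields    # ['time', 'stage\n']
    print data[1,1]        # 1.7
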
6051def store_parameters(verbose=False,**kwargs):
6052    """
6053    Store "kwargs" into a temp csv file, if "completed" is a kwargs csv file is
6054    kwargs[file_name] else it is kwargs[output_dir] + details_temp.csv
6055   
6056    Must have a file_name keyword arg, this is what is writing to.
6057    might be a better way to do this using CSV module Writer and writeDict
6058   
6059    writes file to "output_dir" unless "completed" is in kwargs, then
6060    it writes to "file_name" kwargs
6061
6062    """
6063    import types
6064#    import os
6065   
6066    # Check that kwargs is a dictionary
6067    if type(kwargs) != types.DictType:
6068        raise TypeError
6069   
    # Is 'completed' in kwargs?
    completed = kwargs.has_key('completed')
6076 
    # Get file name and assert that a file_name exists
    if completed:
        try:
            file = str(kwargs['file_name'])
        except:
            raise Exception, 'kwargs must have file_name'
    else:
        # Write temp file in output directory
        try:
            file = str(kwargs['output_dir'])+'detail_temp.csv'
        except:
            raise Exception, 'kwargs must have output_dir'
6089       
6090    #extracts the header info and the new line info
6091    line=''
6092    header=''
6093    count=0
6094    keys = kwargs.keys()
6095    keys.sort()
6096   
6097    #used the sorted keys to create the header and line data
6098    for k in keys:
6099#        print "%s = %s" %(k, kwargs[k])
6100        header = header+str(k)
6101        line = line+str(kwargs[k])
6102        count+=1
6103        if count <len(kwargs):
6104            header = header+','
6105            line = line+','
6106    header+='\n'
6107    line+='\n'
6108
    # Check the header info: if the file exists and the header matches,
    # append; if not, create a new file
    try:
        fid = open(file, 'r')
        file_header = fid.readline()
        fid.close()
        if verbose: print 'read file header %s' % file_header
    except:
        msg = 'Could not read file %s; trying to create it' % file
        if verbose: print msg
        # Try to create the file; the directory may be bad
        try:
            fid = open(file, 'w')
            fid.write(header)
            fid.close()
            file_header = header
        except:
            msg = 'Cannot create new file %s' % file
            raise IOError, msg

    # If the header is the same, or this is a new file, append the line
    if file_header == str(header):
        fid = open(file, 'a')
        fid.write(line)
        fid.close()
    else:
        # Backup plan: if the header is different, append both header and
        # line to the detail_temp.csv file in the output directory
        file = str(kwargs['output_dir']) + 'detail_temp.csv'
        fid = open(file, 'a')
        fid.write(header)
        fid.write(line)
        fid.close()
        if verbose:
            print 'file header: %s' % file_header.strip('\n')
            print 'new header:  %s' % header.strip('\n')
        msg = 'WARNING: File header does not match input info; ' \
              'the input variables have changed. Suggest changing the file name.'
        print msg


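# Example usage of store_parameters (a sketch; the directory and the run
# metadata keywords are illustrative only):
#
#     store_parameters(verbose=True,
#                      output_dir='results/',
#                      run_time=1200.0,
#                      cellsize=25)
#
# Each call appends one csv row; the header is built from the sorted
# keyword names, so calling with a different set of keywords triggers the
# backup file behaviour described above.
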
#------------------------------------------------
# Functions to obtain diagnostics from sww files
#------------------------------------------------

def get_mesh_and_quantities_from_file(filename,
                                      quantities=None,
                                      verbose=False):
    """Get and rebuild mesh structure and associated quantities from sww file

    Input:
        filename - Name of sww file
        quantities - Names of quantities to load

    Output:
        mesh - instance of class Mesh
               (holding nodes, triangles and boundary information)
        quantities - arrays with quantity values at each mesh node
        time - vector of stored timesteps
    """

    # FIXME (Ole): Maybe refactor filefunction using this more fundamental code.

    import types
    from Scientific.IO.NetCDF import NetCDFFile
    from shallow_water import Domain
    from Numeric import asarray, transpose, resize
    from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh

    if verbose: print 'Reading from ', filename
    fid = NetCDFFile(filename, 'r')    # Open existing file for read
    time = fid.variables['time'][:]    # Time vector
    time += fid.starttime[0]

    # Get the variables as Numeric arrays
    x = fid.variables['x'][:]                   # x-coordinates of nodes
    y = fid.variables['y'][:]                   # y-coordinates of nodes
    elevation = fid.variables['elevation'][:]   # Elevation
    stage = fid.variables['stage'][:]           # Water level
    xmomentum = fid.variables['xmomentum'][:]   # Momentum in the x-direction
    ymomentum = fid.variables['ymomentum'][:]   # Momentum in the y-direction

    # Mesh (nodes (Mx2), triangles (Nx3))
    nodes = concatenate((x[:, NewAxis], y[:, NewAxis]), axis=1)
    triangles = fid.variables['volumes'][:]

    # Get geo_reference
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except AttributeError:
        # Sww files don't have to have a geo_ref
        geo_reference = None

    if verbose: print '    building mesh from sww file %s' % filename
    boundary = None

    # FIXME (Peter Row): Should this be in mesh?
    if fid.smoothing != 'Yes':
        nodes = nodes.tolist()
        triangles = triangles.tolist()
        nodes, triangles, boundary = weed(nodes, triangles, boundary)

    try:
        mesh = Mesh(nodes, triangles, boundary,
                    geo_reference=geo_reference)
    except AssertionError, e:
        fid.close()
        msg = 'Domain could not be created: %s' % e
        raise DataDomainError, msg

    quantities = {}
    quantities['elevation'] = elevation
    quantities['stage'] = stage
    quantities['xmomentum'] = xmomentum
    quantities['ymomentum'] = ymomentum

    fid.close()
    return mesh, quantities, time


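# Example usage of get_mesh_and_quantities_from_file (a sketch;
# 'model_output.sww' is illustrative):
#
#     mesh, quantities, time = \
#         get_mesh_and_quantities_from_file('model_output.sww')
#     stage = quantities['stage']    # One row of nodal values per timestep
#     print 'Model times run from %f to %f' % (time[0], time[-1])
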
def get_flow_through_cross_section(filename,
                                   polyline,
                                   verbose=False):
    """Obtain flow (m^3/s) perpendicular to specified cross section.

    Inputs:
        filename: Name of sww file
        polyline: Representation of desired cross section - it may contain
                  multiple sections allowing for complex shapes. Absolute
                  UTM coordinates are assumed.
                  Format [[x0, y0], [x1, y1], ...]

    Output:
        time: All stored times in sww file
        Q: Hydrograph of total flow across given segments for all stored times.

    The normal flow is computed for each triangle intersected by the polyline
    and added up.  If multiple segments at different angles are specified, the
    normal flows may partially cancel each other.

    The typical usage of this function would be to get flow through a channel,
    and the polyline would then be a cross section perpendicular to the flow.
    """

    from anuga.fit_interpolate.interpolate import Interpolation_function

    quantity_names = ['elevation',
                      'stage',
                      'xmomentum',
                      'ymomentum']

    # Get mesh and quantities from sww file
    X = get_mesh_and_quantities_from_file(filename,
                                          quantities=quantity_names,
                                          verbose=verbose)
    mesh, quantities, time = X

    # Adjust polyline to mesh spatial origin
    polyline = mesh.geo_reference.get_relative(polyline)

    # Find all intersections and associated triangles.
    segments = mesh.get_intersecting_segments(polyline, verbose=verbose)

    # Then store for each triangle the length of the intersecting segment(s),
    # right hand normal(s) and midpoints as interpolation_points.
    interpolation_points = []
    for segment in segments:
        midpoint = sum(array(segment.segment))/2
        interpolation_points.append(midpoint)

    if verbose:
        print 'Interpolating - ',
        print 'total number of interpolation points = %d'\
              % len(interpolation_points)

    I = Interpolation_function(time,
                               quantities,
                               quantity_names=quantity_names,
                               vertex_coordinates=mesh.nodes,
                               triangles=mesh.triangles,
                               interpolation_points=interpolation_points,
                               verbose=verbose)

    if verbose: print 'Computing hydrograph'

    # Compute hydrograph
    Q = []
    for t in time:
        total_flow = 0
        for i, p in enumerate(interpolation_points):
            elevation, stage, uh, vh = I(t, point_id=i)
            normal = segments[i].normal

            # Inner product of momentum vector with segment normal [m^2/s]
            normal_momentum = uh*normal[0] + vh*normal[1]

            # Flow across this segment [m^3/s]
            segment_flow = normal_momentum*segments[i].length

            # Accumulate
            total_flow += segment_flow

        # Store flow at this timestep
        Q.append(total_flow)

    return time, Q


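# Example usage of get_flow_through_cross_section (a sketch; the file name
# and the UTM coordinates of the cross section are illustrative):
#
#     cross_section = [[465000.0, 5588000.0], [465000.0, 5589000.0]]
#     time, Q = get_flow_through_cross_section('channel.sww', cross_section)
#     print 'Peak flow %.2f m^3/s' % max(Q)
#
# For each intersected triangle the flow is the inner product of the
# momentum vector (uh, vh) with the segment normal, times the segment
# length, i.e. segment_flow = (uh*nx + vh*ny) * length.
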
def get_maximum_inundation_elevation(filename,
                                     polygon=None,
                                     time_interval=None,
                                     verbose=False):
    """Return highest elevation where depth > 0

    Usage:
    max_runup = get_maximum_inundation_elevation(filename,
                                                 polygon=None,
                                                 time_interval=None,
                                                 verbose=False)

    filename is a NetCDF sww file containing ANUGA model output.
    Optional arguments polygon and time_interval restrict the maximum runup
    calculation to points that lie within the specified polygon and time
    interval.

    If no inundation is found within polygon and time_interval the return
    value is None, signifying "No Runup" or "Everything is dry".

    See general function get_maximum_inundation_data for details.
    """

    runup, _ = get_maximum_inundation_data(filename,
                                           polygon=polygon,
                                           time_interval=time_interval,
                                           verbose=verbose)
    return runup


def get_maximum_inundation_location(filename,
                                    polygon=None,
                                    time_interval=None,
                                    verbose=False):
    """Return location of highest elevation where h > 0

    Usage:
    max_runup_location = get_maximum_inundation_location(filename,
                                                         polygon=None,
                                                         time_interval=None,
                                                         verbose=False)

    filename is a NetCDF sww file containing ANUGA model output.
    Optional arguments polygon and time_interval restrict the maximum runup
    calculation to points that lie within the specified polygon and time
    interval.

    If no inundation is found within polygon and time_interval the return
    value is None, signifying "No Runup" or "Everything is dry".

    See general function get_maximum_inundation_data for details.
    """

    _, max_loc = get_maximum_inundation_data(filename,
                                             polygon=polygon,
                                             time_interval=time_interval,
                                             verbose=verbose)
    return max_loc


def get_maximum_inundation_data(filename, polygon=None, time_interval=None,
                                use_centroid_values=False,
                                verbose=False):
    """Compute maximum run up height from sww file.

    Usage:
    runup, location = get_maximum_inundation_data(filename,
                                                  polygon=None,
                                                  time_interval=None,
                                                  verbose=False)

    Algorithm is as in get_maximum_inundation_elevation from
    shallow_water_domain, except that this function works with the sww file
    and computes the maximal runup height over multiple timesteps.

    Optional arguments polygon and time_interval restrict the maximum runup
    calculation to points that lie within the specified polygon and time
    interval.  The polygon is assumed to be in (absolute) UTM coordinates in
    the same zone as the domain.

    If no inundation is found within polygon and time_interval the return
    value is None, signifying "No Runup" or "Everything is dry".
    """

    # We are using nodal values here as that is what is stored in sww files.

    # Water depth below which it is considered to be 0 in the model
    # FIXME (Ole): Allow this to be specified as a keyword argument as well

    from anuga.utilities.polygon import inside_polygon
    from anuga.config import minimum_allowed_height
    from Scientific.IO.NetCDF import NetCDFFile

    dir, base = os.path.split(filename)

    iterate_over = get_all_swwfiles(dir, base)

    maximal_runup = None
    maximal_runup_location = None

    for swwfile in iterate_over:

        # Read sww file
        filename = join(dir, swwfile+'.sww')

        if verbose:
            print 'Reading from %s' % filename
            # FIXME: Use general swwstats (when done)

        fid = NetCDFFile(filename)

        # Get geo_reference
        # sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError, e:
            geo_reference = Geo_reference()  # Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()

        # Get extent
        volumes = fid.variables['volumes'][:]
        x = fid.variables['x'][:] + xllcorner
        y = fid.variables['y'][:] + yllcorner

        # Get the relevant quantities (convert from single precision)
        elevation = array(fid.variables['elevation'][:], Float)
        stage = array(fid.variables['stage'][:], Float)

        # Here's where one could convert nodal information to centroid
        # information, but that is probably something we need to write in C.
        # Here's a Python thought which is NOT finished!!!
        if use_centroid_values is True:
            x = get_centroid_values(x, volumes)
            y = get_centroid_values(y, volumes)
            elevation = get_centroid_values(elevation, volumes)

        # Spatial restriction
        if polygon is not None:
            msg = 'polygon must be a sequence of points.'
            assert len(polygon[0]) == 2, msg
            # FIXME (Ole): Make a generic polygon input check in polygon.py
            # and call it here

            points = concatenate((x[:,NewAxis], y[:,NewAxis]), axis=1)

            point_indices = inside_polygon(points, polygon)

            # Restrict quantities to polygon
            elevation = take(elevation, point_indices)
            stage = take(stage, point_indices, axis=1)

            # Get info for location of maximal runup
            points_in_polygon = take(points, point_indices)
            x = points_in_polygon[:,0]
            y = points_in_polygon[:,1]
        else:
            # Take all points
            point_indices = arange(len(x))

        # Temporal restriction
        time = fid.variables['time'][:]
        all_timeindices = arange(len(time))
        if time_interval is not None:

            msg = 'time_interval must be a sequence of length 2.'
            assert len(time_interval) == 2, msg
            msg = 'time_interval %s must not be decreasing.' % (time_interval)
            assert time_interval[1] >= time_interval[0], msg

            msg = 'Specified time interval [%.8f:%.8f]' % tuple(time_interval)
            msg += ' does not match model time interval: [%.8f, %.8f]\n'\
                   % (time[0], time[-1])
            if time_interval[1] < time[0]: raise ValueError(msg)
            if time_interval[0] > time[-1]: raise ValueError(msg)

            # Take time indices corresponding to interval (& is bitwise AND)
            timesteps = compress((time_interval[0] <= time) &
                                 (time <= time_interval[1]),
                                 all_timeindices)

            msg = 'time_interval %s did not include any model timesteps.' \
                  % (time_interval)
            assert not alltrue(timesteps == 0), msg

        else:
            # Take them all
            timesteps = all_timeindices

        fid.close()

        # Compute maximal runup for each timestep
        for i in timesteps:
            if use_centroid_values is True:
                stage_i = get_centroid_values(stage[i,:], volumes)
            else:
                stage_i = stage[i,:]

            depth = stage_i - elevation

            # Get wet nodes, i.e. nodes with depth > 0 within the given
            # region and timesteps
            wet_nodes = compress(depth > minimum_allowed_height,
                                 arange(len(depth)))

            if alltrue(wet_nodes == 0):
                runup = None
            else:
                # Find maximum elevation among wet nodes
                wet_elevation = take(elevation, wet_nodes)

                runup_index = argmax(wet_elevation)
                runup = max(wet_elevation)
                assert wet_elevation[runup_index] == runup  # Must be True

            if runup > maximal_runup:
                maximal_runup = runup  # This works even if maximal_runup is None

                # Record location
                wet_x = take(x, wet_nodes)
                wet_y = take(y, wet_nodes)
                maximal_runup_location = [wet_x[runup_index], wet_y[runup_index]]

    return maximal_runup, maximal_runup_location

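# Example usage of get_maximum_inundation_data (a sketch; the file name,
# polygon and time interval are illustrative):
#
#     polygon = [[0.0, 0.0], [100.0, 0.0], [100.0, 100.0], [0.0, 100.0]]
#     runup, location = get_maximum_inundation_data('flood.sww',
#                                                   polygon=polygon,
#                                                   time_interval=[0, 3600])
#     if runup is None:
#         print 'Everything within the polygon stayed dry'
#     else:
#         print 'Runup %.2f m at %s' % (runup, location)
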
def get_all_swwfiles(look_in_dir='', base_name='', verbose=False):
    '''
    Finds all the sww files in "look_in_dir" whose names contain "base_name".
    Will accept base_name with or without the extension ".sww".

    Returns: a list of strings

    Usage:     iterate_over = get_all_swwfiles(dir, name)
    then
               for swwfile in iterate_over:
                   do stuff

    Check "export_grids" and "get_maximum_inundation_data" for examples
    '''

    # Check the extension
    name, extension = os.path.splitext(base_name)

    if extension != '' and extension != '.sww':
        msg = 'file %s %s must be a NetCDF sww file!' % (base_name, extension)
        raise IOError, msg

    if look_in_dir == "":
        look_in_dir = "."  # Unix compatibility

    dir_ls = os.listdir(look_in_dir)
    iterate_over = [x[:-4] for x in dir_ls if name in x and x[-4:] == '.sww']
    if len(iterate_over) == 0:
        msg = 'No files of the base name %s' % (name)
        raise IOError, msg
    if verbose: print 'iterate over %s' % (iterate_over)

    return iterate_over

def get_all_files_with_extension(look_in_dir='', base_name='',
                                 extension='.sww', verbose=False):
    '''
    Finds all the files with the given extension in "look_in_dir" whose
    names contain "base_name".

    Returns: a list of strings

    Usage:     iterate_over = get_all_files_with_extension(dir, name, ext)
    then
               for filename in iterate_over:
                   do stuff

    Check "export_grids" and "get_maximum_inundation_data" for examples
    '''

    # Check the extension
    name, ext = os.path.splitext(base_name)

    if ext != '' and ext != extension:
        msg = 'base_name %s must be a file with %s extension!' \
              % (base_name, extension)
        raise IOError, msg

    if look_in_dir == "":
        look_in_dir = "."  # Unix compatibility

    dir_ls = os.listdir(look_in_dir)
    iterate_over = [x[:-4] for x in dir_ls if name in x and x[-4:] == extension]
    if len(iterate_over) == 0:
        msg = 'No files of the base name %s in %s' % (name, look_in_dir)
        raise IOError, msg
    if verbose: print 'iterate over %s' % (iterate_over)

    return iterate_over

def get_all_directories_with_name(look_in_dir='', base_name='', verbose=False):
    '''
    Finds all the directories in "look_in_dir" whose names contain
    "base_name".

    Returns: a list of strings

    Usage:     iterate_over = get_all_directories_with_name(dir, name)
    then
               for dirname in iterate_over:
                   do stuff

    Check "export_grids" and "get_maximum_inundation_data" for examples
    '''

    if look_in_dir == "":
        look_in_dir = "."  # Unix compatibility

    dir_ls = os.listdir(look_in_dir)
    iterate_over = [x for x in dir_ls if base_name in x]
    if len(iterate_over) == 0:
        msg = 'No files of the base name %s' % (base_name)
        raise IOError, msg
    if verbose: print 'iterate over %s' % (iterate_over)

    return iterate_over

def points2polygon(points_file,
                   minimum_triangle_angle=3.0):
    """
    WARNING: This function is not fully working.

    Function to return a polygon from an alpha shape, given a points file.

    WARNING: Alpha shape returns multiple polygons, but this function only
    returns one polygon.
    """
    from anuga.pmesh.mesh import Mesh, importMeshFromFile
    from anuga.shallow_water import Domain
    from anuga.pmesh.mesh_interface import create_mesh_from_regions

    mesh = importMeshFromFile(points_file)
    mesh.auto_segment()
    mesh.exportASCIIsegmentoutlinefile("outline.tsh")
    mesh2 = importMeshFromFile("outline.tsh")
    mesh2.generate_mesh(maximum_triangle_area=1000000000,
                        minimum_triangle_angle=minimum_triangle_angle,
                        verbose=False)
    mesh2.export_mesh_file('outline_meshed.tsh')
    domain = Domain("outline_meshed.tsh", use_cache=False)
    polygon = domain.get_boundary_polygon()
    return polygon

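# Example usage of points2polygon (a sketch; 'field_data.pts' is
# illustrative, and note the warnings above - only one of possibly
# several alpha-shape polygons is returned):
#
#     polygon = points2polygon('field_data.pts')
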
#-------------------------------------------------------------
if __name__ == "__main__":
    # Set umask from config to force the same permissions for all files and
    # directories created. (It was noticed that "mpirun" doesn't honour the
    # umask set in your .bashrc etc.)
    from config import umask
    import os
    os.umask(umask)