source: anuga_core/source/anuga/shallow_water/data_manager.py @ 4360

Last change on this file since 4360 was 4348, checked in by duncan, 18 years ago

Changing the definition of a mux file. South is now the positive axis, not North.

File size: 157.7 KB
1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topograhies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from anuga.pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST bundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
47TSH + Boundary SWW -> SWW: Simluation using abstract_2d_finite_volumes
48
49"""

import exceptions
class TitleValueError(exceptions.Exception): pass
class DataMissingValuesError(exceptions.Exception): pass
class DataFileNotOpenError(exceptions.Exception): pass
class DataTimeError(exceptions.Exception): pass
class DataDomainError(exceptions.Exception): pass



import csv
import os
from struct import unpack
import array as p_array
#import time, os
from os import sep, path

from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
     searchsorted, zeros, allclose, around, reshape, transpose, sort
from Scientific.IO.NetCDF import NetCDFFile


from anuga.coordinate_transforms.redfearn import redfearn, \
     convert_from_latlon_to_utm
from anuga.coordinate_transforms.geo_reference import Geo_reference
from anuga.geospatial_data.geospatial_data import Geospatial_data
from anuga.config import minimum_storable_height as default_minimum_storable_height
from anuga.utilities.numerical_tools import ensure_numeric, mean
from anuga.caching.caching import myhash
from anuga.utilities.anuga_exceptions import ANUGAError
from anuga.shallow_water import Domain
from anuga.abstract_2d_finite_volumes.pmesh2domain import \
     pmesh_to_domain_instance

def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s

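# Example of make_filename on a typical label:
#     make_filename(' My run (case 1) ')  ->  'My_run_case_1'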

def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            exitcode = os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode = os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print 'Using directory %s instead' %path

    return path


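# Example: ensure an output directory exists before writing results.
# 'results' is a hypothetical directory name; check_dir appends the
# trailing separator and creates the directory if needed:
#     datadir = check_dir('results')   # e.g. 'results/'
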

def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" %X

        os.rmdir(path)



def create_filename(datadir, filename, format, size=None, time=None):

    import os
    #from anuga.config import data_dir

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' %size

    if time is not None:
        FN += '_time%.2f' %time

    FN += '.' + format
    return FN

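# Example of create_filename building an output name piece by piece:
#     create_filename('.', 'runup', 'sww', size=4, time=1.0)
# returns './runup_size4_time1.00.sww' (check_dir supplies the trailing
# separator on the directory part).
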

def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob

    import os
    #from anuga.config import data_dir

    dir = check_dir(datadir)

    pattern = dir + os.sep + filename + '_size=%d*.%s' %(size, format)
    return glob.glob(pattern)



#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], '''Mode %s must be either:''' %mode +\
                                        '''   'w' (write)'''+\
                                        '''   'r' (read)''' +\
                                        '''   'a' (append)'''

        #Create filename
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0
        self.domain = domain



        # Exclude ghosts in case this is a parallel domain
        self.number_of_nodes = domain.number_of_full_nodes
        self.number_of_volumes = domain.number_of_full_triangles
        #self.number_of_volumes = len(domain)




        #FIXME: Should we have a general set_precision function?



#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision for quantities
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)

        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            self.minimum_storable_height = default_minimum_storable_height

        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from anuga.abstract_2d_finite_volumes suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            if domain.smooth is True:
                #fid.createDimension('number_of_points', len(domain.vertexlist))
                fid.createDimension('number_of_points', self.domain.number_of_full_nodes)

                # FIXME(Ole): This will cause sww files for parallel domains to
                # have ghost nodes stored (but not used by triangles).
                # To clean this up, we have to change get_vertex_values and friends in
                # quantity.py (but I can't be bothered right now)
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', Float,  # Always use full precision lest two timesteps
                                               # close to each other may appear as the same step
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision=self.precision)

        volumes[:] = V.astype(volumes.typecode())
        x[:] = X
        y[:] = Y
        z[:] = Z

        #FIXME: Backwards compatibility
        z = fid.variables['z']
        z[:] = Z
        ################################



        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        from Numeric import choose

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #In order to get the file name and start time correct,
            #change domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #Write a filename addon that won't break Swollen's reader
            #(10.sww is bad)
            filename_ext = '_time_%s' %self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #Remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename+filename_ext)


            #Change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #Build a new data_structure.
            next_data_structure =\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s' %file_size
                print '    saving file to %s' %next_data_structure.filename
            #Set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)


                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #print z[:]
                    #print A-z[:]


                    A = choose(A-z[:] >= self.minimum_storable_height,
                               (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
            #eval( name + '[i,:] = A.astype(self.precision)' )
            #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()


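# Hedged usage sketch for Data_format_sww.  Assumes 'domain' is an existing
# shallow water Domain instance; get_dataobject (defined below) normally
# constructs the writer from domain.format:
#
#     sww = Data_format_sww(domain)                  # create .sww file
#     sww.store_connectivity()                       # write mesh once
#     for t in domain.evolve(yieldstep=1, finaltime=10):
#         sww.store_timestep(['stage', 'xmomentum', 'ymomentum'])
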


#Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y coordinates and connectivity of the triangles
        constituting the mesh.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        # Note: Z is not stored here; elevation is written per timestep
        # via store_timestep along with the other quantities.

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                  %self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        var = fid.variables[name]  #Variable for the named quantity
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                  precision = self.precision)

        var[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()





#Function for storing xya output
#FIXME Not done yet for this version
#This is obsolete.  Use geospatial_data instead
class Data_format_xya(Data_format):
    """Generic interface to data formats
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store_all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        if domain.smooth is True:
            number_of_points = len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes are assumed by the xya format

        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momenta) to file
        """
        pass


#### NED is national exposure database (name changed to NEXIS)

LAT_TITLE = 'LATITUDE'
LONG_TITLE = 'LONGITUDE'
X_TITLE = 'x'
Y_TITLE = 'y'
class Exposure_csv:
    def __init__(self, file_name, latitude_title=LAT_TITLE,
                 longitude_title=LONG_TITLE, is_x_y_locations=None,
                 x_title=X_TITLE, y_title=Y_TITLE,
                 refine_polygon=None, title_check_list=None):
        """
        This class is for handling the exposure csv file.
        It reads the file in and converts the lats and longs to a geospatial
        data object.
        Use the methods to read and write columns.

        The format of the csv files it reads is:
           The first row is a title row.
           Commas are the delimiters.
           Each column is a 'set' of data.

        Feel free to use/expand it to read other csv files.

        It is not for adding and deleting rows.

        Can geospatial handle string attributes? It's not made for them.
        Currently it can't load and save string attributes.

        So just use geospatial to hold the x, y and georef? Bad, since
        different attributes are in different structures.  Not so bad, the
        info to write if the .csv file is saved is in attribute_dic.

        The location info is in the geospatial attribute.
        """
        self._file_name = file_name
        self._geospatial = None

        # self._attribute_dic is a dictionary.
        #The keys are the column titles.
        #The values are lists of column data

        # self._title_index_dic is a dictionary.
        #The keys are the column titles.
        #The values are the index positions of file columns.
        self._attribute_dic, self._title_index_dic = \
        self._load_exposure_csv(self._file_name,
                                title_check_list=title_check_list)
        try:
            #Have code here that handles caps or lower
            lats = self._attribute_dic[latitude_title]
            longs = self._attribute_dic[longitude_title]

        except KeyError:
            # maybe a warning..
            #Let's see if this works..
            if is_x_y_locations is not False:
                is_x_y_locations = True
            pass
        else:
            self._geospatial = Geospatial_data(latitudes = lats,
                 longitudes = longs)

        if is_x_y_locations is True:
            if self._geospatial is not None:
                pass #FIXME: throw an error
            try:
                xs = self._attribute_dic[x_title]
                ys = self._attribute_dic[y_title]
                points = [[float(i),float(j)] for i,j in map(None,xs,ys)]
            except KeyError:
                # maybe a warning..
                msg = "Could not find location information."
                raise TitleValueError, msg
            else:
                self._geospatial = Geospatial_data(data_points=points)

        # create a list of points that are in the refining_polygon
        # described by a list of indexes representing the points

    def __cmp__(self, other):
        #print "self._attribute_dic",self._attribute_dic
        #print "other._attribute_dic",other._attribute_dic
        #print "self._title_index_dic", self._title_index_dic
        #print "other._title_index_dic", other._title_index_dic

        #check that other is an instance of this class
        if isinstance(self, type(other)):
            result = cmp(self._attribute_dic, other._attribute_dic)
            if result != 0:
                return result
            # The order of the columns is important. Therefore..
            result = cmp(self._title_index_dic, other._title_index_dic)
            if result != 0:
                return result
            for self_ls, other_ls in map(None, self._attribute_dic,
                                         other._attribute_dic):
                result = cmp(self._attribute_dic[self_ls],
                             other._attribute_dic[other_ls])
                if result != 0:
                    return result
            return 0
        else:
            return 1

    def _load_exposure_csv(self, file_name, title_check_list=None):
        """
        Load in the csv as a dic, title as key and column info as value.
        Also, create a dic, title as key and column index as value,
        to keep track of the column order.
        """
        #
        attribute_dic = {}
        title_index_dic = {}
        titles_stripped = [] # list of titles
        reader = csv.reader(file(file_name))

        # Read in and manipulate the title info
        titles = reader.next()
        for i, title in enumerate(titles):
            titles_stripped.append(title.strip())
            title_index_dic[title.strip()] = i
        title_count = len(titles_stripped)
        #print "title_index_dic",title_index_dic
        if title_check_list is not None:
            for title_check in title_check_list:
                #msg = "Reading error.  This column is not present ", title_check
                #assert title_index_dic.has_key(title_check), msg
                if not title_index_dic.has_key(title_check):
                    #reader.close()
                    msg = "Reading error.  This column is not present: %s" \
                          %title_check
                    raise IOError, msg



        #create a dic of column values, indexed by column title
        for line in reader:
            if len(line) != title_count:
                raise IOError #FIXME make this nicer
            for i, value in enumerate(line):
                attribute_dic.setdefault(titles_stripped[i],[]).append(value)

        return attribute_dic, title_index_dic

    def get_column(self, column_name, use_refind_polygon=False):
        """
        Given a column name return a list of the column values.

        Note, the type of the values will be String!
        Do this to change a list of strings to a list of floats:
            time = [float(x) for x in time]

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        if not self._attribute_dic.has_key(column_name):
            msg = 'There is no column called %s!' %column_name
            raise TitleValueError, msg
        return self._attribute_dic[column_name]


    def get_value(self, value_column_name,
                  known_column_name,
                  known_values,
                  use_refind_polygon=False):
        """
        Do linear interpolation on the known_column_name column, using the
        known_values, to return a value of the value_column_name column.
        """
        pass


    def get_location(self, use_refind_polygon=False):
        """
        Return a geospatial object which describes the
        locations of the location file.

        Note, if there is no location info, this returns None.

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        return self._geospatial

    def set_column(self, column_name, column_values, overwrite=False):
        """
        Add a column to the 'end' (with the right most column being the end)
        of the csv file.

        Set overwrite to True if you want to overwrite a column.

        Note, in column_name white space is removed and case is not checked.

        Precondition:
        The column_name and column_values cannot have commas in them.
        """

        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        if len(column_values) != value_row_count:
            msg = 'The number of column values must equal the number of rows.'
            raise DataMissingValuesError, msg

        if self._attribute_dic.has_key(column_name):
            if not overwrite:
                msg = 'Column name %s already in use!' %column_name
                raise TitleValueError, msg
        else:
            # New title.  Add it to the title index.
            self._title_index_dic[column_name] = len(self._title_index_dic)
        self._attribute_dic[column_name] = column_values
        #print "self._title_index_dic[column_name]",self._title_index_dic

    def save(self, file_name=None):
        """
        Save the exposure csv file
        """
        if file_name is None:
            file_name = self._file_name

        fd = open(file_name, 'wb')
        writer = csv.writer(fd)

        #Write the title to a csv file
        line = [None]* len(self._title_index_dic)
        for title in self._title_index_dic.iterkeys():
            line[self._title_index_dic[title]] = title
        writer.writerow(line)

        # Write the values to a csv file
        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        for row_i in range(value_row_count):
            line = [None]* len(self._title_index_dic)
            for title in self._title_index_dic.iterkeys():
                line[self._title_index_dic[title]] = \
                     self._attribute_dic[title][row_i]
            writer.writerow(line)

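# Hedged usage sketch for Exposure_csv.  'exposure.csv' is a hypothetical
# file with LATITUDE and LONGITUDE columns plus attribute columns, and
# 'WALL_TYPE' and 'depths' are hypothetical names:
#
#     exposure = Exposure_csv('exposure.csv',
#                             title_check_list=['LATITUDE', 'LONGITUDE'])
#     walls = exposure.get_column('WALL_TYPE')     # list of strings
#     exposure.set_column('MAX_DEPTH', depths)     # one value per row
#     exposure.save('exposure_with_depth.csv')
#     locations = exposure.get_location()          # Geospatial_data or None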

#Auxiliary
def write_obj(filename, x, y, z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected.

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N == 3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j], y[i,j], z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1, base+2, base+3))

    outfile.close()

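# Example: write a single triangle as an obj file.  The arrays follow the
# (M,3) convention documented above (one row per element, one column per
# vertex):
#
#     from Numeric import array, Float
#     x = array([[0.0, 1.0, 0.0]], Float)
#     y = array([[0.0, 0.0, 1.0]], Float)
#     z = array([[0.0, 0.0, 0.0]], Float)
#     write_obj('one_triangle', x, y, z)   # creates 'one_triangle.obj'
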

#########################################################
#Conversion routines
########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN, xx, yy, zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN, xx, yy, zz)


def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from anuga.config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float, tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry', x, y, z)


    #Now read all the data files with variable information, combine with
    #x,y info and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        #Note: glob returns paths that already include data_dir
        lines = open(filename, 'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError, 'Hmmmm'



        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float, tokens)

            for j in range(3):
                z[i,j] = values[j]

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t, x, y, z)


def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()

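# Example: thin a large sww file by keeping every fifth timestep.  The
# filenames are hypothetical:
#
#     filter_netcdf('run_full.sww', 'run_thinned.sww', first=0, step=5)
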

#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename
    """

    cls = eval('Data_format_%s' %domain.format)
    return cls(domain, mode)

#FIXME move into geospatial.  There should only be one method that
# reads xya, and writes pts.
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Get NetCDF
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print 'Reading xya points from %s' %(root + ext)

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        try:
            assert len(fields) == 3
        except:
            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line)

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except:
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    write_ptsfile(ptsname, points, attribute, attribute_name)

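# Example: convert 'depths.xya' (hypothetical) to 'depths.pts', keeping
# every second point and negating the third field so depths become
# elevations:
#
#     xya2pts('depths', stride=2, z_func=lambda z: -z)
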


######Obsoleted by export_points in load_mesh
def write_ptsfile(ptsname, points, attribute, attribute_name = None,
                  zone=None, xllcorner=None, yllcorner=None):
    """Write points and associated attribute to pts (NetCDF) format
    """

    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'

    from Numeric import Float

    if attribute_name is None:
        attribute_name = 'attribute'


    from Scientific.IO.NetCDF import NetCDFFile

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')


    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'


    #Georeferencing
    from anuga.coordinate_transforms.geo_reference import Geo_reference
    if zone is None:
        assert xllcorner is None, 'xllcorner must be None'
        assert yllcorner is None, 'yllcorner must be None'
        Geo_reference().write_NetCDF(outfile)
    else:
        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)



    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    # Get handles to the variables
    nc_points = outfile.variables['points']
    nc_attribute = outfile.variables[attribute_name]

    #Store data
    nc_points[:, :] = points
    nc_attribute[:] = attribute

    outfile.close()


def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result

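# Example: clip and convert 'topo.dem' (hypothetical) to 'topo.pts',
# caching the result so repeated calls with unchanged inputs are fast:
#
#     dem2pts('topo', easting_min=722000, easting_max=725000,
#             northing_min=5893000, northing_max=5896000,
#             use_cache=True, verbose=True)
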

def _dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                          'of spatial point data'
    #assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # calculating number of NODATA_values for each row in clipped region
    #FIXME: use array operations to do faster
    nn = 0
    k = 0
    i1_0 = 0
    j1_0 = 0
    thisj = 0
    thisi = 0
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    index1 = j1_0
    index2 = thisj

    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints-nn

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1, j1_0:thisj+1]

    if verbose:
        print 'There are %d values in the elevation' %totalnopoints
        print 'There are %d values in the clipped elevation' %clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' %nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1
    #Store data
    global_index = 0
    #for i in range(nrows):
    for i in range(i1_0, thisi+1, 1):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index

        v = dem_elevation_r[i, index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA #ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv #ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        #for j in range(ncols):
        for j in range(j1_0, index2+1, 1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] != NODATA_value:
                tpoints[local_index, :] = [x-easting_min, y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()



def _read_hecras_cross_sections(lines):
    """Return block of surface lines for each cross section
    Starts with SURFACE LINE,
    Ends with END CROSS-SECTION
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if lines[i].strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if lines[i].strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])


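# Example: _read_hecras_cross_sections is a generator; each iteration
# yields the [easting, northing, elevation] triples of one cross section:
#
#     lines = open('export.sdf').readlines()   # hypothetical SDF export
#     for section in _read_hecras_cross_sections(lines):
#         print len(section), 'surface points'
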
1623
1624
1625def hecras_cross_sections2pts(basename_in,
1626                              basename_out=None,
1627                              verbose=False):
1628    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1629
1630    Example:
1631
1632
1633# RAS export file created on Mon 15Aug2005 11:42
1634# by HEC-RAS Version 3.1.1
1635
1636BEGIN HEADER:
1637  UNITS: METRIC
1638  DTM TYPE: TIN
1639  DTM: v:\1\cit\perth_topo\river_tin
1640  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1641  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1642  MAP PROJECTION: UTM
1643  PROJECTION ZONE: 50
1644  DATUM: AGD66
1645  VERTICAL DATUM:
1646  NUMBER OF REACHES:  19
1647  NUMBER OF CROSS-SECTIONS:  14206
1648END HEADER:
1649
1650
1651Only the SURFACE LINE data of the following form will be utilised
1652
1653  CROSS-SECTION:
1654    STREAM ID:Southern-Wungong
1655    REACH ID:Southern-Wungong
1656    STATION:19040.*
1657    CUT LINE:
1658      405548.671603161 , 6438142.7594925
1659      405734.536092045 , 6438326.10404912
1660      405745.130459356 , 6438331.48627354
1661      405813.89633823 , 6438368.6272789
1662    SURFACE LINE:
1663     405548.67,   6438142.76,   35.37
1664     405552.24,   6438146.28,   35.41
1665     405554.78,   6438148.78,   35.44
1666     405555.80,   6438149.79,   35.44
1667     405559.37,   6438153.31,   35.45
1668     405560.88,   6438154.81,   35.44
1669     405562.93,   6438156.83,   35.42
1670     405566.50,   6438160.35,   35.38
1671     405566.99,   6438160.83,   35.37
1672     ...
1673   END CROSS-SECTION
1674
1675    Convert to NetCDF pts format which is
1676
1677    points:  (Nx2) Float array
1678    elevation: N Float array
1679    """
1680
1681    #FIXME: Can this be written feasibly using write_pts?
1682
1683    import os
1684    from Scientific.IO.NetCDF import NetCDFFile
1685    from Numeric import Float, zeros, reshape
1686
1687    root = basename_in
1688
1689    #Get ASCII file
1690    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1691
    if verbose: print 'Reading HEC-RAS data from %s' %(root + '.sdf')
1693
1694    lines = infile.readlines()
1695    infile.close()
1696
1697    if verbose: print 'Converting to pts format'
1698
1699    i = 0
1700    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1701        i += 1
1702
1703    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1704    i += 1
1705
1706    assert lines[i].strip().upper().startswith('UNITS:')
1707    units = lines[i].strip().split()[1]
1708    i += 1
1709
1710    assert lines[i].strip().upper().startswith('DTM TYPE:')
1711    i += 1
1712
1713    assert lines[i].strip().upper().startswith('DTM:')
1714    i += 1
1715
1716    assert lines[i].strip().upper().startswith('STREAM')
1717    i += 1
1718
1719    assert lines[i].strip().upper().startswith('CROSS')
1720    i += 1
1721
1722    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1723    projection = lines[i].strip().split(':')[1]
1724    i += 1
1725
1726    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1727    zone = int(lines[i].strip().split(':')[1])
1728    i += 1
1729
1730    assert lines[i].strip().upper().startswith('DATUM:')
1731    datum = lines[i].strip().split(':')[1]
1732    i += 1
1733
1734    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1735    i += 1
1736
1737    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1738    i += 1
1739
1740    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1741    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1742    i += 1
1743
1744
1745    #Now read all points
1746    points = []
1747    elevation = []
1748    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1749        for k, entry in enumerate(entries):
1750            points.append(entry[:2])
1751            elevation.append(entry[2])
1752
1753
    msg = 'Actual number of cross sections == %d, reported as %d'\
          %(j+1, number_of_cross_sections)
1756    assert j+1 == number_of_cross_sections, msg
1757
1758    #Get output file
    if basename_out is None:
1760        ptsname = root + '.pts'
1761    else:
1762        ptsname = basename_out + '.pts'
1763
1764    #FIXME (DSG-ON): use loadASCII export_points_file
1765    if verbose: print 'Store to NetCDF file %s' %ptsname
1766    # NetCDF file definition
1767    outfile = NetCDFFile(ptsname, 'w')
1768
1769    #Create new file
1770    outfile.institution = 'Geoscience Australia'
1771    outfile.description = 'NetCDF pts format for compact and portable ' +\
1772                          'storage of spatial point data derived from HEC-RAS'
1773
1774    #Georeferencing
1775    outfile.zone = zone
1776    outfile.xllcorner = 0.0
1777    outfile.yllcorner = 0.0
1778    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
1779    outfile.false_northing = 1000000
1780
1781    outfile.projection = projection
1782    outfile.datum = datum
1783    outfile.units = units
1784
1785
1786    outfile.createDimension('number_of_points', len(points))
1787    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1788
1789    # variable definitions
1790    outfile.createVariable('points', Float, ('number_of_points',
1791                                             'number_of_dimensions'))
1792    outfile.createVariable('elevation', Float, ('number_of_points',))
1793
1794    # Get handles to the variables
1795    outfile.variables['points'][:] = points
1796    outfile.variables['elevation'][:] = elevation
1797
1798    outfile.close()
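
# Example usage (a sketch; 'up_canning' is a hypothetical basename that must
# refer to an existing up_canning.sdf file):
#
#     hecras_cross_sections2pts('up_canning', verbose=True)
#
# This would write all SURFACE LINE points to up_canning.pts.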
1799
1800
1801
1802def sww2dem(basename_in, basename_out = None,
1803            quantity = None,
1804            timestep = None,
1805            reduction = None,
1806            cellsize = 10,
1807            NODATA_value = -9999,
1808            easting_min = None,
1809            easting_max = None,
1810            northing_min = None,
1811            northing_max = None,
1812            verbose = False,
1813            origin = None,
1814            datum = 'WGS84',
            format = 'ers'):
1816
1817    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)
1818
1819    Example (ASC):
1820
1821    ncols         3121
1822    nrows         1800
1823    xllcorner     722000
1824    yllcorner     5893000
1825    cellsize      25
1826    NODATA_value  -9999
1827    138.3698 137.4194 136.5062 135.5558 ..........
1828
    Also write an accompanying file with same basename_out but extension .prj
1830    used to fix the UTM zone, datum, false northings and eastings.
1831
1832    The prj format is assumed to be as
1833
1834    Projection    UTM
1835    Zone          56
1836    Datum         WGS84
1837    Zunits        NO
1838    Units         METERS
1839    Spheroid      WGS84
1840    Xshift        0.0000000000
1841    Yshift        10000000.0000000000
1842    Parameters
1843
1844
1845    The parameter quantity must be the name of an existing quantity or
1846    an expression involving existing quantities. The default is
1847    'elevation'.
1848
    If timestep (an index) is given, output the quantity at that timestep.

    If reduction is given, use it to reduce the quantity over all timesteps.

    datum is the geodetic datum recorded in the output file (default 'WGS84').

    format can be either 'asc' or 'ers'.
1856    """
1857
1858    import sys
1859    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1860    from Numeric import array2string
1861
1862    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
1863    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
1864
1865    msg = 'Format must be either asc or ers'
1866    assert format.lower() in ['asc', 'ers'], msg
1867
1868
1869    false_easting = 500000
1870    false_northing = 10000000
1871
1872    if quantity is None:
1873        quantity = 'elevation'
1874
1875    if reduction is None:
1876        reduction = max
1877
1878    if basename_out is None:
1879        basename_out = basename_in + '_%s' %quantity
1880
1881    swwfile = basename_in + '.sww'
1882    demfile = basename_out + '.' + format
1883    # Note the use of a .ers extension is optional (write_ermapper_grid will
    # deal with either option)
1885
1886    #Read sww file
1887    if verbose: print 'Reading from %s' %swwfile
1888    from Scientific.IO.NetCDF import NetCDFFile
1889    fid = NetCDFFile(swwfile)
1890
1891    #Get extent and reference
1892    x = fid.variables['x'][:]
1893    y = fid.variables['y'][:]
1894    volumes = fid.variables['volumes'][:]
1895    if timestep is not None:
1896        times = fid.variables['time'][timestep]
1897    else:
1898        times = fid.variables['time'][:]
1899
1900    number_of_timesteps = fid.dimensions['number_of_timesteps']
1901    number_of_points = fid.dimensions['number_of_points']
1902   
1903    if origin is None:
1904
1905        #Get geo_reference
1906        #sww files don't have to have a geo_ref
1907        try:
1908            geo_reference = Geo_reference(NetCDFObject=fid)
1909        except AttributeError, e:
1910            geo_reference = Geo_reference() #Default georef object
1911
1912        xllcorner = geo_reference.get_xllcorner()
1913        yllcorner = geo_reference.get_yllcorner()
1914        zone = geo_reference.get_zone()
1915    else:
1916        zone = origin[0]
1917        xllcorner = origin[1]
1918        yllcorner = origin[2]
1919
1920
1921
1922    #FIXME: Refactor using code from file_function.statistics
1923    #Something like print swwstats(swwname)
1924    if verbose:
1925        print '------------------------------------------------'
1926        print 'Statistics of SWW file:'
1927        print '  Name: %s' %swwfile
1928        print '  Reference:'
1929        print '    Lower left corner: [%f, %f]'\
1930              %(xllcorner, yllcorner)
1931        if timestep is not None:
1932            print '    Time: %f' %(times)
1933        else:
1934            print '    Start time: %f' %fid.starttime[0]
1935        print '  Extent:'
1936        print '    x [m] in [%f, %f], len(x) == %d'\
1937              %(min(x.flat), max(x.flat), len(x.flat))
1938        print '    y [m] in [%f, %f], len(y) == %d'\
1939              %(min(y.flat), max(y.flat), len(y.flat))
1940        if timestep is not None:
1941            print '    t [s] = %f, len(t) == %d' %(times, 1)
1942        else:
1943            print '    t [s] in [%f, %f], len(t) == %d'\
1944              %(min(times), max(times), len(times))
1945        print '  Quantities [SI units]:'
1946        for name in ['stage', 'xmomentum', 'ymomentum']:
1947            q = fid.variables[name][:].flat
1948            if timestep is not None:
1949                q = q[timestep*len(x):(timestep+1)*len(x)]
            print '    %s in [%f, %f]' %(name, min(q), max(q))
        for name in ['elevation']:
            q = fid.variables[name][:].flat
            print '    %s in [%f, %f]' %(name, min(q), max(q))
1954           
1955    # Get quantity and reduce if applicable
1956    if verbose: print 'Processing quantity %s' %quantity
1957
1958    # Turn NetCDF objects into Numeric arrays
1959    quantity_dict = {}
1960    for name in fid.variables.keys():
1961        quantity_dict[name] = fid.variables[name][:] 
1962
1963
1964    # Convert quantity expression to quantities found in sww file   
1965    q = apply_expression_to_dictionary(quantity, quantity_dict)
1966
1967    if len(q.shape) == 2:
1968        #q has a time component and needs to be reduced along
1969        #the temporal dimension
1970        if verbose: print 'Reducing quantity %s' %quantity
1971        q_reduced = zeros( number_of_points, Float )
1972       
1973        if timestep is not None:
1974            for k in range(number_of_points):
1975                q_reduced[k] = q[timestep,k] 
1976        else:
1977            for k in range(number_of_points):
1978                q_reduced[k] = reduction( q[:,k] )
1979
1980        q = q_reduced
1981
1982    #Post condition: Now q has dimension: number_of_points
1983    assert len(q.shape) == 1
1984    assert q.shape[0] == number_of_points
1985
1986
1987    if verbose:
1988        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1989
1990
1991    #Create grid and update xll/yll corner and x,y
1992
1993    #Relative extent
1994    if easting_min is None:
1995        xmin = min(x)
1996    else:
1997        xmin = easting_min - xllcorner
1998
1999    if easting_max is None:
2000        xmax = max(x)
2001    else:
2002        xmax = easting_max - xllcorner
2003
2004    if northing_min is None:
2005        ymin = min(y)
2006    else:
2007        ymin = northing_min - yllcorner
2008
2009    if northing_max is None:
2010        ymax = max(y)
2011    else:
2012        ymax = northing_max - yllcorner
2013
2014
2015
2016    if verbose: print 'Creating grid'
2017    ncols = int((xmax-xmin)/cellsize)+1
2018    nrows = int((ymax-ymin)/cellsize)+1
2019
2020
2021    #New absolute reference and coordinates
2022    newxllcorner = xmin+xllcorner
2023    newyllcorner = ymin+yllcorner
2024
2025    x = x+xllcorner-newxllcorner
2026    y = y+yllcorner-newyllcorner
2027   
2028    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2029    assert len(vertex_points.shape) == 2
2030
2031    grid_points = zeros ( (ncols*nrows, 2), Float )
2032
2033
2034    for i in xrange(nrows):
2035        if format.lower() == 'asc':
2036            yg = i*cellsize
2037        else:
            # This will flip the order of the y values for ers
            yg = (nrows-i)*cellsize
2040
2041        for j in xrange(ncols):
2042            xg = j*cellsize
2043            k = i*ncols + j
2044
2045            grid_points[k,0] = xg
2046            grid_points[k,1] = yg
2047
2048    #Interpolate
2049    #from least_squares import Interpolation
2050    from anuga.fit_interpolate.interpolate import Interpolate
2051
2052    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2053
2054    #Interpolate using quantity values
2055    if verbose: print 'Interpolating'
2056    grid_values = interp.interpolate(q, grid_points).flat
2057
2058
2059    if verbose:
2060        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
2061                                                      max(grid_values))
2062
2063    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2064    P = interp.mesh.get_boundary_polygon()
2065    outside_indices = outside_polygon(grid_points, P, closed=True)
2066
2067    for i in outside_indices:
2068        grid_values[i] = NODATA_value
2069
2070
2071
2072
2073    if format.lower() == 'ers':
2074        # setup ERS header information
2075        grid_values = reshape(grid_values,(nrows, ncols))
2076        header = {}
2077        header['datum'] = '"' + datum + '"'
2078        # FIXME The use of hardwired UTM and zone number needs to be made optional
2079        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
2080        header['projection'] = '"UTM-' + str(zone) + '"'
2081        header['coordinatetype'] = 'EN'
2082        if header['coordinatetype'] == 'LL':
2083            header['longitude'] = str(newxllcorner)
2084            header['latitude'] = str(newyllcorner)
2085        elif header['coordinatetype'] == 'EN':
2086            header['eastings'] = str(newxllcorner)
2087            header['northings'] = str(newyllcorner)
2088        header['nullcellvalue'] = str(NODATA_value)
2089        header['xdimension'] = str(cellsize)
2090        header['ydimension'] = str(cellsize)
2091        header['value'] = '"' + quantity + '"'
2092        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
2093
2094
2095        #Write
2096        if verbose: print 'Writing %s' %demfile
2097        import ermapper_grids
2098        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
2099
2100        fid.close()
2101    else:
2102        #Write to Ascii format
2103
2104        #Write prj file
2105        prjfile = basename_out + '.prj'
2106
2107        if verbose: print 'Writing %s' %prjfile
2108        prjid = open(prjfile, 'w')
2109        prjid.write('Projection    %s\n' %'UTM')
2110        prjid.write('Zone          %d\n' %zone)
2111        prjid.write('Datum         %s\n' %datum)
2112        prjid.write('Zunits        NO\n')
2113        prjid.write('Units         METERS\n')
2114        prjid.write('Spheroid      %s\n' %datum)
2115        prjid.write('Xshift        %d\n' %false_easting)
2116        prjid.write('Yshift        %d\n' %false_northing)
2117        prjid.write('Parameters\n')
2118        prjid.close()
2119
2120
2121
2122        if verbose: print 'Writing %s' %demfile
2123
2124        ascid = open(demfile, 'w')
2125
2126        ascid.write('ncols         %d\n' %ncols)
2127        ascid.write('nrows         %d\n' %nrows)
2128        ascid.write('xllcorner     %d\n' %newxllcorner)
2129        ascid.write('yllcorner     %d\n' %newyllcorner)
2130        ascid.write('cellsize      %f\n' %cellsize)
2131        ascid.write('NODATA_value  %d\n' %NODATA_value)
2132
2133
2134        #Get bounding polygon from mesh
2135        #P = interp.mesh.get_boundary_polygon()
2136        #inside_indices = inside_polygon(grid_points, P)
2137
2138        for i in range(nrows):
2139            if verbose and i%((nrows+10)/10)==0:
2140                print 'Doing row %d of %d' %(i, nrows)
2141
2142            base_index = (nrows-i-1)*ncols
2143
            row = grid_values[base_index:base_index+ncols]
            s = array2string(row, max_line_width=sys.maxint)
2146            ascid.write(s[1:-1] + '\n')
2147
2148
2149            #print
2150            #for j in range(ncols):
2151            #    index = base_index+j#
2152            #    print grid_values[index],
2153            #    ascid.write('%f ' %grid_values[index])
2154            #ascid.write('\n')
2155
2156        #Close
2157        ascid.close()
2158        fid.close()
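
# Example usage of sww2dem (a sketch; 'myrun' is a hypothetical basename
# referring to an existing myrun.sww file). Export the maximum water depth
# over all timesteps as an ArcView ASCII grid with 25 m cells:
#
#     sww2dem('myrun', basename_out='myrun_depth',
#             quantity='stage-elevation', reduction=max,
#             cellsize=25, format='asc', verbose=True)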
2159
2160#Backwards compatibility
2161def sww2asc(basename_in, basename_out = None,
2162            quantity = None,
2163            timestep = None,
2164            reduction = None,
2165            cellsize = 10,
2166            verbose = False,
2167            origin = None):
    print 'sww2asc will soon be obsolete - please use sww2dem'
2169    sww2dem(basename_in,
2170            basename_out = basename_out,
2171            quantity = quantity,
2172            timestep = timestep,
2173            reduction = reduction,
2174            cellsize = cellsize,
2175            verbose = verbose,
2176            origin = origin,
            datum = 'WGS84',
            format = 'asc')
2179
2180def sww2ers(basename_in, basename_out = None,
2181            quantity = None,
2182            timestep = None,
2183            reduction = None,
2184            cellsize = 10,
2185            verbose = False,
2186            origin = None,
2187            datum = 'WGS84'):
    print 'sww2ers will soon be obsolete - please use sww2dem'
2189    sww2dem(basename_in,
2190            basename_out = basename_out,
2191            quantity = quantity,
2192            timestep = timestep,
2193            reduction = reduction,
2194            cellsize = cellsize,
2195            verbose = verbose,
2196            origin = origin,
2197            datum = datum,
2198            format = 'ers')
2199################################# END COMPATIBILITY ##############
2200
2201
2202
2203def sww2pts(basename_in, basename_out=None,
2204            data_points=None,
2205            quantity=None,
2206            timestep=None,
2207            reduction=None,
2208            NODATA_value=-9999,
2209            verbose=False,
2210            origin=None):
2211            #datum = 'WGS84')
2212
2213
2214    """Read SWW file and convert to interpolated values at selected points
2215
    The parameter 'quantity' must be the name of an existing quantity or
2217    an expression involving existing quantities. The default is
2218    'elevation'.
2219
    If timestep (an index) is given, output the quantity at that timestep.

    If reduction is given, use it to reduce the quantity over all timesteps.

    data_points (Nx2 array) gives the locations where the quantity is to be computed.
2225   
2226    """
2227
2228    import sys
2229    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
2230    from Numeric import array2string
2231
2232    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
2233    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
2234
2235    from anuga.geospatial_data.geospatial_data import Geospatial_data
2236
2237    if quantity is None:
2238        quantity = 'elevation'
2239
2240    if reduction is None:
2241        reduction = max
2242
2243    if basename_out is None:
2244        basename_out = basename_in + '_%s' %quantity
2245
2246    swwfile = basename_in + '.sww'
2247    ptsfile = basename_out + '.pts'
2248
2249    # Read sww file
2250    if verbose: print 'Reading from %s' %swwfile
2251    from Scientific.IO.NetCDF import NetCDFFile
2252    fid = NetCDFFile(swwfile)
2253
2254    # Get extent and reference
2255    x = fid.variables['x'][:]
2256    y = fid.variables['y'][:]
2257    volumes = fid.variables['volumes'][:]
2258
2259    number_of_timesteps = fid.dimensions['number_of_timesteps']
2260    number_of_points = fid.dimensions['number_of_points']
2261    if origin is None:
2262
2263        # Get geo_reference
2264        # sww files don't have to have a geo_ref
2265        try:
2266            geo_reference = Geo_reference(NetCDFObject=fid)
2267        except AttributeError, e:
2268            geo_reference = Geo_reference() #Default georef object
2269
2270        xllcorner = geo_reference.get_xllcorner()
2271        yllcorner = geo_reference.get_yllcorner()
2272        zone = geo_reference.get_zone()
2273    else:
2274        zone = origin[0]
2275        xllcorner = origin[1]
2276        yllcorner = origin[2]
2277
2278
2279
2280    # FIXME: Refactor using code from file_function.statistics
2281    # Something like print swwstats(swwname)
2282    if verbose:
2283        x = fid.variables['x'][:]
2284        y = fid.variables['y'][:]
2285        times = fid.variables['time'][:]
2286        print '------------------------------------------------'
2287        print 'Statistics of SWW file:'
2288        print '  Name: %s' %swwfile
2289        print '  Reference:'
2290        print '    Lower left corner: [%f, %f]'\
2291              %(xllcorner, yllcorner)
2292        print '    Start time: %f' %fid.starttime[0]
2293        print '  Extent:'
2294        print '    x [m] in [%f, %f], len(x) == %d'\
2295              %(min(x.flat), max(x.flat), len(x.flat))
2296        print '    y [m] in [%f, %f], len(y) == %d'\
2297              %(min(y.flat), max(y.flat), len(y.flat))
2298        print '    t [s] in [%f, %f], len(t) == %d'\
2299              %(min(times), max(times), len(times))
2300        print '  Quantities [SI units]:'
2301        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2302            q = fid.variables[name][:].flat
2303            print '    %s in [%f, %f]' %(name, min(q), max(q))
2304
2305
2306
2307    # Get quantity and reduce if applicable
2308    if verbose: print 'Processing quantity %s' %quantity
2309
2310    # Turn NetCDF objects into Numeric arrays
2311    quantity_dict = {}
2312    for name in fid.variables.keys():
2313        quantity_dict[name] = fid.variables[name][:]
2314
2315
2316
2317    # Convert quantity expression to quantities found in sww file   
2318    q = apply_expression_to_dictionary(quantity, quantity_dict)
2319
2320
2321
2322    if len(q.shape) == 2:
2323        # q has a time component and needs to be reduced along
2324        # the temporal dimension
2325        if verbose: print 'Reducing quantity %s' %quantity
2326        q_reduced = zeros( number_of_points, Float )
2327
2328        for k in range(number_of_points):
2329            q_reduced[k] = reduction( q[:,k] )
2330
2331        q = q_reduced
2332
2333    # Post condition: Now q has dimension: number_of_points
2334    assert len(q.shape) == 1
2335    assert q.shape[0] == number_of_points
2336
2337
2338    if verbose:
2339        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
2340
2341
2342    # Create grid and update xll/yll corner and x,y
2343    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2344    assert len(vertex_points.shape) == 2
2345
2346    # Interpolate
2347    from anuga.fit_interpolate.interpolate import Interpolate
2348    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2349
2350    # Interpolate using quantity values
2351    if verbose: print 'Interpolating'
2352    interpolated_values = interp.interpolate(q, data_points).flat
2353
2354
2355    if verbose:
2356        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
2357                                                      max(interpolated_values))
2358
2359    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2360    P = interp.mesh.get_boundary_polygon()
2361    outside_indices = outside_polygon(data_points, P, closed=True)
2362
2363    for i in outside_indices:
2364        interpolated_values[i] = NODATA_value
2365
2366
2367    # Store results   
2368    G = Geospatial_data(data_points=data_points,
2369                        attributes=interpolated_values)
2370
2371    G.export_points_file(ptsfile, absolute = True)
2372
2373    fid.close()
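
# Example usage of sww2pts (a sketch; 'myrun' is a hypothetical basename and
# the coordinates are made up). Interpolate the maximum stage at two points:
#
#     sww2pts('myrun', quantity='stage', reduction=max,
#             data_points=[[405600.0, 6438150.0], [405700.0, 6438300.0]],
#             verbose=True)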
2374
2375
2376def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2377                                  use_cache = False,
2378                                  verbose = False):
2379    """Read Digitial Elevation model from the following ASCII format (.asc)
2380
2381    Example:
2382
2383    ncols         3121
2384    nrows         1800
2385    xllcorner     722000
2386    yllcorner     5893000
2387    cellsize      25
2388    NODATA_value  -9999
2389    138.3698 137.4194 136.5062 135.5558 ..........
2390
2391    Convert basename_in + '.asc' to NetCDF format (.dem)
2392    mimicking the ASCII format closely.
2393
2394
2395    An accompanying file with same basename_in but extension .prj must exist
2396    and is used to fix the UTM zone, datum, false northings and eastings.
2397
2398    The prj format is assumed to be as
2399
2400    Projection    UTM
2401    Zone          56
2402    Datum         WGS84
2403    Zunits        NO
2404    Units         METERS
2405    Spheroid      WGS84
2406    Xshift        0.0000000000
2407    Yshift        10000000.0000000000
2408    Parameters
2409    """
2410
2411
2412
2413    kwargs = {'basename_out': basename_out, 'verbose': verbose}
2414
2415    if use_cache is True:
2416        from caching import cache
2417        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
2418                       dependencies = [basename_in + '.asc'],
2419                       verbose = verbose)
2420
2421    else:
        result = _convert_dem_from_ascii2netcdf(basename_in, **kwargs)
2423
2424    return result
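
# Example usage (a sketch; 'mydem' is a hypothetical basename requiring both
# mydem.asc and mydem.prj to exist):
#
#     convert_dem_from_ascii2netcdf('mydem', use_cache=True, verbose=True)
#
# This would write the gridded data to mydem.dem in NetCDF format.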
2425
2426
2427
2428
2429
2430
2431def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2432                                  verbose = False):
2433    """Read Digitial Elevation model from the following ASCII format (.asc)
2434
2435    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2436    """
2437
2438    import os
2439    from Scientific.IO.NetCDF import NetCDFFile
2440    from Numeric import Float, array
2441
2442    #root, ext = os.path.splitext(basename_in)
2443    root = basename_in
2444
2445    ###########################################
2446    # Read Meta data
2447    if verbose: print 'Reading METADATA from %s' %root + '.prj'
2448    metadatafile = open(root + '.prj')
2449    metalines = metadatafile.readlines()
2450    metadatafile.close()
2451
2452    L = metalines[0].strip().split()
2453    assert L[0].strip().lower() == 'projection'
2454    projection = L[1].strip()                   #TEXT
2455
2456    L = metalines[1].strip().split()
2457    assert L[0].strip().lower() == 'zone'
2458    zone = int(L[1].strip())
2459
2460    L = metalines[2].strip().split()
2461    assert L[0].strip().lower() == 'datum'
2462    datum = L[1].strip()                        #TEXT
2463
2464    L = metalines[3].strip().split()
2465    assert L[0].strip().lower() == 'zunits'     #IGNORE
2466    zunits = L[1].strip()                       #TEXT
2467
2468    L = metalines[4].strip().split()
2469    assert L[0].strip().lower() == 'units'
2470    units = L[1].strip()                        #TEXT
2471
2472    L = metalines[5].strip().split()
2473    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2474    spheroid = L[1].strip()                     #TEXT
2475
2476    L = metalines[6].strip().split()
2477    assert L[0].strip().lower() == 'xshift'
2478    false_easting = float(L[1].strip())
2479
2480    L = metalines[7].strip().split()
2481    assert L[0].strip().lower() == 'yshift'
2482    false_northing = float(L[1].strip())
2483
2484    #print false_easting, false_northing, zone, datum
2485
2486
2487    ###########################################
2488    #Read DEM data
2489
2490    datafile = open(basename_in + '.asc')
2491
2492    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2493    lines = datafile.readlines()
2494    datafile.close()
2495
    if verbose: print 'Got %d lines' %len(lines)
2497
2498    ncols = int(lines[0].split()[1].strip())
2499    nrows = int(lines[1].split()[1].strip())
2500    xllcorner = float(lines[2].split()[1].strip())
2501    yllcorner = float(lines[3].split()[1].strip())
2502    cellsize = float(lines[4].split()[1].strip())
2503    NODATA_value = int(lines[5].split()[1].strip())
2504
2505    assert len(lines) == nrows + 6
2506
2507
2508    ##########################################
2509
2510
    if basename_out is None:
2512        netcdfname = root + '.dem'
2513    else:
2514        netcdfname = basename_out + '.dem'
2515
2516    if verbose: print 'Store to NetCDF file %s' %netcdfname
2517    # NetCDF file definition
2518    fid = NetCDFFile(netcdfname, 'w')
2519
2520    #Create new file
2521    fid.institution = 'Geoscience Australia'
2522    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2523                      'of spatial point data'
2524
2525    fid.ncols = ncols
2526    fid.nrows = nrows
2527    fid.xllcorner = xllcorner
2528    fid.yllcorner = yllcorner
2529    fid.cellsize = cellsize
2530    fid.NODATA_value = NODATA_value
2531
2532    fid.zone = zone
2533    fid.false_easting = false_easting
2534    fid.false_northing = false_northing
2535    fid.projection = projection
2536    fid.datum = datum
2537    fid.units = units
2538
2539
2540    # dimension definitions
2541    fid.createDimension('number_of_rows', nrows)
2542    fid.createDimension('number_of_columns', ncols)
2543
2544    # variable definitions
2545    fid.createVariable('elevation', Float, ('number_of_rows',
2546                                            'number_of_columns'))
2547
2548    # Get handles to the variables
2549    elevation = fid.variables['elevation']
2550
2551    #Store data
2552    n = len(lines[6:])
2553    for i, line in enumerate(lines[6:]):
2554        fields = line.split()
2555        if verbose and i%((n+10)/10)==0:
2556            print 'Processing row %d of %d' %(i, nrows)
2557
2558        elevation[i, :] = array([float(x) for x in fields])
2559
2560    fid.close()
2561
2562
2563
2564
2565
2566def ferret2sww(basename_in, basename_out = None,
2567               verbose = False,
2568               minlat = None, maxlat = None,
2569               minlon = None, maxlon = None,
2570               mint = None, maxt = None, mean_stage = 0,
2571               origin = None, zscale = 1,
2572               fail_on_NaN = True,
2573               NaN_filler = 0,
2574               elevation = None,
2575               inverted_bathymetry = True
2576               ): #FIXME: Bathymetry should be obtained
2577                                  #from MOST somehow.
2578                                  #Alternatively from elsewhere
2579                                  #or, as a last resort,
2580                                  #specified here.
2581                                  #The value of -100 will work
2582                                  #for the Wollongong tsunami
2583                                  #scenario but is very hacky
2584    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
2585    sww format native to abstract_2d_finite_volumes.
2586
2587    Specify only basename_in and read files of the form
    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc and
    basefilename_e.nc containing relative height, x-velocity, y-velocity
    and elevation, respectively.
2590
2591    Also convert latitude and longitude to UTM. All coordinates are
2592    assumed to be given in the GDA94 datum.
2593
    min's and max's: If omitted, the full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees.
2597
2598    origin is a 3-tuple with geo referenced
2599    UTM coordinates (zone, easting, northing)
2600
2601    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2602    which means that longitude is the fastest
2603    varying dimension (row major order, so to speak)
2604
2605    ferret2sww uses grid points as vertices in a triangular grid
2606    counting vertices from lower left corner upwards, then right
2607    """
2608
2609    import os
2610    from Scientific.IO.NetCDF import NetCDFFile
2611    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2612    from Numeric import allclose, around
2613
2614    precision = Float
2615
2616    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2617
    if minlat is not None:
        assert -90 < minlat < 90 , msg
    if maxlat is not None:
        assert -90 < maxlat < 90 , msg
        if minlat is not None:
            assert maxlat > minlat
    if minlon is not None:
        assert -180 < minlon < 180 , msg
    if maxlon is not None:
        assert -180 < maxlon < 180 , msg
        if minlon is not None:
            assert maxlon > minlon
2630       
2631
2632
2633    #Get NetCDF data
2634    if verbose: print 'Reading files %s_*.nc' %basename_in
2635    #print "basename_in + '_ha.nc'",basename_in + '_ha.nc'
2636    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2637    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2638    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2639    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2640
2641    if basename_out is None:
2642        swwname = basename_in + '.sww'
2643    else:
2644        swwname = basename_out + '.sww'
2645
    # Get dimensions of file_h
2647    for dimension in file_h.dimensions.keys():
2648        if dimension[:3] == 'LON':
2649            dim_h_longitude = dimension
2650        if dimension[:3] == 'LAT':
2651            dim_h_latitude = dimension
2652        if dimension[:4] == 'TIME':
2653            dim_h_time = dimension
2654
    # print 'long:', dim_h_longitude
    # print 'lats:', dim_h_latitude
    # print 'times:', dim_h_time
2658
2659    times = file_h.variables[dim_h_time]
2660    latitudes = file_h.variables[dim_h_latitude]
2661    longitudes = file_h.variables[dim_h_longitude]
2662   
    # Get dimensions for file_e
2664    for dimension in file_e.dimensions.keys():
2665        if dimension[:3] == 'LON':
2666            dim_e_longitude = dimension
2667        if dimension[:3] == 'LAT':
2668            dim_e_latitude = dimension
2669
    # Get dimensions for file_u
2671    for dimension in file_u.dimensions.keys():
2672        if dimension[:3] == 'LON':
2673            dim_u_longitude = dimension
2674        if dimension[:3] == 'LAT':
2675            dim_u_latitude = dimension
2676        if dimension[:4] == 'TIME':
2677            dim_u_time = dimension
2678           
    # Get dimensions for file_v
2680    for dimension in file_v.dimensions.keys():
2681        if dimension[:3] == 'LON':
2682            dim_v_longitude = dimension
2683        if dimension[:3] == 'LAT':
2684            dim_v_latitude = dimension
2685        if dimension[:4] == 'TIME':
2686            dim_v_time = dimension
2687
2688
2689    #Precision used by most for lat/lon is 4 or 5 decimals
2690    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2691    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2692
2693    #Check that files are compatible
2694    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2695    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2696    assert allclose(latitudes, e_lat)
2697
2698    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2699    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2700    assert allclose(longitudes, e_lon)
2701
    if mint is None:
        jmin = 0
    else:
        jmin = searchsorted(times, mint)

    if maxt is None:
        jmax = len(times)
    else:
        jmax = searchsorted(times, maxt)
2711
2712    #print "latitudes[:]",latitudes[:]
2713    #print "longitudes[:]",longitudes [:]
2714    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],
2715                                                  longitudes[:],
2716                                                 minlat, maxlat,
2717                                                 minlon, maxlon)
2718
2719
    # print 'j', jmin, jmax
2721    times = times[jmin:jmax]
2722    latitudes = latitudes[kmin:kmax]
2723    longitudes = longitudes[lmin:lmax]
2724
2725    #print "latitudes[:]",latitudes[:]
2726    #print "longitudes[:]",longitudes [:]
2727
2728    if verbose: print 'cropping'
2729    zname = 'ELEVATION'
2730
2731    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2732    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2733    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2734    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2735
2736    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2737    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2738    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2739    #        from Numeric import asarray
2740    #        elevations=elevations.tolist()
2741    #        elevations.reverse()
2742    #        elevations=asarray(elevations)
2743    #    else:
2744    #        from Numeric import asarray
2745    #        elevations=elevations.tolist()
2746    #        elevations.reverse()
2747    #        elevations=asarray(elevations)
2748    #        'print hmmm'
2749
2750
2751
2752    #Get missing values
2753    nan_ha = file_h.variables['HA'].missing_value[0]
2754    nan_ua = file_u.variables['UA'].missing_value[0]
2755    nan_va = file_v.variables['VA'].missing_value[0]
2756    if hasattr(file_e.variables[zname],'missing_value'):
2757        nan_e  = file_e.variables[zname].missing_value[0]
2758    else:
2759        nan_e = None
2760
2761    #Cleanup
2762    from Numeric import sometrue
2763
2764    missing = (amplitudes == nan_ha)
2765    if sometrue (missing):
2766        if fail_on_NaN:
2767            msg = 'NetCDFFile %s contains missing values'\
2768                  %(basename_in+'_ha.nc')
2769            raise DataMissingValuesError, msg
2770        else:
2771            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2772
2773    missing = (uspeed == nan_ua)
2774    if sometrue (missing):
2775        if fail_on_NaN:
2776            msg = 'NetCDFFile %s contains missing values'\
2777                  %(basename_in+'_ua.nc')
2778            raise DataMissingValuesError, msg
2779        else:
2780            uspeed = uspeed*(missing==0) + missing*NaN_filler
2781
2782    missing = (vspeed == nan_va)
2783    if sometrue (missing):
2784        if fail_on_NaN:
2785            msg = 'NetCDFFile %s contains missing values'\
2786                  %(basename_in+'_va.nc')
2787            raise DataMissingValuesError, msg
2788        else:
2789            vspeed = vspeed*(missing==0) + missing*NaN_filler
2790
2791
2792    missing = (elevations == nan_e)
2793    if sometrue (missing):
2794        if fail_on_NaN:
2795            msg = 'NetCDFFile %s contains missing values'\
2796                  %(basename_in+'_e.nc')
2797            raise DataMissingValuesError, msg
2798        else:
2799            elevations = elevations*(missing==0) + missing*NaN_filler
2800
2801    #######
2802
2803
2804
2805    number_of_times = times.shape[0]
2806    number_of_latitudes = latitudes.shape[0]
2807    number_of_longitudes = longitudes.shape[0]
2808
2809    assert amplitudes.shape[0] == number_of_times
2810    assert amplitudes.shape[1] == number_of_latitudes
2811    assert amplitudes.shape[2] == number_of_longitudes
2812
2813    if verbose:
2814        print '------------------------------------------------'
2815        print 'Statistics:'
2816        print '  Extent (lat/lon):'
2817        print '    lat in [%f, %f], len(lat) == %d'\
2818              %(min(latitudes.flat), max(latitudes.flat),
2819                len(latitudes.flat))
2820        print '    lon in [%f, %f], len(lon) == %d'\
2821              %(min(longitudes.flat), max(longitudes.flat),
2822                len(longitudes.flat))
2823        print '    t in [%f, %f], len(t) == %d'\
2824              %(min(times.flat), max(times.flat), len(times.flat))
2825
        q = amplitudes.flat
        name = 'Amplitudes (ha) [cm]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = uspeed.flat
        name = 'Speeds (ua) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = vspeed.flat
        name = 'Speeds (va) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = elevations.flat
        name = 'Elevations (e) [m]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2841
2842
2843    #print number_of_latitudes, number_of_longitudes
2844    number_of_points = number_of_latitudes*number_of_longitudes
2845    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2846
2847
2848    file_h.close()
2849    file_u.close()
2850    file_v.close()
2851    file_e.close()
2852
2853
2854    # NetCDF file definition
2855    outfile = NetCDFFile(swwname, 'w')
2856
2857    #Create new file
2858    outfile.institution = 'Geoscience Australia'
2859    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2860                          %(basename_in + '_ha.nc',
2861                            basename_in + '_ua.nc',
2862                            basename_in + '_va.nc',
2863                            basename_in + '_e.nc')
2864
2865
2866    #For sww compatibility
2867    outfile.smoothing = 'Yes'
2868    outfile.order = 1
2869
2870    #Start time in seconds since the epoch (midnight 1/1/1970)
2871    outfile.starttime = starttime = times[0]
2872    times = times - starttime  #Store relative times
2873
2874    # dimension definitions
2875    outfile.createDimension('number_of_volumes', number_of_volumes)
2876
2877    outfile.createDimension('number_of_vertices', 3)
2878    outfile.createDimension('number_of_points', number_of_points)
2879
2880
2882    outfile.createDimension('number_of_timesteps', len(times))
2883
2884    # variable definitions
2885    outfile.createVariable('x', precision, ('number_of_points',))
2886    outfile.createVariable('y', precision, ('number_of_points',))
2887    outfile.createVariable('elevation', precision, ('number_of_points',))
2888
2889    #FIXME: Backwards compatibility
2890    outfile.createVariable('z', precision, ('number_of_points',))
2891    #################################
2892
2893    outfile.createVariable('volumes', Int, ('number_of_volumes',
2894                                            'number_of_vertices'))
2895
2896    outfile.createVariable('time', precision,
2897                           ('number_of_timesteps',))
2898
2899    outfile.createVariable('stage', precision,
2900                           ('number_of_timesteps',
2901                            'number_of_points'))
2902
2903    outfile.createVariable('xmomentum', precision,
2904                           ('number_of_timesteps',
2905                            'number_of_points'))
2906
2907    outfile.createVariable('ymomentum', precision,
2908                           ('number_of_timesteps',
2909                            'number_of_points'))
2910
2911
2912    #Store
2913    from anuga.coordinate_transforms.redfearn import redfearn
2914    x = zeros(number_of_points, Float)  #Easting
2915    y = zeros(number_of_points, Float)  #Northing
2916
2917
2918    if verbose: print 'Making triangular grid'
2919    #Check zone boundaries
2920    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2921
2922    vertices = {}
2923    i = 0
2924    for k, lat in enumerate(latitudes):       #Y direction
2925        for l, lon in enumerate(longitudes):  #X direction
2926
2927            vertices[l,k] = i
2928
2929            zone, easting, northing = redfearn(lat,lon)
2930
            msg = 'Zone boundary crossed at longitude = %f' %lon
2932            #assert zone == refzone, msg
2933            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2934            x[i] = easting
2935            y[i] = northing
2936            i += 1
2937
2938
2939    #Construct 2 triangles per 'rectangular' element
2940    volumes = []
2941    for l in range(number_of_longitudes-1):    #X direction
2942        for k in range(number_of_latitudes-1): #Y direction
2943            v1 = vertices[l,k+1]
2944            v2 = vertices[l,k]
2945            v3 = vertices[l+1,k+1]
2946            v4 = vertices[l+1,k]
2947
2948            volumes.append([v1,v2,v3]) #Upper element
2949            volumes.append([v4,v3,v2]) #Lower element
2950
2951    volumes = array(volumes)
2952
    if origin is None:
2954        zone = refzone
2955        xllcorner = min(x)
2956        yllcorner = min(y)
2957    else:
2958        zone = origin[0]
2959        xllcorner = origin[1]
2960        yllcorner = origin[2]
2961
2962
2963    outfile.xllcorner = xllcorner
2964    outfile.yllcorner = yllcorner
2965    outfile.zone = zone
2966
2967
2968    if elevation is not None:
2969        z = elevation
2970    else:
2971        if inverted_bathymetry:
2972            z = -1*elevations
2973        else:
2974            z = elevations
2975    #FIXME: z should be obtained from MOST and passed in here
2976
2977    from Numeric import resize
2978    z = resize(z,outfile.variables['z'][:].shape)
2979    outfile.variables['x'][:] = x - xllcorner
2980    outfile.variables['y'][:] = y - yllcorner
    outfile.variables['z'][:] = z             #FIXME HACK for backwards compat.
2982    outfile.variables['elevation'][:] = z
2983    outfile.variables['time'][:] = times   #Store time relative
2984    outfile.variables['volumes'][:] = volumes.astype(Int32) #For Opteron 64
2985
2986
2987
2988    #Time stepping
2989    stage = outfile.variables['stage']
2990    xmomentum = outfile.variables['xmomentum']
2991    ymomentum = outfile.variables['ymomentum']
2992
2993    if verbose: print 'Converting quantities'
2994    n = len(times)
2995    for j in range(n):
2996        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2997        i = 0
2998        for k in range(number_of_latitudes):      #Y direction
2999            for l in range(number_of_longitudes): #X direction
3000                w = zscale*amplitudes[j,k,l]/100 + mean_stage
3001                stage[j,i] = w
3002                h = w - z[i]
3003                xmomentum[j,i] = uspeed[j,k,l]/100*h
3004                ymomentum[j,i] = vspeed[j,k,l]/100*h
3005                i += 1
3006
3007    #outfile.close()
3008
3009    #FIXME: Refactor using code from file_function.statistics
3010    #Something like print swwstats(swwname)
3011    if verbose:
3012        x = outfile.variables['x'][:]
3013        y = outfile.variables['y'][:]
3014        times = outfile.variables['time'][:]
3015        print '------------------------------------------------'
3016        print 'Statistics of output file:'
3017        print '  Name: %s' %swwname
3018        print '  Reference:'
3019        print '    Lower left corner: [%f, %f]'\
3020              %(xllcorner, yllcorner)
3021        print '    Start time: %f' %starttime
3022        print '  Extent:'
3023        print '    x [m] in [%f, %f], len(x) == %d'\
3024              %(min(x.flat), max(x.flat), len(x.flat))
3025        print '    y [m] in [%f, %f], len(y) == %d'\
3026              %(min(y.flat), max(y.flat), len(y.flat))
3027        print '    t [s] in [%f, %f], len(t) == %d'\
3028              %(min(times), max(times), len(times))
3029        print '  Quantities [SI units]:'
3030        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
3031            q = outfile.variables[name][:].flat
3032            print '    %s in [%f, %f]' %(name, min(q), max(q))
3033
3034
3035
3036    outfile.close()
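
# Example usage of ferret2sww (a sketch; 'most_output' is a hypothetical
# basename requiring most_output_ha.nc, most_output_ua.nc, most_output_va.nc
# and most_output_e.nc to exist). Crop to a lat/lon window and write sww:
#
#     ferret2sww('most_output',
#                minlat=-35.0, maxlat=-34.0,
#                minlon=150.0, maxlon=151.0,
#                mean_stage=0.0, verbose=True)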
3037
3038
3039
3040
3041
3042def timefile2netcdf(filename, quantity_names=None, time_as_seconds=False):
3043    """Template for converting typical text files with time series to
3044    NetCDF tms file.
3045
3046
3047    The file format is assumed to be either two fields separated by a comma:
3048
3049        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
3050
3051    E.g
3052
3053      31/08/04 00:00:00, 1.328223 0 0
3054      31/08/04 00:15:00, 1.292912 0 0
3055
3056    or time (seconds), value0 value1 value2 ...
3057   
3058      0.0, 1.328223 0 0
3059      0.1, 1.292912 0 0
3060
3061    will provide a time dependent function f(t) with three attributes
3062
    filename is assumed to be the rootname with extensions .txt and .tms
3064    """
3065
3066    import time, calendar
3067    from Numeric import array
3068    from anuga.config import time_format
3069    from anuga.utilities.numerical_tools import ensure_numeric
3070
3071
3072
3073    fid = open(filename + '.txt')
3074    line = fid.readline()
3075    fid.close()
3076
3077    fields = line.split(',')
    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
3079    assert len(fields) == 2, msg
3080
3081    if not time_as_seconds:
3082        try:
3083            starttime = calendar.timegm(time.strptime(fields[0], time_format))
3084        except ValueError:
3085            msg = 'First field in file %s must be' %filename
3086            msg += ' date-time with format %s.\n' %time_format
3087            msg += 'I got %s instead.' %fields[0]
3088            raise DataTimeError, msg
3089    else:
3090        try:
3091            starttime = float(fields[0])
        except ValueError:
3093            msg = "Bad time format"
3094            raise DataTimeError, msg
3095
3096
3097    #Split values
3098    values = []
3099    for value in fields[1].split():
3100        values.append(float(value))
3101
3102    q = ensure_numeric(values)
3103
3104    msg = 'ERROR: File must contain at least one independent value'
3105    assert len(q.shape) == 1, msg
3106
3107
3108
3109    #Read times proper
3110    from Numeric import zeros, Float, alltrue
3111    from anuga.config import time_format
3112    import time, calendar
3113
3114    fid = open(filename + '.txt')
3115    lines = fid.readlines()
3116    fid.close()
3117
3118    N = len(lines)
3119    d = len(q)
3120
3121    T = zeros(N, Float)       #Time
3122    Q = zeros((N, d), Float)  #Values
3123
3124    for i, line in enumerate(lines):
3125        fields = line.split(',')
3126        if not time_as_seconds:
3127            realtime = calendar.timegm(time.strptime(fields[0], time_format))
3128        else:
            realtime = float(fields[0])
3130        T[i] = realtime - starttime
3131
3132        for j, value in enumerate(fields[1].split()):
3133            Q[i, j] = float(value)
3134
    msg = 'File %s must list time as a monotonically ' %filename
3136    msg += 'increasing sequence'
3137    assert alltrue( T[1:] - T[:-1] > 0 ), msg
3138
3139    #Create NetCDF file
3140    from Scientific.IO.NetCDF import NetCDFFile
3141
3142    fid = NetCDFFile(filename + '.tms', 'w')
3143
3144
3145    fid.institution = 'Geoscience Australia'
3146    fid.description = 'Time series'
3147
3148
3149    #Reference point
3150    #Start time in seconds since the epoch (midnight 1/1/1970)
3151    #FIXME: Use Georef
3152    fid.starttime = starttime
3153
3154    # dimension definitions
3155    #fid.createDimension('number_of_volumes', self.number_of_volumes)
3156    #fid.createDimension('number_of_vertices', 3)
3157
3158
3159    fid.createDimension('number_of_timesteps', len(T))
3160
3161    fid.createVariable('time', Float, ('number_of_timesteps',))
3162
3163    fid.variables['time'][:] = T
3164
3165    for i in range(Q.shape[1]):
3166        try:
3167            name = quantity_names[i]
        except (TypeError, IndexError):
            name = 'Attribute%d' %i
3170
3171        fid.createVariable(name, Float, ('number_of_timesteps',))
3172        fid.variables[name][:] = Q[:,i]
3173
3174    fid.close()
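
# Example usage (a sketch; 'boundary' is a hypothetical basename requiring
# boundary.txt to exist). Convert a comma separated time series into
# boundary.tms with named attributes:
#
#     timefile2netcdf('boundary',
#                     quantity_names=['stage', 'xmomentum', 'ymomentum'])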
3175
3176
3177def extent_sww(file_name):
3178    """
3179    Read in an sww file.
3180
3181    Input;
3182    file_name - the sww file
3183
3184    Output;
3185    z - Vector of bed elevation
3186    volumes - Array.  Each row has 3 values, representing
3187    the vertices that define the volume
3188    time - Vector of the times where there is stage information
3189    stage - array with respect to time and vertices (x,y)
3190    """
3191
3192
3193    from Scientific.IO.NetCDF import NetCDFFile
3194
3195    #Check contents
3196    #Get NetCDF
3197    fid = NetCDFFile(file_name, 'r')
3198
3199    # Get the variables
3200    x = fid.variables['x'][:]
3201    y = fid.variables['y'][:]
3202    stage = fid.variables['stage'][:]
3203    #print "stage",stage
3204    #print "stage.shap",stage.shape
3205    #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
3206    #print "min(stage)",min(stage)
3207
3208    fid.close()
3209
3210    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
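
# Example usage (a sketch; 'myrun.sww' is a hypothetical file):
#
#     xmin, xmax, ymin, ymax, stage_min, stage_max = extent_sww('myrun.sww')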
3211
3212
def sww2domain(filename, boundary=None, t=None,
               fail_if_NaN=True, NaN_filler=0,
               verbose=False, very_verbose=False):
3216    """
3217    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
3218
3219    Boundary is not recommended if domain.smooth is not selected, as it
3220    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
3222    give a different final boundary, or crash.
3223    """
    NaN = 9.969209968386869e+036  #Initialise NaN (missing value marker)
3226
3227    from Scientific.IO.NetCDF import NetCDFFile
3228    from shallow_water import Domain
3229    from Numeric import asarray, transpose, resize
3230
3231    if verbose: print 'Reading from ', filename
3232    fid = NetCDFFile(filename, 'r')    #Open existing file for read
3233    time = fid.variables['time']       #Timesteps
3234    if t is None:
3235        t = time[-1]
3236    time_interp = get_time_interp(time,t)
3237
3238    # Get the variables as Numeric arrays
3239    x = fid.variables['x'][:]             #x-coordinates of vertices
3240    y = fid.variables['y'][:]             #y-coordinates of vertices
3241    elevation = fid.variables['elevation']     #Elevation
3242    stage = fid.variables['stage']     #Water level
3243    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
3244    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
3245
3246    starttime = fid.starttime[0]
3247    volumes = fid.variables['volumes'][:] #Connectivity
3248    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
3249
3250    conserved_quantities = []
3251    interpolated_quantities = {}
3252    other_quantities = []
3253
3254    # get geo_reference
3255    #sww files don't have to have a geo_ref
3256    try:
3257        geo_reference = Geo_reference(NetCDFObject=fid)
3258    except: #AttributeError, e:
3259        geo_reference = None
3260
3261    if verbose: print '    getting quantities'
3262    for quantity in fid.variables.keys():
3263        dimensions = fid.variables[quantity].dimensions
3264        if 'number_of_timesteps' in dimensions:
3265            conserved_quantities.append(quantity)
3266            interpolated_quantities[quantity]=\
3267                  interpolated_quantity(fid.variables[quantity][:],time_interp)
3268        else: other_quantities.append(quantity)
3269
3270    other_quantities.remove('x')
3271    other_quantities.remove('y')
3272    other_quantities.remove('z')
3273    other_quantities.remove('volumes')
3274
3275    conserved_quantities.remove('time')
3276
3277    if verbose: print '    building domain'
3278    #    From domain.Domain:
3279    #    domain = Domain(coordinates, volumes,\
3280    #                    conserved_quantities = conserved_quantities,\
3281    #                    other_quantities = other_quantities,zone=zone,\
3282    #                    xllcorner=xllcorner, yllcorner=yllcorner)
3283
3284    #   From shallow_water.Domain:
3285    coordinates=coordinates.tolist()
3286    volumes=volumes.tolist()
3287    #FIXME:should this be in mesh?(peter row)
3288    if fid.smoothing == 'Yes': unique = False
3289    else: unique = True
3290    if unique:
3291        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
3292
3293
3294    try:
3295        domain = Domain(coordinates, volumes, boundary)
3296    except AssertionError, e:
3297        fid.close()
3298        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
3299        raise DataDomainError, msg
3300
    if boundary is not None:
3302        domain.boundary = boundary
3303
3304    domain.geo_reference = geo_reference
3305
3306    domain.starttime=float(starttime)+float(t)
3307    domain.time=0.0
3308
3309    for quantity in other_quantities:
3310        try:
3311            NaN = fid.variables[quantity].missing_value
        except AttributeError:
            pass # quantity has no missing_value number
3314        X = fid.variables[quantity][:]
3315        if very_verbose:
3316            print '       ',quantity
3317            print '        NaN =',NaN
3318            print '        max(X)'
3319            print '       ',max(X)
3320            print '        max(X)==NaN'
3321            print '       ',max(X)==NaN
3322            print ''
3323        if (max(X)==NaN) or (min(X)==NaN):
3324            if fail_if_NaN:
3325                msg = 'quantity "%s" contains no_data entry'%quantity
3326                raise DataMissingValuesError, msg
3327            else:
3328                data = (X<>NaN)
3329                X = (X*data)+(data==0)*NaN_filler
3330        if unique:
3331            X = resize(X,(len(X)/3,3))
3332        domain.set_quantity(quantity,X)
3333    #
3334    for quantity in conserved_quantities:
3335        try:
3336            NaN = fid.variables[quantity].missing_value
        except AttributeError:
            pass # quantity has no missing_value number
3339        X = interpolated_quantities[quantity]
3340        if very_verbose:
3341            print '       ',quantity
3342            print '        NaN =',NaN
3343            print '        max(X)'
3344            print '       ',max(X)
3345            print '        max(X)==NaN'
3346            print '       ',max(X)==NaN
3347            print ''
3348        if (max(X)==NaN) or (min(X)==NaN):
3349            if fail_if_NaN:
3350                msg = 'quantity "%s" contains no_data entry'%quantity
3351                raise DataMissingValuesError, msg
3352            else:
3353                data = (X<>NaN)
3354                X = (X*data)+(data==0)*NaN_filler
3355        if unique:
3356            X = resize(X,(X.shape[0]/3,3))
3357        domain.set_quantity(quantity,X)
3358
3359    fid.close()
3360    return domain
3361
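# Illustrative sketch (not called by this module): restart a model from the
# last timestep stored in an existing sww file.  The file name, yieldstep and
# finaltime below are hypothetical.
def _example_sww2domain_restart(filename='restart.sww'):
    domain = sww2domain(filename, verbose=True) # t defaults to last timestep
    domain.set_name('continued_run')
    # Boundary conditions would need to be set (e.g. via domain.set_boundary)
    # before evolving; omitted here.
    for t in domain.evolve(yieldstep=10, finaltime=100):
        print 'model time =', domain.time
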
def interpolated_quantity(saved_quantity, time_interp):
    """Given an (index, ratio) pair from get_time_interp, interpolate
    quantity with respect to time.
    """
    index, ratio = time_interp
3366    Q = saved_quantity
3367    if ratio > 0:
3368        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
3369    else:
3370        q = Q[index]
3371    #Return vector of interpolated values
3372    return q
3373
def get_time_interp(time, t=None):
    """Find the ratio and index for time interpolation.
    Borrowed from previous abstract_2d_finite_volumes code.
    """
3377    if t is None:
3378        t=time[-1]
3379        index = -1
3380        ratio = 0.
3381    else:
3382        T = time
3383        tau = t
3384        index=0
3385        msg = 'Time interval derived from file %s [%s:%s]'\
3386            %('FIXMEfilename', T[0], T[-1])
3387        msg += ' does not match model time: %s' %tau
3388        if tau < time[0]: raise DataTimeError, msg
3389        if tau > time[-1]: raise DataTimeError, msg
3390        while tau > time[index]: index += 1
3391        while tau < time[index]: index -= 1
3392        if tau == time[index]:
3393            #Protect against case where tau == time[-1] (last time)
3394            # - also works in general when tau == time[i]
3395            ratio = 0
3396        else:
3397            #t is now between index and index+1
3398            ratio = (tau - time[index])/(time[index+1] - time[index])
3399    return (index,ratio)
3400
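# Worked example of the (index, ratio) convention used by get_time_interp and
# interpolated_quantity, with made-up times: t=2.5 within [0, 2, 4] yields
# index=1, ratio=0.25, so q = 0.75*Q[1] + 0.25*Q[2].
def _example_get_time_interp():
    index, ratio = get_time_interp([0.0, 2.0, 4.0], 2.5)
    assert index == 1
    assert abs(ratio - 0.25) < 1.e-12
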
3401
def weed(coordinates, volumes, boundary=None):
    if type(coordinates).__name__ == 'array':
        coordinates = coordinates.tolist()
    if type(volumes).__name__ == 'array':
        volumes = volumes.tolist()
3407
3408    unique = False
3409    point_dict = {}
3410    same_point = {}
3411    for i in range(len(coordinates)):
3412        point = tuple(coordinates[i])
3413        if point_dict.has_key(point):
3414            unique = True
3415            same_point[i]=point
3416            #to change all point i references to point j
3417        else:
3418            point_dict[point]=i
3419            same_point[i]=point
3420
3421    coordinates = []
3422    i = 0
3423    for point in point_dict.keys():
3424        point = tuple(point)
3425        coordinates.append(list(point))
3426        point_dict[point]=i
3427        i+=1
3428
3429
3430    for volume in volumes:
3431        for i in range(len(volume)):
3432            index = volume[i]
3433            if index>-1:
3434                volume[i]=point_dict[same_point[index]]
3435
    new_boundary = {}
    if boundary is not None:
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME: should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')? (peter row)

            if new_boundary.has_key((point0,point1)):
                new_boundary[(point0,point1)] = new_boundary[(point0,point1)] #+','+label

            elif new_boundary.has_key((point1,point0)):
                new_boundary[(point1,point0)] = new_boundary[(point1,point0)] #+','+label
            else: new_boundary[(point0,point1)] = label

        boundary = new_boundary
3455
3456    return coordinates,volumes,boundary
3457
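# Tiny illustrative example of weed(): two triangles sharing an edge, with the
# shared vertices stored twice, collapse to 4 unique points and the volumes
# are renumbered accordingly.
def _example_weed():
    coordinates = [[0,0], [1,0], [0,1],
                   [1,0], [1,1], [0,1]] # points 3 and 5 duplicate 1 and 2
    volumes = [[0,1,2], [3,4,5]]
    coordinates, volumes, _ = weed(coordinates, volumes)
    assert len(coordinates) == 4
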
3458
3459def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
3460                 verbose=False):
3461    """Read Digitial Elevation model from the following NetCDF format (.dem)
3462
3463    Example:
3464
3465    ncols         3121
3466    nrows         1800
3467    xllcorner     722000
3468    yllcorner     5893000
3469    cellsize      25
3470    NODATA_value  -9999
3471    138.3698 137.4194 136.5062 135.5558 ..........
3472
3473    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3474    """
3475
3476    import os
3477    from Scientific.IO.NetCDF import NetCDFFile
3478    from Numeric import Float, zeros, sum, reshape, equal
3479
3480    root = basename_in
3481    inname = root + '.dem'
3482
3483    #Open existing netcdf file to read
3484    infile = NetCDFFile(inname, 'r')
3485    if verbose: print 'Reading DEM from %s' %inname
3486
3487    #Read metadata
3488    ncols = infile.ncols[0]
3489    nrows = infile.nrows[0]
3490    xllcorner = infile.xllcorner[0]
3491    yllcorner = infile.yllcorner[0]
3492    cellsize = infile.cellsize[0]
3493    NODATA_value = infile.NODATA_value[0]
3494    zone = infile.zone[0]
3495    false_easting = infile.false_easting[0]
3496    false_northing = infile.false_northing[0]
3497    projection = infile.projection
3498    datum = infile.datum
3499    units = infile.units
3500
3501    dem_elevation = infile.variables['elevation']
3502
3503    #Get output file name
    if basename_out is None:
3505        outname = root + '_' + repr(cellsize_new) + '.dem'
3506    else:
3507        outname = basename_out + '.dem'
3508
3509    if verbose: print 'Write decimated NetCDF file to %s' %outname
3510
3511    #Determine some dimensions for decimated grid
3512    (nrows_stencil, ncols_stencil) = stencil.shape
3513    x_offset = ncols_stencil / 2
3514    y_offset = nrows_stencil / 2
3515    cellsize_ratio = int(cellsize_new / cellsize)
3516    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3517    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3518
3519    #Open netcdf file for output
3520    outfile = NetCDFFile(outname, 'w')
3521
3522    #Create new file
3523    outfile.institution = 'Geoscience Australia'
3524    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3525                           'of spatial point data'
3526    #Georeferencing
3527    outfile.zone = zone
3528    outfile.projection = projection
3529    outfile.datum = datum
3530    outfile.units = units
3531
3532    outfile.cellsize = cellsize_new
3533    outfile.NODATA_value = NODATA_value
3534    outfile.false_easting = false_easting
3535    outfile.false_northing = false_northing
3536
3537    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3538    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3539    outfile.ncols = ncols_new
3540    outfile.nrows = nrows_new
3541
3542    # dimension definition
3543    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3544
3545    # variable definition
3546    outfile.createVariable('elevation', Float, ('number_of_points',))
3547
3548    # Get handle to the variable
3549    elevation = outfile.variables['elevation']
3550
3551    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3552
3553    #Store data
3554    global_index = 0
3555    for i in range(nrows_new):
3556        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3557        lower_index = global_index
3558        telev =  zeros(ncols_new, Float)
3559        local_index = 0
3560        trow = i * cellsize_ratio
3561
3562        for j in range(ncols_new):
3563            tcol = j * cellsize_ratio
3564            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3565
3566            #if dem contains 1 or more NODATA_values set value in
3567            #decimated dem to NODATA_value, else compute decimated
3568            #value using stencil
3569            if sum(sum(equal(tmp, NODATA_value))) > 0:
3570                telev[local_index] = NODATA_value
3571            else:
3572                telev[local_index] = sum(sum(tmp * stencil))
3573
3574            global_index += 1
3575            local_index += 1
3576
3577        upper_index = global_index
3578
3579        elevation[lower_index:upper_index] = telev
3580
3581    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3582
3583    infile.close()
3584    outfile.close()
3585
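# Illustrative call (file names hypothetical): decimate a 25m DEM stored in
# 'mydem.dem' to 100m cells, averaging with a 4x4 stencil whose entries sum
# to one.
def _example_decimate_dem():
    from Numeric import ones, Float
    stencil = ones((4, 4), Float) / 16.
    decimate_dem('mydem', stencil, 100, basename_out='mydem_100m')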
3586
3587
3588
def tsh2sww(filename, verbose=False): #test_tsh2sww
    """
    Convert a tsh/msh file to an sww file, to check if the mesh 'looks' good.
    """

    if verbose: print 'Creating domain from', filename
    domain = pmesh_to_domain_instance(filename, Domain)
    if verbose: print "Number of triangles = ", len(domain)

    domain.smooth = True
    domain.format = 'sww'   #Native netcdf visualisation format
    file_path, filename = path.split(filename)
    filename, ext = path.splitext(filename)
    domain.set_name(filename)
    domain.reduction = mean
    if verbose: print "file_path", file_path
    if file_path == "": file_path = "."
    domain.set_datadir(file_path)

    if verbose:
        print "Output written to " + domain.get_datadir() + sep + \
              domain.get_name() + "." + domain.format
3612    sww = get_dataobject(domain)
3613    sww.store_connectivity()
3614    sww.store_timestep('stage')
3615
3616
3617def asc_csiro2sww(bath_dir,
3618                  elevation_dir,
3619                  ucur_dir,
3620                  vcur_dir,
3621                  sww_file,
3622                  minlat = None, maxlat = None,
3623                  minlon = None, maxlon = None,
3624                  zscale=1,
3625                  mean_stage = 0,
3626                  fail_on_NaN = True,
3627                  elevation_NaN_filler = 0,
3628                  bath_prefix='ba',
3629                  elevation_prefix='el',
3630                  verbose=False):
3631    """
3632    Produce an sww boundary file, from esri ascii data from CSIRO.
3633
3634    Also convert latitude and longitude to UTM. All coordinates are
3635    assumed to be given in the GDA94 datum.
3636
3637    assume:
3638    All files are in esri ascii format
3639
3640    4 types of information
3641    bathymetry
3642    elevation
3643    u velocity
3644    v velocity
3645
3646    Assumptions
3647    The metadata of all the files is the same
3648    Each type is in a seperate directory
3649    One bath file with extention .000
3650    The time period is less than 24hrs and uniform.
3651    """
3652    from Scientific.IO.NetCDF import NetCDFFile
3653
3654    from anuga.coordinate_transforms.redfearn import redfearn
3655
    precision = Float # So if we want to change the precision, it's done here
3657
    # go into the bath dir and load the only file
3659    bath_files = os.listdir(bath_dir)
3660
3661    bath_file = bath_files[0]
3662    bath_dir_file =  bath_dir + os.sep + bath_file
3663    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3664
3665    #Use the date.time of the bath file as a basis for
3666    #the start time for other files
3667    base_start = bath_file[-12:]
3668
3669    #go into the elevation dir and load the 000 file
3670    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3671                         + base_start
3672
3673    elevation_files = os.listdir(elevation_dir)
3674    ucur_files = os.listdir(ucur_dir)
3675    vcur_files = os.listdir(vcur_dir)
3676    elevation_files.sort()
3677    # the first elevation file should be the
3678    # file with the same base name as the bath data
    assert elevation_files[0] == elevation_prefix + base_start
3680
3681    number_of_latitudes = bath_grid.shape[0]
3682    number_of_longitudes = bath_grid.shape[1]
3683    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3684
3685    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3686                  for x in range(number_of_longitudes)]
3687    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3688                 for y in range(number_of_latitudes)]
3689
    # reverse order of lat, so the first lat represents the first grid row
    latitudes.reverse()
3692
3693    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],longitudes[:],
3694                                                 minlat=minlat, maxlat=maxlat,
3695                                                 minlon=minlon, maxlon=maxlon)
3696
3697
3698    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3699    latitudes = latitudes[kmin:kmax]
3700    longitudes = longitudes[lmin:lmax]
3701    number_of_latitudes = len(latitudes)
3702    number_of_longitudes = len(longitudes)
3703    number_of_times = len(os.listdir(elevation_dir))
3704    number_of_points = number_of_latitudes*number_of_longitudes
3705    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3706
3707    #Work out the times
3708    if len(elevation_files) > 1:
3709        # Assume: The time period is less than 24hrs.
3710        time_period = (int(elevation_files[1][-3:]) - \
3711                      int(elevation_files[0][-3:]))*60*60
3712        times = [x*time_period for x in range(len(elevation_files))]
3713    else:
3714        times = [0.0]
3715
3716
3717    if verbose:
3718        print '------------------------------------------------'
3719        print 'Statistics:'
3720        print '  Extent (lat/lon):'
3721        print '    lat in [%f, %f], len(lat) == %d'\
3722              %(min(latitudes), max(latitudes),
3723                len(latitudes))
3724        print '    lon in [%f, %f], len(lon) == %d'\
3725              %(min(longitudes), max(longitudes),
3726                len(longitudes))
3727        print '    t in [%f, %f], len(t) == %d'\
3728              %(min(times), max(times), len(times))
3729
3730    ######### WRITE THE SWW FILE #############
3731    # NetCDF file definition
3732    outfile = NetCDFFile(sww_file, 'w')
3733
3734    #Create new file
3735    outfile.institution = 'Geoscience Australia'
3736    outfile.description = 'Converted from XXX'
3737
3738
3739    #For sww compatibility
3740    outfile.smoothing = 'Yes'
3741    outfile.order = 1
3742
3743    #Start time in seconds since the epoch (midnight 1/1/1970)
3744    outfile.starttime = starttime = times[0]
3745
3746
3747    # dimension definitions
3748    outfile.createDimension('number_of_volumes', number_of_volumes)
3749
3750    outfile.createDimension('number_of_vertices', 3)
3751    outfile.createDimension('number_of_points', number_of_points)
3752    outfile.createDimension('number_of_timesteps', number_of_times)
3753
3754    # variable definitions
3755    outfile.createVariable('x', precision, ('number_of_points',))
3756    outfile.createVariable('y', precision, ('number_of_points',))
3757    outfile.createVariable('elevation', precision, ('number_of_points',))
3758
3759    #FIXME: Backwards compatibility
3760    outfile.createVariable('z', precision, ('number_of_points',))
3761    #################################
3762
3763    outfile.createVariable('volumes', Int, ('number_of_volumes',
3764                                            'number_of_vertices'))
3765
3766    outfile.createVariable('time', precision,
3767                           ('number_of_timesteps',))
3768
3769    outfile.createVariable('stage', precision,
3770                           ('number_of_timesteps',
3771                            'number_of_points'))
3772
3773    outfile.createVariable('xmomentum', precision,
3774                           ('number_of_timesteps',
3775                            'number_of_points'))
3776
3777    outfile.createVariable('ymomentum', precision,
3778                           ('number_of_timesteps',
3779                            'number_of_points'))
3780
3781    #Store
3782    from anuga.coordinate_transforms.redfearn import redfearn
3783    x = zeros(number_of_points, Float)  #Easting
3784    y = zeros(number_of_points, Float)  #Northing
3785
3786    if verbose: print 'Making triangular grid'
3787    #Get zone of 1st point.
3788    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
3789
3790    vertices = {}
3791    i = 0
3792    for k, lat in enumerate(latitudes):
3793        for l, lon in enumerate(longitudes):
3794
3795            vertices[l,k] = i
3796
3797            zone, easting, northing = redfearn(lat,lon)
3798
            msg = 'Zone boundary crossed at longitude = %s' % lon
            #assert zone == refzone, msg
3801            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
3802            x[i] = easting
3803            y[i] = northing
3804            i += 1
3805
3806
3807    #Construct 2 triangles per 'rectangular' element
3808    volumes = []
3809    for l in range(number_of_longitudes-1):    #X direction
3810        for k in range(number_of_latitudes-1): #Y direction
3811            v1 = vertices[l,k+1]
3812            v2 = vertices[l,k]
3813            v3 = vertices[l+1,k+1]
3814            v4 = vertices[l+1,k]
3815
            #Note, this is different to the ferret2sww code
            #since the order of the lats is reversed.
3818            volumes.append([v1,v3,v2]) #Upper element
3819            volumes.append([v4,v2,v3]) #Lower element
3820
3821    volumes = array(volumes)
3822
3823    geo_ref = Geo_reference(refzone,min(x),min(y))
3824    geo_ref.write_NetCDF(outfile)
3825
3826    # This will put the geo ref in the middle
3827    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
3828
3829
    if verbose:
        print '------------------------------------------------'
        print 'More Statistics:'
        print '  Extent (x/y):'
        print '    x in [%f, %f], len(x) == %d'\
              %(min(x), max(x),
                len(x))
        print '    y in [%f, %f], len(y) == %d'\
              %(min(y), max(y),
                len(y))
        print 'geo_ref: ', geo_ref
3841
3842    z = resize(bath_grid,outfile.variables['z'][:].shape)
3843    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
3844    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
3845    outfile.variables['z'][:] = z
3846    outfile.variables['elevation'][:] = z  #FIXME HACK
3847    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
3848
3849    stage = outfile.variables['stage']
3850    xmomentum = outfile.variables['xmomentum']
3851    ymomentum = outfile.variables['ymomentum']
3852
3853    outfile.variables['time'][:] = times   #Store time relative
3854
3855    if verbose: print 'Converting quantities'
3856    n = number_of_times
3857    for j in range(number_of_times):
3858        # load in files
3859        elevation_meta, elevation_grid = \
3860           _read_asc(elevation_dir + os.sep + elevation_files[j])
3861
3862        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
3863        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])
3864
3865        #cut matrix to desired size
3866        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
3867        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
3868        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
3869       
3870        # handle missing values
3871        missing = (elevation_grid == elevation_meta['NODATA_value'])
3872        if sometrue (missing):
3873            if fail_on_NaN:
3874                msg = 'File %s contains missing values'\
3875                      %(elevation_files[j])
3876                raise DataMissingValuesError, msg
3877            else:
3878                elevation_grid = elevation_grid*(missing==0) + \
3879                                 missing*elevation_NaN_filler
3880
3881
3882        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
3883        i = 0
3884        for k in range(number_of_latitudes):      #Y direction
3885            for l in range(number_of_longitudes): #X direction
3886                w = zscale*elevation_grid[k,l] + mean_stage
3887                stage[j,i] = w
3888                h = w - z[i]
3889                xmomentum[j,i] = u_momentum_grid[k,l]*h
3890                ymomentum[j,i] = v_momentum_grid[k,l]*h
3891                i += 1
3892    outfile.close()
3893
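# Illustrative call (directory and file names hypothetical): build a boundary
# sww from CSIRO esri ascii grids, clipped to a one degree lat/lon window.
def _example_asc_csiro2sww():
    asc_csiro2sww('bathys', 'elevations', 'ucur', 'vcur', 'boundary.sww',
                  minlat=-38.5, maxlat=-37.5,
                  minlon=148.0, maxlon=149.0,
                  verbose=True)
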
3894def _get_min_max_indexes(latitudes_ref,longitudes_ref,
3895                        minlat=None, maxlat=None,
3896                        minlon=None, maxlon=None):
3897    """
3898    return max, min indexes (for slicing) of the lat and long arrays to cover the area
3899    specified with min/max lat/long
3900
3901    Think of the latitudes and longitudes describing a 2d surface.
3902    The area returned is, if possible, just big enough to cover the
3903    inputed max/min area. (This will not be possible if the max/min area
3904    has a section outside of the latitudes/longitudes area.)
3905
3906    asset  longitudes are sorted,
3907    long - from low to high (west to east, eg 148 - 151)
3908    assert latitudes are sorted, ascending or decending
3909    """
3910    latitudes = latitudes_ref[:]
3911    longitudes = longitudes_ref[:]
3912
3913    latitudes = ensure_numeric(latitudes)
3914    longitudes = ensure_numeric(longitudes)
3915   
3916    assert allclose(sort(longitudes), longitudes)
3917   
3918    lat_ascending = True
3919    if not allclose(sort(latitudes), latitudes):
3920        lat_ascending = False
3921        # reverse order of lat, so it's in ascending order         
3922        latitudes = latitudes[::-1]
3923        assert allclose(sort(latitudes), latitudes)
3924    #print "latitudes  in funct", latitudes
3925   
3926    largest_lat_index = len(latitudes)-1
3927    #Cut out a smaller extent.
    if minlat is None:
        lat_min_index = 0
    else:
        lat_min_index = searchsorted(latitudes, minlat) - 1
        if lat_min_index < 0:
            lat_min_index = 0


    if maxlat is None:
        lat_max_index = largest_lat_index #len(latitudes)
    else:
        lat_max_index = searchsorted(latitudes, maxlat)
        if lat_max_index > largest_lat_index:
            lat_max_index = largest_lat_index

    if minlon is None:
        lon_min_index = 0
    else:
        lon_min_index = searchsorted(longitudes, minlon) - 1
        if lon_min_index < 0:
            lon_min_index = 0

    if maxlon is None:
        lon_max_index = len(longitudes)
    else:
        lon_max_index = searchsorted(longitudes, maxlon)
3954
    # Reversing the indexes, if the lat array is descending
3956    if lat_ascending is False:
3957        lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
3958                                       largest_lat_index - lat_min_index
3959    lat_max_index = lat_max_index + 1 # taking into account how slicing works
3960    lon_max_index = lon_max_index + 1 # taking into account how slicing works
3961
3962    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
3963
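# Worked example with made-up coordinates: latitudes descending, as read from
# an esri grid.  Note the returned slices are padded outwards by one grid
# line where needed, so the requested extent is fully covered.
def _example_get_min_max_indexes():
    lats = [-33.0, -34.0, -35.0, -36.0]   # descending
    longs = [149.0, 150.0, 151.0, 152.0]
    kmin, kmax, lmin, lmax = _get_min_max_indexes(lats, longs,
                                                  minlat=-34.5, maxlat=-33.5,
                                                  minlon=150.5, maxlon=151.5)
    assert lats[kmin:kmax] == [-33.0, -34.0, -35.0]
    assert longs[lmin:lmax] == [150.0, 151.0, 152.0]
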
3964
3965def _read_asc(filename, verbose=False):
3966    """Read esri file from the following ASCII format (.asc)
3967
3968    Example:
3969
3970    ncols         3121
3971    nrows         1800
3972    xllcorner     722000
3973    yllcorner     5893000
3974    cellsize      25
3975    NODATA_value  -9999
3976    138.3698 137.4194 136.5062 135.5558 ..........
3977
3978    """
3979
3980    datafile = open(filename)
3981
3982    if verbose: print 'Reading DEM from %s' %(filename)
3983    lines = datafile.readlines()
3984    datafile.close()
3985
3986    if verbose: print 'Got', len(lines), ' lines'
3987
3988    ncols = int(lines.pop(0).split()[1].strip())
3989    nrows = int(lines.pop(0).split()[1].strip())
3990    xllcorner = float(lines.pop(0).split()[1].strip())
3991    yllcorner = float(lines.pop(0).split()[1].strip())
3992    cellsize = float(lines.pop(0).split()[1].strip())
3993    NODATA_value = float(lines.pop(0).split()[1].strip())
3994
3995    assert len(lines) == nrows
3996
3997    #Store data
3998    grid = []
3999
4000    n = len(lines)
4001    for i, line in enumerate(lines):
4002        cells = line.split()
4003        assert len(cells) == ncols
4004        grid.append(array([float(x) for x in cells]))
4005    grid = array(grid)
4006
4007    return {'xllcorner':xllcorner,
4008            'yllcorner':yllcorner,
4009            'cellsize':cellsize,
4010            'NODATA_value':NODATA_value}, grid
4011
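# Illustrative round trip (file name hypothetical): write a tiny 2x3 esri
# ascii grid, then read it back with _read_asc.
def _example_read_asc(filename='tiny.asc'):
    fd = open(filename, 'w')
    fd.write('ncols 3\n' 'nrows 2\n' 'xllcorner 0.0\n' 'yllcorner 0.0\n'
             'cellsize 25.0\n' 'NODATA_value -9999\n'
             '1.0 2.0 3.0\n' '4.0 5.0 6.0\n')
    fd.close()
    metadata, grid = _read_asc(filename)
    assert metadata['cellsize'] == 25.0
    assert grid.shape == (2, 3)
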
4012
4013
4014    ####  URS 2 SWW  ###
4015
4016lon_name = 'LON'
4017lat_name = 'LAT'
4018time_name = 'TIME'
precision = Float # So if we want to change the precision, it's done here
4020class Write_nc:
4021    """
4022    Write an nc file.
4023   
4024    Note, this should be checked to meet cdc netcdf conventions for gridded
4025    data. http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml
4026   
4027    """
4028    def __init__(self,
4029                 quantity_name,
4030                 file_name,
4031                 time_step_count,
4032                 time_step,
4033                 lon,
4034                 lat):
4035        """
4036        time_step_count is the number of time steps.
4037        time_step is the time step size
4038       
        pre-condition: quantity_name must be 'HA', 'UA' or 'VA'.
4040        """
4041        self.quantity_name = quantity_name
4042        quantity_units = {'HA':'CENTIMETERS',
4043                              'UA':'CENTIMETERS/SECOND',
4044                              'VA':'CENTIMETERS/SECOND'}       
4045       
4046        multiplier_dic = {'HA':100.0, # To convert from m to cm
4047                              'UA':100.0,  #  m/s to cm/sec
                              'VA':-100.0}  # MUX files have positive x in the
4049        # Southern direction.  This corrects for it, when writing nc files.
4050       
4051        self.quantity_multiplier =  multiplier_dic[self.quantity_name]
4052       
4053        #self.file_name = file_name
4054        self.time_step_count = time_step_count
4055        self.time_step = time_step
4056
4057        # NetCDF file definition
4058        self.outfile = NetCDFFile(file_name, 'w')
4059        outfile = self.outfile       
4060
4061        #Create new file
4062        nc_lon_lat_header(outfile, lon, lat)
4063   
4064        # TIME
4065        outfile.createDimension(time_name, None)
4066        outfile.createVariable(time_name, precision, (time_name,))
4067
4068        #QUANTITY
4069        outfile.createVariable(self.quantity_name, precision,
4070                               (time_name, lat_name, lon_name))
4071        outfile.variables[self.quantity_name].missing_value=-1.e+034
4072        outfile.variables[self.quantity_name].units= \
4073                                 quantity_units[self.quantity_name]
4074        outfile.variables[lon_name][:]= ensure_numeric(lon)
4075        outfile.variables[lat_name][:]= ensure_numeric(lat)
4076
4077        #Assume no one will be wanting to read this, while we are writing
4078        #outfile.close()
4079       
4080    def store_timestep(self, quantity_slice):
4081        """
4082        Write a time slice of quantity info
4083        quantity_slice is the data to be stored at this time step
4084        """
4085       
4086        outfile = self.outfile
4087       
4088        # Get the variables
4089        time = outfile.variables[time_name]
4090        quantity = outfile.variables[self.quantity_name]
4091           
4092        i = len(time)
4093
4094        #Store time
4095        time[i] = i*self.time_step #self.domain.time
4096        quantity[i,:] = quantity_slice* self.quantity_multiplier
4097       
4098    def close(self):
4099        self.outfile.close()
4100
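# Illustrative use of Write_nc (file name and values hypothetical): write two
# timesteps of the 'HA' quantity on a 2x2 lat/lon grid.  Values are supplied
# in metres; the class scales them to centimetres internally.
def _example_write_nc(file_name='ha_example.nc'):
    from Numeric import array, Float
    nc = Write_nc('HA', file_name, time_step_count=2, time_step=0.5,
                  lon=[150.0, 151.0], lat=[-35.0, -34.0])
    for i in range(2):
        nc.store_timestep(array([[0.1, 0.2], [0.3, 0.4]], Float))
    nc.close()
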
4101def urs2sww(basename_in='o', basename_out=None, verbose=False,
4102            remove_nc_files=True,
4103            minlat=None, maxlat=None,
4104            minlon= None, maxlon=None,
4105            mint=None, maxt=None,
4106            mean_stage=0,
4107            origin = None,
4108            zscale=1,
4109            fail_on_NaN=True,
4110            NaN_filler=0,
4111            elevation=None,
4112            gridded=True):
4113    """
4114    Convert URS C binary format for wave propagation to
4115    sww format native to abstract_2d_finite_volumes.
4116
4117    Specify only basename_in and read files of the form
4118    basefilename-z-mux, basefilename-e-mux and basefilename-n-mux containing
4119    relative height, x-velocity and y-velocity, respectively.
4120
    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum. The latitude and longitude
    information is for a grid.

    min's and max's: If omitted - full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees.
    NOTE: minlon is the most east boundary.

    origin is a 3-tuple with geo referenced
    UTM coordinates (zone, easting, northing).
    It will be the origin of the sww file. This shouldn't be used,
    since all of anuga should be able to handle an arbitrary origin.


    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
    which means that latitude is the fastest
    varying dimension (row major order, so to speak).

    In URS C binary the latitudes and longitudes are in ascending order.
    """
4142    if gridded is False:
4143        pass
4144        #urs_ungridded2sww()
4145       
    if basename_out is None:
4147        basename_out = basename_in
4148    files_out = urs2nc(basename_in, basename_out)
4149    ferret2sww(basename_out,
4150               minlat=minlat,
4151               maxlat=maxlat,
4152               minlon=minlon,
4153               maxlon=maxlon,
4154               mint=mint,
4155               maxt=maxt,
4156               mean_stage=mean_stage,
4157               origin=origin,
4158               zscale=zscale,
4159               fail_on_NaN=fail_on_NaN,
4160               NaN_filler=NaN_filler,
4161               inverted_bathymetry=True,
4162               verbose=verbose)
4163    #print "files_out",files_out
4164    if remove_nc_files:
4165        for file_out in files_out:
4166            os.remove(file_out)
4167   
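# Illustrative call (base name hypothetical): convert 'tsunami-z-mux',
# 'tsunami-e-mux' and 'tsunami-n-mux' straight to 'tsunami.sww', clipped to a
# lat/lon window.  The intermediate nc files are removed afterwards.
def _example_urs2sww():
    urs2sww('tsunami', minlat=-36.0, maxlat=-34.0,
            minlon=150.0, maxlon=152.0, verbose=True)
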
4168def urs2nc(basename_in = 'o', basename_out = 'urs'):
4169    """
4170    Convert the 3 urs files to 4 nc files.
4171
    The names of the urs files must be:
4173    [basename_in]-z-mux
4174    [basename_in]-e-mux
4175    [basename_in]-n-mux
4176   
4177    """
4178    files_in = [basename_in+'-z-mux',
4179                basename_in+'-e-mux',
4180                basename_in+'-n-mux']
4181    files_out = [basename_out+'_ha.nc',
4182                 basename_out+'_ua.nc',
4183                 basename_out+'_va.nc']
4184    quantities = ['HA','UA','VA']
4185
4186    for file_name in files_in:
4187        if os.access(file_name, os.F_OK) == 0 :
4188            msg = 'File %s does not exist or is not accessible' %file_name
4189            raise IOError, msg
4190       
4191    hashed_elevation = None
4192    for file_in, file_out, quantity in map(None, files_in,
4193                                           files_out,
4194                                           quantities):
4195        lonlatdep, lon, lat, depth = _binary_c2nc(file_in,
4196                                         file_out,
4197                                         quantity)
4198        #print "lonlatdep", lonlatdep
        if hashed_elevation is None:
4200            elevation_file = basename_out+'_e.nc'
4201            write_elevation_nc(elevation_file,
4202                                lon,
4203                                lat,
4204                                depth)
4205            hashed_elevation = myhash(lonlatdep)
4206        else:
4207            msg = "The elevation information in the mux files is inconsistent"
4208            assert hashed_elevation == myhash(lonlatdep), msg
4209    files_out.append(elevation_file)
4210    return files_out
4211   
4212def _binary_c2nc(file_in, file_out, quantity):
4213    """
4214    Reads in a quantity urs file and writes a quantity nc file.
4215    additionally, returns the depth and lat, long info,
4216    so it can be written to a file.
4217    """
    columns = 3 # long, lat, depth
4219    mux_file = open(file_in, 'rb')
4220   
4221    # Number of points/stations
4222    (points_num,)= unpack('i',mux_file.read(4))
4223
4224    # nt, int - Number of time steps
4225    (time_step_count,)= unpack('i',mux_file.read(4))
4226
4227    #dt, float - time step, seconds
4228    (time_step,) = unpack('f', mux_file.read(4))
4229   
4230    msg = "Bad data in the mux file."
4231    if points_num < 0:
4232        mux_file.close()
4233        raise ANUGAError, msg
4234    if time_step_count < 0:
4235        mux_file.close()
4236        raise ANUGAError, msg
4237    if time_step < 0:
4238        mux_file.close()
4239        raise ANUGAError, msg
4240   
4241    lonlatdep = p_array.array('f')
4242    lonlatdep.read(mux_file, columns * points_num)
4243    lonlatdep = array(lonlatdep, typecode=Float)   
4244    lonlatdep = reshape(lonlatdep, (points_num, columns))
4245   
4246    lon, lat, depth = lon_lat2grid(lonlatdep)
4247    lon_sorted = list(lon)
4248    lon_sorted.sort()
4249
4250    if not lon == lon_sorted:
4251        msg = "Longitudes in mux file are not in ascending order"
4252        raise IOError, msg
4253    lat_sorted = list(lat)
4254    lat_sorted.sort()
4255
    if not lat == lat_sorted:
        msg = "Latitudes in mux file are not in ascending order"
        raise IOError, msg
4258   
4259    nc_file = Write_nc(quantity,
4260                       file_out,
4261                       time_step_count,
4262                       time_step,
4263                       lon,
4264                       lat)
4265
4266    for i in range(time_step_count):
4267        #Read in a time slice  from mux file 
4268        hz_p_array = p_array.array('f')
4269        hz_p_array.read(mux_file, points_num)
4270        hz_p = array(hz_p_array, typecode=Float)
4271        hz_p = reshape(hz_p, (len(lon), len(lat)))
4272        hz_p = transpose(hz_p) #mux has lat varying fastest, nc has long v.f.
4273
4274        #write time slice to nc file
4275        nc_file.store_timestep(hz_p)
4276    mux_file.close()
4277    nc_file.close()
4278
4279    return lonlatdep, lon, lat, depth
4280   
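# Illustrative sketch of the mux layout _binary_c2nc expects (file name and
# values hypothetical): int point count, int timestep count, float timestep
# size, a lon/lat/depth float triple per point, then one float per point per
# timestep.
def _example_write_tiny_mux(file_name='tiny-z-mux'):
    from struct import pack
    mux = open(file_name, 'wb')
    mux.write(pack('i', 2))   # points_num
    mux.write(pack('i', 1))   # time_step_count
    mux.write(pack('f', 0.5)) # time_step, in seconds
    for lon, lat, depth in [(150.0, -35.0, 100.0), (150.0, -34.0, 120.0)]:
        mux.write(pack('fff', lon, lat, depth))
    mux.write(pack('ff', 0.1, 0.2)) # one time slice of the quantity
    mux.close()
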
4281
4282def write_elevation_nc(file_out, lon, lat, depth_vector):
4283    """
4284    Write an nc elevation file.
4285    """
4286   
4287    # NetCDF file definition
4288    outfile = NetCDFFile(file_out, 'w')
4289
4290    #Create new file
4291    nc_lon_lat_header(outfile, lon, lat)
4292   
4293    # ELEVATION
4294    zname = 'ELEVATION'
4295    outfile.createVariable(zname, precision, (lat_name, lon_name))
4296    outfile.variables[zname].units='CENTIMETERS'
4297    outfile.variables[zname].missing_value=-1.e+034
4298
4299    outfile.variables[lon_name][:]= ensure_numeric(lon)
4300    outfile.variables[lat_name][:]= ensure_numeric(lat)
4301
4302    depth = reshape(depth_vector, ( len(lat), len(lon)))
4303    outfile.variables[zname][:]= depth
4304   
4305    outfile.close()
4306   
4307def nc_lon_lat_header(outfile, lon, lat):
4308    """
4309    outfile is the netcdf file handle.
4310    lon - a list/array of the longitudes
4311    lat - a list/array of the latitudes
4312    """
4313   
4314    outfile.institution = 'Geoscience Australia'
4315    outfile.description = 'Converted from URS binary C'
4316   
4317    # Longitude
4318    outfile.createDimension(lon_name, len(lon))
4319    outfile.createVariable(lon_name, precision, (lon_name,))
4320    outfile.variables[lon_name].point_spacing='uneven'
4321    outfile.variables[lon_name].units='degrees_east'
4322    outfile.variables[lon_name].assignValue(lon)
4323
4324
4325    # Latitude
4326    outfile.createDimension(lat_name, len(lat))
4327    outfile.createVariable(lat_name, precision, (lat_name,))
4328    outfile.variables[lat_name].point_spacing='uneven'
4329    outfile.variables[lat_name].units='degrees_north'
4330    outfile.variables[lat_name].assignValue(lat)
4331
4332
4333   
4334def lon_lat2grid(long_lat_dep):
4335    """
    given a list of points that are assumed to be on a grid,
    return the long's and lat's of the grid.
4338    long_lat_dep is an array where each row is a position.
4339    The first column is longitudes.
4340    The second column is latitudes.
4341
4342    The latitude is the fastest varying dimension - in mux files
4343    """
4344    LONG = 0
4345    LAT = 1
4346    QUANTITY = 2
4347
4348    long_lat_dep = ensure_numeric(long_lat_dep, Float)
4349   
4350    num_points = long_lat_dep.shape[0]
4351    this_rows_long = long_lat_dep[0,LONG]
4352
4353    # Count the length of unique latitudes
4354    i = 0
    while i < num_points and long_lat_dep[i,LONG] == this_rows_long:
4356        i += 1
    # determine the lats and longs from the grid
4358    lat = long_lat_dep[:i, LAT]       
4359    long = long_lat_dep[::i, LONG]
4360   
4361    lenlong = len(long)
4362    lenlat = len(lat)
4363    #print 'len lat', lat, len(lat)
4364    #print 'len long', long, len(long)
4365         
4366    msg = 'Input data is not gridded'     
4367    assert num_points % lenlat == 0, msg
4368    assert num_points % lenlong == 0, msg
4369         
4370    # Test that data is gridded       
4371    for i in range(lenlong):
4372        msg = 'Data is not gridded.  It must be for this operation'
4373        first = i*lenlat
4374        last = first + lenlat
4375               
4376        assert allclose(long_lat_dep[first:last,LAT], lat), msg
4377        assert allclose(long_lat_dep[first:last,LONG], long[i]), msg
4378   
4379   
4380#    print 'range long', min(long), max(long)
4381#    print 'range lat', min(lat), max(lat)
4382#    print 'ref long', min(long_lat_dep[:,0]), max(long_lat_dep[:,0])
4383#    print 'ref lat', min(long_lat_dep[:,1]), max(long_lat_dep[:,1])
4384   
4385   
4386   
4387    msg = 'Out of range latitudes/longitudes'
    for l in lat: assert -90 < l < 90, msg
    for l in long: assert -180 < l < 180, msg
4390
4391    # Changing quantity from lat being the fastest varying dimension to
4392    # long being the fastest varying dimension
4393    # FIXME - make this faster/do this a better way
4394    # use numeric transpose, after reshaping the quantity vector
4395#    quantity = zeros(len(long_lat_dep), Float)
4396    quantity = zeros(num_points, Float)
4397   
4398#    print 'num',num_points
4399    for lat_i, _ in enumerate(lat):
4400        for long_i, _ in enumerate(long):
4401            q_index = lat_i*lenlong+long_i
4402            lld_index = long_i*lenlat+lat_i
4403#            print 'lat_i', lat_i, 'long_i',long_i, 'q_index', q_index, 'lld_index', lld_index
4404            temp = long_lat_dep[lld_index, QUANTITY]
4405            quantity[q_index] = temp
4406           
4407    return long, lat, quantity
4408
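# Small worked example: a 2x2 grid flattened with latitude varying fastest,
# as in mux files.  The quantity comes back with longitude varying fastest.
def _example_lon_lat2grid():
    lld = [[150.0, -35.0, 1.0],
           [150.0, -34.0, 2.0],
           [151.0, -35.0, 3.0],
           [151.0, -34.0, 4.0]]
    long, lat, quantity = lon_lat2grid(lld)
    assert list(long) == [150.0, 151.0]
    assert list(lat) == [-35.0, -34.0]
    assert list(quantity) == [1.0, 3.0, 2.0, 4.0]
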
4409    ####  END URS 2 SWW  ###
4410
4411    #### URS UNGRIDDED 2 SWW ###
4412
4413    ### PRODUCING THE POINTS NEEDED FILE ###
4414LL_LAT = -50.0
4415LL_LONG = 80.0
4416GRID_SPACING = 1.0/60.0
4417LAT_AMOUNT = 4800
4418LONG_AMOUNT = 3600
4419def URS_points_needed_to_file(file_name, boundary_polygon, zone,
4420                              ll_lat=LL_LAT, ll_long=LL_LONG,
4421                              grid_spacing=GRID_SPACING, 
4422                              lat_amount=LAT_AMOUNT, long_amount=LONG_AMOUNT,
4423                              export_csv=False, use_cache=False,
4424                              verbose=False):
4425    """
4426    file_name - name of the urs file produced for David.
4427    boundary_polygon - a list of points that describes a polygon.
4428                      The last point is assumed ot join the first point.
4429                      This is in UTM (lat long would be better though)
4430
4431    ll_lat - lower left latitude, in decimal degrees
4432    ll-long - lower left longitude, in decimal degrees
4433    grid_spacing - in deciamal degrees
4434
4435
4436    Don't add the file extension.  It will be added.
4437    """
4438    geo = URS_points_needed(boundary_polygon, zone, ll_lat, ll_long,
4439                            grid_spacing, 
4440                      lat_amount, long_amount,use_cache, verbose)
4441    if not file_name[-4:] == ".urs":
4442        file_name += ".urs"
4443    geo.export_points_file(file_name)
4444    if export_csv:
4445        if file_name[-4:] == ".urs":
4446            file_name = file_name[:-4] + ".csv"
4447        geo.export_points_file(file_name)
4448
4449def URS_points_needed(boundary_polygon, zone, ll_lat=LL_LAT,
4450                      ll_long=LL_LONG, grid_spacing=GRID_SPACING, 
4451                      lat_amount=LAT_AMOUNT, long_amount=LONG_AMOUNT,
4452                      use_cache=False, verbose=False):
4453    args = (boundary_polygon,
4454                      zone)
4455    kwargs = {'ll_lat': ll_lat,
4456              'll_long': ll_long,
4457              'grid_spacing': grid_spacing,
4458              'lat_amount': lat_amount,
4459              'long_amount': long_amount} 
4460    if use_cache is True:
4461        try:
4462            from anuga.caching import cache
        except ImportError:
            msg = 'Caching was requested, but caching module '+\
                  'could not be imported'
            raise ImportError, msg
4467
4468
4469        geo = cache(_URS_points_needed,
4470                  args, kwargs,
4471                  verbose=verbose,
4472                  compression=False)
4473    else:
4474        #I was getting 'got multiple values for keyword argument' errors
4475        #geo = apply(_URS_points_needed, args, kwargs)
4476        geo = _URS_points_needed(boundary_polygon,
4477                      zone, ll_lat,
4478                      ll_long, grid_spacing, 
4479                      lat_amount, long_amount)
4480
    return geo

4482def _URS_points_needed(boundary_polygon,
4483                      zone, ll_lat=LL_LAT,
4484                      ll_long=LL_LONG, grid_spacing=GRID_SPACING, 
4485                      lat_amount=LAT_AMOUNT, long_amount=LONG_AMOUNT):
4486    """
4487
4488    boundary_polygon - a list of points that describes a polygon.
4489                      The last point is assumed ot join the first point.
4490                      This is in UTM (lat long would be better though)
4491
4492    ll_lat - lower left latitude, in decimal degrees
4493    ll-long - lower left longitude, in decimal degrees
4494    grid_spacing - in deciamal degrees
4495
4496    """
4497    #FIXME cache this function!
4498   
4499    from sets import ImmutableSet
4500   
4501    msg = "grid_spacing can not be zero"
4502    assert not grid_spacing ==0, msg
4503    a = boundary_polygon
    # List of segments.  Each segment is two points.
    # ('i and X or Y' is the pre-Python-2.5 conditional: segment i joins
    # a[i-1] to a[i]; segment 0 closes the polygon with a[-1], a[0].)
    segs = [i and [a[i-1], a[i]] or [a[len(a)-1], a[0]] for i in range(len(a))]
4506
4507    # convert the segs to Lat's and longs.
4508   
4509    # Don't assume the zone of the segments is the same as the lower left
4510    # corner of the lat long data!!  They can easily be in different zones
4511   
4512    lat_long_set = ImmutableSet()
4513    for seg in segs:
4514        points_lat_long = points_needed(seg, ll_lat, ll_long, grid_spacing, 
4515                      lat_amount, long_amount, zone)
4516        lat_long_set |= ImmutableSet(points_lat_long)
4517    #print "lat_long_set",lat_long_set
4518    geo = Geospatial_data(data_points=list(lat_long_set),
4519                              points_are_lats_longs=True)
4520    return geo
4521   
4522def points_needed(seg, ll_lat, ll_long, grid_spacing, 
4523                  lat_amount, long_amount, zone):
4524    """
4525    return a list of the points, in lats and longs that are needed to
4526    interpolate any point on the segment.
4527    """
4528    from math import sqrt
4529    #print "zone",zone
4530    geo_reference = Geo_reference(zone=zone)
4531    #print "seg",seg
4532    geo = Geospatial_data(seg,geo_reference=geo_reference)
4533    seg_lat_long = geo.get_data_points(as_lat_long=True)
4534    #print "seg_lat_long", seg_lat_long
4535    # 1.415 = 2^0.5, rounded up....
4536    sqrt_2_rounded_up = 1.415
4537    buffer = sqrt_2_rounded_up * grid_spacing
4538   
4539    #
4540   
4541    max_lat = max(seg_lat_long[0][0], seg_lat_long[1][0]) + buffer
4542    max_long = max(seg_lat_long[0][1], seg_lat_long[1][1]) + buffer
4543    min_lat = min(seg_lat_long[0][0], seg_lat_long[1][0]) - buffer
4544    min_long = min(seg_lat_long[0][1], seg_lat_long[1][1]) - buffer
4545
4546    first_row = (min_long - ll_long)/grid_spacing
4547    # To round up
4548    first_row_long = int(round(first_row + 0.5))
4549    #print "first_row", first_row_long
4550
4551    last_row = (max_long - ll_long)/grid_spacing # round down
4552    last_row_long = int(round(last_row))
4553    #print "last_row",last_row _long
4554   
4555    first_row = (min_lat - ll_lat)/grid_spacing
4556    # To round up
4557    first_row_lat = int(round(first_row + 0.5))
4558    #print "first_row", first_row_lat
4559
4560    last_row = (max_lat - ll_lat)/grid_spacing # round down
4561    last_row_lat = int(round(last_row))
4562    #print "last_row",last_row_lat
4563
    # to work out the max distance -
    # 111120 m is the distance spanned on the ground by 1 degree of latitude.
    #max_distance = sqrt_2_rounded_up * 111120 * grid_spacing
    max_distance = 157147.4112 * grid_spacing
4568    #print "max_distance", max_distance #2619.12 m for 1 minute
4569    points_lat_long = []
4570    # Create a list of the lat long points to include.
4571    for index_lat in range(first_row_lat, last_row_lat + 1):
4572        for index_long in range(first_row_long, last_row_long + 1):
4573            lat = ll_lat + index_lat*grid_spacing
4574            long = ll_long + index_long*grid_spacing
4575
4576            #filter here to keep good points
4577            if keep_point(lat, long, seg, max_distance):
4578                points_lat_long.append((lat, long)) #must be hashable
4579    #print "points_lat_long", points_lat_long
4580
    # Points too far from the segment have already been thrown out
    # by keep_point above.
    return points_lat_long
4583
4584def keep_point(lat, long, seg, max_distance):
4585    """
4586    seg is two points, UTM
4587    """
4588    from math import sqrt
4589    _ , x0, y0 = redfearn(lat, long)
4590    x1 = seg[0][0]
4591    y1 = seg[0][1]
4592    x2 = seg[1][0]
4593    y2 = seg[1][1]
4594
4595    x2_1 = x2-x1
4596    y2_1 = y2-y1
4597    d = abs((x2_1)*(y1-y0)-(x1-x0)*(y2_1))/sqrt((x2_1)*(x2_1)+(y2_1)*(y2_1))
4598    if d <= max_distance:
4599        return True
4600    else:
4601        return False
4602   
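# Worked check of the perpendicular point-to-line distance formula used in
# keep_point, with made-up UTM coordinates: (0, 1) lies at distance 1 from
# the line through (0, 0) and (2, 0).
def _example_point_line_distance():
    from math import sqrt
    x0, y0 = 0.0, 1.0                     # the candidate point
    x1, y1, x2, y2 = 0.0, 0.0, 2.0, 0.0   # the segment
    x2_1 = x2-x1
    y2_1 = y2-y1
    d = abs((x2_1)*(y1-y0)-(x1-x0)*(y2_1))/sqrt((x2_1)*(x2_1)+(y2_1)*(y2_1))
    assert d == 1.0
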
4603    #### CONVERTING UNGRIDDED URS DATA TO AN SWW FILE ####
4604def urs_ungridded2sww_link(basename_in='o', basename_out=None, verbose=False,
4605            remove_nc_files=True,
4606            minlat=None, maxlat=None,
4607            minlon= None, maxlon=None,
4608            mint=None, maxt=None,
4609            mean_stage=0,
4610            origin = None,
4611            zscale=1,
4612            fail_on_NaN=True,
4613            NaN_filler=0,
4614            elevation=None):
4615    """
    validation stuff on parameters; not used
4617    """
4618    pass
4619   
4620def urs_ungridded2sww(basename_in='o', basename_out=None, verbose=False,
4621            mint=None, maxt=None,
4622            mean_stage=0,
4623            origin = None,
4624            zscale=1):
4625    """   
4626    Convert URS C binary format for wave propagation to
4627    sww format native to abstract_2d_finite_volumes.
4628
4629    Specify only basename_in and read files of the form
4630    basefilename-z-mux, basefilename-e-mux and basefilename-n-mux containing
4631    relative height, x-velocity and y-velocity, respectively.
4632
    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum. The latitude and longitude
    information is assumed to be ungridded.

    min's and max's: If omitted - full extent is used.
    To include a value min and max may equal it.
    Lat and lon are assumed to be in decimal degrees.

    origin is a 3-tuple with geo referenced
    UTM coordinates (zone, easting, northing).
    It will be the origin of the sww file. This shouldn't be used,
    since all of anuga should be able to handle an arbitrary origin.


    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
    which means that latitude is the fastest
    varying dimension (row major order, so to speak).

    In URS C binary the latitudes and longitudes are in ascending order.
    """
4653    from anuga.pmesh.mesh import Mesh
4654
4655    files_in = [basename_in+'-z-mux',
4656                basename_in+'-e-mux',
4657                basename_in+'-n-mux']
4658    quantities = ['HA','UA','VA']
4659
    # instantiate Urs_points for the three mux files.
4661    mux = {}
4662    for quantity, file in map(None, quantities, files_in):
4663        mux[quantity] = Urs_points(file)
4664       
4665    # Could check that the depth is the same. (hashing)
4666
4667    # handle to a mux file to do depth stuff
4668    a_mux = mux[quantities[0]]
4669   
4670    # Convert to utm
4671    lat = a_mux.lonlatdep[:,1]
4672    long = a_mux.lonlatdep[:,0]
4673    points_utm, zone = convert_from_latlon_to_utm( \
4674        latitudes=lat, longitudes=long)
4675    #print "points_utm", points_utm
4676    #print "zone", zone
4677
    elevation = a_mux.lonlatdep[:,2] * -1 # depth (positive down) -> elevation
4679   
    # grid (create a mesh from the selected points)
    # This mesh has a problem.  Triangles are stretched over ungridded areas.
    # If these areas could be described as holes in pmesh, that would be great.
4683   
4684    mesh = Mesh()
4685    mesh.add_vertices(points_utm)
4686    mesh.auto_segment()
4687    mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
4688    mesh_dic = mesh.Mesh2MeshList()
4689
4690    #mesh.export_mesh_file(basename_in + '.tsh')
4691    # These are the times of the mux file
4692    mux_times = []
4693    for i in range(a_mux.time_step_count):
4694        mux_times.append(a_mux.time_step * i) 
4695    mux_times_start_i, mux_times_fin_i = mux2sww_time(mux_times, mint, maxt)
4696    times = mux_times[mux_times_start_i:mux_times_fin_i]
4697   
4698    if mux_times_start_i == mux_times_fin_i:
4699        # Close the mux files
4700        for quantity, file in map(None, quantities, files_in):
4701            mux[quantity].close()
4702        msg="Due to mint and maxt there's no time info in the boundary SWW."
4703        assert 1==0 , msg #Happy to know a better way of doing this..
4704       
4705    # If this raise is removed there is currently no downstream errors
4706           
4707    points_utm=ensure_numeric(points_utm)
4708    #print "mesh_dic['generatedpointlist']", mesh_dic['generatedpointlist']
4709    #print "points_utm", points_utm
    assert allclose(ensure_numeric(mesh_dic['generatedpointlist']),
                    ensure_numeric(points_utm))
4712   
4713    volumes = mesh_dic['generatedtrianglelist']
4714   
4715    # write sww intro and grid stuff.   
4716    if basename_out is None:
4717        swwname = basename_in + '.sww'
4718    else:
4719        swwname = basename_out + '.sww'
4720
4721    if verbose: print 'Output to ', swwname
4722    outfile = NetCDFFile(swwname, 'w')
4723    # For a different way of doing this, check out tsh2sww
4724    # work out sww_times and the index range this covers
4725    write_sww_header(outfile, times, len(volumes), len(points_utm))
4726    outfile.mean_stage = mean_stage
4727    outfile.zscale = zscale
4728    write_sww_triangulation(outfile, points_utm, volumes,
4729                            elevation, zone,  origin=origin, verbose=verbose)
4730   
4731    if verbose: print 'Converting quantities'
4732    j = 0
4733    # Read in a time slice from each mux file and write it to the sww file
4734    for HA, UA, VA in map(None, mux['HA'], mux['UA'], mux['VA']):
4735        if j >= mux_times_start_i and j < mux_times_fin_i:
4736            write_sww_time_slice(outfile, HA, UA, VA,  elevation,
4737                                 j - mux_times_start_i,
4738                                 mean_stage=mean_stage, zscale=zscale,
4739                                 verbose=verbose)
4740        j += 1
4741    outfile.close()
def mux2sww_time(mux_times, mint, maxt):
    """
    Work out the range of indexes, [mux_times_start_i, mux_times_fin_i),
    of mux_times covered by mint and maxt.
    """

    if mint is None:
        mux_times_start_i = 0
    else:
        mux_times_start_i = searchsorted(mux_times, mint)

    if maxt is None:
4754        mux_times_fin_i = len(mux_times)
4755    else:
4756        maxt += 0.5 # so if you specify a time where there is
4757                    # data that time will be included
4758        mux_times_fin_i = searchsorted(mux_times, maxt)
4759
4760    return mux_times_start_i, mux_times_fin_i
4761
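# Worked example of mux2sww_time with made-up times: for times [0, 10, 20, 30]
# and mint=5, maxt=25 the half-open index range is (1, 3), i.e. the slice
# covering times 10.0 and 20.0 (maxt is padded by 0.5s inside the function so
# an exact match is included).
def _example_mux2sww_time():
    start_i, fin_i = mux2sww_time([0.0, 10.0, 20.0, 30.0], 5, 25)
    assert (start_i, fin_i) == (1, 3)
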
def write_sww_header(outfile, times, number_of_volumes, number_of_points):
    """
    outfile - handle of the (already open) NetCDF file that will be written to
    times - A list of the time slice times
    number_of_volumes - the number of triangles
    number_of_points - the number of points/vertices
    """
    number_of_times = len(times)

    outfile.institution = 'Geoscience Australia'
    outfile.description = 'Converted from XXX'

    # For sww compatibility
    outfile.smoothing = 'Yes'
    outfile.order = 1

    # Start time in seconds since the epoch (midnight 1/1/1970)
    if len(times) == 0:
        outfile.starttime = 0
    else:
        outfile.starttime = times[0]

    # dimension definitions
    outfile.createDimension('number_of_volumes', number_of_volumes)
    outfile.createDimension('number_of_vertices', 3)
    outfile.createDimension('number_of_points', number_of_points)
    outfile.createDimension('number_of_timesteps', number_of_times)

    # variable definitions
    outfile.createVariable('x', precision, ('number_of_points',))
    outfile.createVariable('y', precision, ('number_of_points',))
    outfile.createVariable('elevation', precision, ('number_of_points',))

    # FIXME: Backwards compatibility
    outfile.createVariable('z', precision, ('number_of_points',))
    #################################

    outfile.createVariable('volumes', Int, ('number_of_volumes',
                                            'number_of_vertices'))

    outfile.createVariable('time', precision,
                           ('number_of_timesteps',))

    outfile.createVariable('stage', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('xmomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('ymomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.variables['time'][:] = times


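# Minimal usage sketch for write_sww_header (hypothetical file name and
# times; 'precision' is the module-level NetCDF precision used above):
#
#   fid = NetCDFFile('example.sww', 'w')
#   write_sww_header(fid, times=[0.0, 30.0, 60.0],
#                    number_of_volumes=2, number_of_points=4)
#   # ... write the triangulation and time slices, then:
#   fid.close()
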
def write_sww_triangulation(outfile, points_utm, volumes,
                            elevation, zone, origin=None, verbose=False):

    number_of_points = len(points_utm)
    volumes = array(volumes)

    if origin is not None:
        if isinstance(origin, Geo_reference):
            geo_ref = origin
        else:
            geo_ref = Geo_reference(*origin)
    else:
        geo_ref = Geo_reference(zone,
                                min(points_utm[:,0]),
                                min(points_utm[:,1]))
    geo_ref.write_NetCDF(outfile)

    x = points_utm[:,0]
    y = points_utm[:,1]
    if verbose:
        print '------------------------------------------------'
        print 'More Statistics:'
        print '  Extent:'
        print '    x in [%f, %f], len(x) == %d'\
              %(min(x), max(x), len(x))
        print '    y in [%f, %f], len(y) == %d'\
              %(min(y), max(y), len(y))
        print '    z in [%f, %f], len(z) == %d'\
              %(min(elevation), max(elevation), len(elevation))
        print 'geo_ref: ', geo_ref
        print '------------------------------------------------'

    outfile.variables['x'][:] = points_utm[:,0] - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = points_utm[:,1] - geo_ref.get_yllcorner()
    outfile.variables['z'][:] = elevation
    outfile.variables['elevation'][:] = elevation  #FIXME HACK
    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64


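# Note on the coordinate handling above: the stored x and y are relative to
# the geo reference's lower-left corner, e.g. (hypothetical numbers) with
# xllcorner == 300000.0 a UTM easting of 300123.4 is written as 123.4.
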
def write_sww_time_slices(outfile, has, uas, vas, elevation,
                          mean_stage=0, zscale=1,
                          verbose=False):
    # Time stepping
    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    n = len(has)
    j = 0
    for ha, ua, va in map(None, has, uas, vas):
        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
        w = zscale*ha + mean_stage
        stage[j] = w
        h = w - elevation
        xmomentum[j] = ua*h
        ymomentum[j] = -1*va*h # -1 since in mux files south is positive.
        j += 1

def write_sww_time_slice(outfile, ha, ua, va, elevation, slice_index,
                         mean_stage=0, zscale=1,
                         verbose=False):
    # Time stepping
    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    w = zscale*ha + mean_stage
    stage[slice_index] = w
    h = w - elevation
    xmomentum[slice_index] = ua*h
    ymomentum[slice_index] = -1*va*h # -1 since in mux files south is positive.

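# Worked example of the update above (hypothetical values): with zscale=1,
# mean_stage=0, ha=0.5 m and elevation=-10.0 m, the stage is w = 0.5 m and
# the water depth is h = w - elevation = 10.5 m, so a mux velocity
# ua = 0.2 m/s becomes xmomentum = ua*h = 2.1 m^2/s (and va is negated
# because mux files store south as positive).
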
def urs2txt(basename_in, location_index=None):
    """
    Not finished or tested
    """
    files_in = [basename_in + '-z-mux',
                basename_in + '-e-mux',
                basename_in + '-n-mux']
    quantities = ['HA', 'UA', 'VA']

    d = ","

    # Instantiate Urs_points for each of the three mux files.
    mux = {}
    for quantity, file in map(None, quantities, files_in):
        mux[quantity] = Urs_points(file)

    # Could check that the depth is the same. (hashing)

    # Handle to a mux file, used for the depth information
    a_mux = mux[quantities[0]]

    # Convert to utm
    latitudes = a_mux.lonlatdep[:,1]
    longitudes = a_mux.lonlatdep[:,0]
    points_utm, zone = convert_from_latlon_to_utm( \
        latitudes=latitudes, longitudes=longitudes)
    depths = a_mux.lonlatdep[:,2]

    fid = open(basename_in + '.txt', 'w')

    fid.write("zone: " + str(zone) + "\n")

    if location_index is not None:
        # Title
        li = location_index
        fid.write('location_index' +d+ 'lat' +d+ 'long' +d+ 'Easting' +d+ \
                  'Northing' + "\n")
        fid.write(str(li) +d+ str(latitudes[li]) +d+ \
                  str(longitudes[li]) +d+ str(points_utm[li][0]) +d+ \
                  str(points_utm[li][1]) + "\n")

    # The non-time-dependent data
    # Title
    fid.write('location_index' +d+ 'lat' +d+ 'long' +d+ 'Easting' +d+ \
              'Northing' +d+ 'depth m' + "\n")
    i = 0
    for depth, point_utm, lat, lon in map(None, depths,
                                          points_utm, latitudes,
                                          longitudes):

        fid.write(str(i) +d+ str(lat) +d+ str(lon) +d+ str(point_utm[0]) +d+ \
                  str(point_utm[1]) +d+ str(depth) + "\n")
        i += 1

    # Time dependent data
    if location_index is not None:
        time_step = a_mux.time_step
        i = 0
        # Title
        fid.write('time' +d+ 'HA depth m' +d+ \
                  'UA momentum East x m/sec' +d+ 'VA momentum North y m/sec' \
                  + "\n")
        for HA, UA, VA in map(None, mux['HA'], mux['UA'], mux['VA']):
            fid.write(str(i*time_step) +d+ str(HA[location_index]) +d+ \
                      str(UA[location_index]) +d+ str(VA[location_index]) \
                      + "\n")
            i += 1

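# The hand-rolled comma joining in urs2txt could equally use the csv module
# imported at the top of this file; a sketch (hypothetical field values):
#
#   writer = csv.writer(open(basename_in + '.txt', 'wb'))
#   writer.writerow(['location_index', 'lat', 'long', 'Easting', 'Northing'])
#   writer.writerow([li, latitudes[li], longitudes[li],
#                    points_utm[li][0], points_utm[li][1]])
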
class Urs_points:
    """
    Read the info in URS mux files.

    The file names map to quantities as follows:
    z-mux is height above sea level, m
    e-mux is velocity in the easterly direction, m/s
    n-mux is velocity in the north-south direction, m/s (stored with
    south as the positive axis, hence the -1 factor applied when
    writing ymomentum above)
    """
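    # The binary layout read by __init__ below, as implied by the unpack and
    # array.read calls: int32 points_num, int32 time_step_count,
    # float32 time_step, then points_num (lon, lat, depth) float32 triples,
    # followed by one float32 per point for each time step (read by next()).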
    def __init__(self, urs_file):
        self.iterated = False
        columns = 3 # long, lat, depth
        mux_file = open(urs_file, 'rb')

        # Number of points/stations
        (self.points_num,) = unpack('i', mux_file.read(4))

        # nt, int - Number of time steps
        (self.time_step_count,) = unpack('i', mux_file.read(4))

        # dt, float - time step, seconds
        (self.time_step,) = unpack('f', mux_file.read(4))

        msg = "Bad data in the urs file."
        if self.points_num < 0 or self.time_step_count < 0 \
               or self.time_step < 0:
            mux_file.close()
            raise ANUGAError, msg

        # The depth is in metres: the distance from the sea surface
        # down to the sea bottom.
        lonlatdep = p_array.array('f')
        lonlatdep.read(mux_file, columns * self.points_num)
        lonlatdep = array(lonlatdep, typecode=Float)
        lonlatdep = reshape(lonlatdep, (self.points_num, columns))
        self.lonlatdep = lonlatdep

        self.mux_file = mux_file

    def __iter__(self):
        """
        Iterate over the quantity data, one time slice at a time.

        Note: You can only iterate once over an object.

        Each iteration returns the quantity values for one time slice.
        """
        msg = "You can only iterate once over a urs file."
        assert not self.iterated, msg
        self.iter_time_step = 0
        self.iterated = True
        return self

    def next(self):
        if self.time_step_count == self.iter_time_step:
            self.close()
            raise StopIteration
        # Read in a time slice from the mux file
        hz_p_array = p_array.array('f')
        hz_p_array.read(self.mux_file, self.points_num)
        hz_p = array(hz_p_array, typecode=Float)
        self.iter_time_step += 1

        return hz_p

    def close(self):
        self.mux_file.close()

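# Minimal usage sketch for Urs_points (hypothetical file name):
#
#   mux = Urs_points('event-z-mux')
#   for hz in mux:       # one Float array of length points_num per time slice
#       print hz[0]      # the quantity at the first point/station
#
# The iterator closes the file once the time slices are exhausted.
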
    #### END URS UNGRIDDED 2 SWW ###

#-------------------------------------------------------------
if __name__ == "__main__":
    pass