source: anuga_core/source/anuga/shallow_water/data_manager.py @ 4305

Last change on this file since 4305 was 4303, checked in by duncan, 18 years ago

finishing a momentum check and adding another way of reading txt boundary files.

File size: 156.6 KB
"""datamanager.py - input output for AnuGA


This module takes care of reading and writing datafiles such as topographies,
model output, etc


Formats used within AnuGA:

.sww: NetCDF format for storing model output f(t,x,y)
.tms: NetCDF format for storing time series f(t)

.xya: ASCII format for storing arbitrary points and associated attributes
.pts: NetCDF format for storing arbitrary points and associated attributes

.asc: ASCII format of regular DEMs as output from ArcView
.prj: Associated ArcView file giving more meta data for asc format
.ers: ERMapper header format of regular DEMs for ArcView

.dem: NetCDF representation of regular DEM data

.tsh: ASCII format for storing meshes and associated boundary and region info
.msh: NetCDF format for storing meshes and associated boundary and region info

.nc: Native Ferret NetCDF format
.geo: Houdini's ASCII geometry format (?)


A typical dataflow can be described as follows

Manually created files:
ASC, PRJ:     Digital elevation models (gridded)
TSH:          Triangular meshes (e.g. created from anuga.pmesh)
NC:           Model outputs for use as boundary conditions (e.g. from MOST)


AUTOMATICALLY CREATED FILES:

ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEMs to native pts file

NC -> SWW: Conversion of MOST boundary files to boundary sww

PTS + TSH -> TSH with elevation: Least squares fit

TSH -> SWW:  Conversion of TSH to sww viewable using Swollen

TSH + Boundary SWW -> SWW: Simulation using abstract_2d_finite_volumes

"""
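
# Editor's sketch of the dataflow described above, using only routines
# defined later in this module.  All filenames and clip extents are
# hypothetical.
def _example_typical_dataflow():
    # DEM -> PTS: clip a gridded DEM to a native point file
    dem2pts('mt_eliza_dem', easting_min=722000, easting_max=730000,
            northing_min=5893000, northing_max=5900000, verbose=True)
    # XYA -> PTS: arbitrary points with an elevation attribute
    xya2pts('survey_points', attribute_name='elevation')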

import exceptions
class TitleValueError(exceptions.Exception): pass
class DataMissingValuesError(exceptions.Exception): pass
class DataFileNotOpenError(exceptions.Exception): pass
class DataTimeError(exceptions.Exception): pass
class DataDomainError(exceptions.Exception): pass



import csv
import os
from struct import unpack
import array as p_array
#import time, os
from os import sep, path

from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
     searchsorted, zeros, allclose, around, reshape, transpose, sort
from Scientific.IO.NetCDF import NetCDFFile


from anuga.coordinate_transforms.redfearn import redfearn, \
     convert_from_latlon_to_utm
from anuga.coordinate_transforms.geo_reference import Geo_reference
from anuga.geospatial_data.geospatial_data import Geospatial_data
from anuga.config import minimum_storable_height as default_minimum_storable_height
from anuga.utilities.numerical_tools import ensure_numeric, mean
from anuga.caching.caching import myhash
from anuga.utilities.anuga_exceptions import ANUGAError
from anuga.shallow_water import Domain
from anuga.abstract_2d_finite_volumes.pmesh2domain import \
     pmesh_to_domain_instance

def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s

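
# Editor's sketch: expected behaviour of make_filename on a hypothetical
# label (blanks stripped, spaces to underscores, parentheses removed).
def _example_make_filename():
    assert make_filename(' My Run (final) ') == 'My_Run_final'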

def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            exitcode = os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode = os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print 'Using directory %s instead' %path

    return(path)


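
# Editor's sketch (hypothetical path): check_dir expands '~', creates the
# directory if needed and returns the verified path with a trailing
# separator, falling back to /tmp/ (or C:) when creation fails.
def _example_check_dir():
    datadir = check_dir('~/anuga_output', verbose=1)
    print 'Writing results to', datadir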

def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" %X

        os.rmdir(path)



def create_filename(datadir, filename, format, size=None, time=None):

    import os
    #from anuga.config import data_dir

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' %size

    if time is not None:
        FN += '_time%.2f' %time

    FN += '.' + format
    return FN

def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob

    import os
    #from anuga.config import data_dir

    dir = check_dir(datadir)

    # check_dir returns a path with a trailing separator, and the size tag
    # must match the one produced by create_filename
    pattern = dir + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)



#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], '''Mode %s must be either:''' %mode +\
                                        '''   'w' (write)'''+\
                                        '''   'r' (read)''' +\
                                        '''   'a' (append)'''

        #Create filename
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0
        self.domain = domain



        # Exclude ghosts in case this is a parallel domain
        self.number_of_nodes = domain.number_of_full_nodes
        self.number_of_volumes = domain.number_of_full_triangles
        #self.number_of_volumes = len(domain)




        #FIXME: Should we have a general set_precision function?



#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision for quantities
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)

        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            self.minimum_storable_height = default_minimum_storable_height

        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from anuga.abstract_2d_finite_volumes suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            if domain.smooth is True:
                #fid.createDimension('number_of_points', len(domain.vertexlist))
                fid.createDimension('number_of_points', self.domain.number_of_full_nodes)

                # FIXME(Ole): This will cause sww files for parallel domains to
                # have ghost nodes stored (but not used by triangles).
                # To clean this up, we have to change get_vertex_values and friends in
                # quantity.py (but I can't be bothered right now)
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', Float,  # Always use full precision lest two timesteps
                                               # close to each other may appear as the same step
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision=self.precision)

        volumes[:] = V.astype(volumes.typecode())
        x[:] = X
        y[:] = Y
        z[:] = Z

        #FIXME: Backwards compatibility
        z = fid.variables['z']
        z[:] = Z
        ################################



        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        from Numeric import choose

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #In order to get the file name and start time correct,
            #I change the domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #Write a filename addon that won't break Swollen's reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #Remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename+filename_ext)


            #Change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #Build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #Set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)


                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #print z[:]
                    #print A-z[:]


                    A = choose(A-z[:] >= self.minimum_storable_height,
                               (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
            #eval( name + '[i,:] = A.astype(self.precision)' )
            #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()


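
# Editor's sketch: how a simulation loop is expected to drive the writer
# above.  'domain' is assumed to be a live shallow_water Domain instance;
# in normal use get_dataobject (defined below) creates the writer.
def _example_store_sww(domain):
    writer = Data_format_sww(domain, mode='w')
    writer.store_connectivity()    # static mesh and elevation
    writer.store_timestep(['stage', 'xmomentum', 'ymomentum'])  # per step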

#Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y coordinates and connectivity of the triangles
        constituting the mesh.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        # Note: no static 'z' variable is defined in this format;
        # elevation is stored per timestep with the other quantities.

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                  %self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        stage = fid.variables['stage']
        i = len(time)

        #Store stage
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                  precision = self.precision)

        stage[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()




#Function for storing xya output
#FIXME Not done yet for this version
#This is obsolete.  Use geo_spatial_data instead
class Data_format_xya(Data_format):
    """Generic interface to data formats
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        if domain.smooth is True:
            number_of_points = len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file
        """
        pass


#### NED is national exposure database

LAT_TITLE = 'LATITUDE'
LONG_TITLE = 'LONGITUDE'
X_TITLE = 'x'
Y_TITLE = 'y'
class Exposure_csv:
    def __init__(self,file_name, latitude_title=LAT_TITLE,
                 longitude_title=LONG_TITLE, is_x_y_locations=None,
                 x_title=X_TITLE, y_title=Y_TITLE,
                 refine_polygon=None):
        """
        This class is for handling the exposure csv file.
        It reads the file in and converts the lats and longs to a geospatial
        data object.
        Use the methods to read and write columns.

        The format of the csv files it reads is:
           The first row is a title row.
           Commas are the delimiters.
           Each column is a 'set' of data.

        Feel free to use/expand it to read other csv files.


        It is not for adding and deleting rows.

        Can geospatial handle string attributes? It's not made for them.
        Currently it can't load and save string attributes.

        So just use geospatial to hold the x, y and georef? Bad, since
        different attributes are in different structures.  Not so bad, the
        info to write if the .csv file is saved is in attribute_dic.

        The location info is in the geospatial attribute.


        """
        self._file_name = file_name
        self._geospatial = None #

        # self._attribute_dic is a dictionary.
        #The keys are the column titles.
        #The values are lists of column data

        # self._title_index_dic is a dictionary.
        #The keys are the column titles.
        #The values are the index positions of file columns.
        self._attribute_dic, self._title_index_dic = \
        self._load_exposure_csv(self._file_name)
        try:
            #Have code here that handles caps or lower
            lats = self._attribute_dic[latitude_title]
            longs = self._attribute_dic[longitude_title]

        except KeyError:
            # maybe a warning..
            #Let's see if this works..
            if False != is_x_y_locations:
                is_x_y_locations = True
            pass
        else:
            self._geospatial = Geospatial_data(latitudes = lats,
                 longitudes = longs)

        if is_x_y_locations is True:
            if self._geospatial is not None:
                pass #fixme throw an error
            try:
                xs = self._attribute_dic[x_title]
                ys = self._attribute_dic[y_title]
                points = [[float(i),float(j)] for i,j in map(None,xs,ys)]
            except KeyError:
                # maybe a warning..
                msg = "Could not find location information."
                raise TitleValueError, msg
            else:
                self._geospatial = Geospatial_data(data_points=points)

        # create a list of points that are in the refining_polygon
        # described by a list of indexes representing the points

    def __cmp__(self, other):
        #print "self._attribute_dic",self._attribute_dic
        #print "other._attribute_dic",other._attribute_dic
        #print "self._title_index_dic", self._title_index_dic
        #print "other._title_index_dic", other._title_index_dic

        #check that other is an instance of this class
        if isinstance(self, type(other)):
            result = cmp(self._attribute_dic, other._attribute_dic)
            if result <> 0:
                return result
            # The order of the columns is important. Therefore..
            result = cmp(self._title_index_dic, other._title_index_dic)
            if result <> 0:
                return result
            for self_ls, other_ls in map(None, self._attribute_dic, \
                    other._attribute_dic):
                result = cmp(self._attribute_dic[self_ls],
                             other._attribute_dic[other_ls])
                if result <> 0:
                    return result
            return 0
        else:
            return 1

    def _load_exposure_csv(self, file_name):
        """
        Load in the csv as a dic, title as key and column info as value.
        Also create a dic, title as key and column index as value,
        to keep track of the column order.
        """
        #
        attribute_dic = {}
        title_index_dic = {}
        titles_stripped = [] # list of titles

        reader = csv.reader(file(file_name))

        # Read in and manipulate the title info
        titles = reader.next()
        for i,title in enumerate(titles):
            titles_stripped.append(title.strip())
            title_index_dic[title.strip()] = i
        title_count = len(titles_stripped)
        #print "title_index_dic",title_index_dic


        #create a dic of column values, indexed by column title
        for line in reader:
            if len(line) <> title_count:
                raise IOError #FIXME make this nicer
            for i, value in enumerate(line):
                attribute_dic.setdefault(titles_stripped[i],[]).append(value)

        return attribute_dic, title_index_dic

    def get_column(self, column_name, use_refind_polygon=False):
        """
        Given a column name return a list of the column values.

        Note, the type of the values will be String!
        Do this to change a list of strings to a list of floats:
        time = [float(x) for x in time]

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        if not self._attribute_dic.has_key(column_name):
            msg = 'There is no column called %s!' %column_name
            raise TitleValueError, msg
        return self._attribute_dic[column_name]


    def get_value(self, value_column_name,
                  known_column_name,
                  known_values,
                  use_refind_polygon=False):
        """
        Do linear interpolation on the known_column_name column, using the
        known_values, to return a value of the value_column_name column.
        """
        pass


    def get_location(self, use_refind_polygon=False):
        """
        Return a geospatial object which describes the
        locations of the location file.

        Note, if there is no location info, this returns None.

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        return self._geospatial

    def set_column(self, column_name, column_values, overwrite=False):
        """
        Add a column to the 'end' (with the right most column being the end)
        of the csv file.

        Set overwrite to True if you want to overwrite a column.

        Note, in column_name white space is removed and case is not checked.
        Precondition:
        The column_name and column_values cannot have commas in them.
        """

        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        if len(column_values) <> value_row_count:
            msg = 'The number of column values must equal the number of rows.'
            raise DataMissingValuesError, msg

        if self._attribute_dic.has_key(column_name):
            if not overwrite:
                msg = 'Column name %s already in use!' %column_name
                raise TitleValueError, msg
        else:
            # New title.  Add it to the title index.
            self._title_index_dic[column_name] = len(self._title_index_dic)
        self._attribute_dic[column_name] = column_values
        #print "self._title_index_dic[column_name]",self._title_index_dic

    def save(self, file_name=None):
        """
        Save the exposure csv file
        """
        if file_name is None:
            file_name = self._file_name

        fd = open(file_name,'wb')
        writer = csv.writer(fd)

        #Write the title to a csv file
        line = [None]* len(self._title_index_dic)
        for title in self._title_index_dic.iterkeys():
            line[self._title_index_dic[title]]= title
        writer.writerow(line)

        # Write the values to a csv file
        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        for row_i in range(value_row_count):
            line = [None]* len(self._title_index_dic)
            for title in self._title_index_dic.iterkeys():
                line[self._title_index_dic[title]]= \
                     self._attribute_dic[title][row_i]
            writer.writerow(line)

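
# Editor's sketch: round-tripping a (hypothetical) exposure file.  The
# column titles used here are made up for illustration.
def _example_exposure_csv():
    exposure = Exposure_csv('exposure.csv')
    depths = exposure.get_column('MAX DEPTH')   # values come back as strings
    depths = [float(x) for x in depths]
    exposure.set_column('MAX DEPTH SQUARED', [x*x for x in depths])
    exposure.save(file_name='exposure_augmented.csv')
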
#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected.

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N==3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))

    outfile.close()

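
# Editor's sketch: write a single disconnected triangle to 'example.obj'.
# The coordinates are made up.
def _example_write_obj():
    from Numeric import array, Float
    x = array([[0.0, 1.0, 0.0]], Float)   # x coordinates of the three vertices
    y = array([[0.0, 0.0, 1.0]], Float)   # y coordinates
    z = array([[0.0, 0.0, 0.0]], Float)   # flat bed
    write_obj('example', x, y, z)         # produces example.obj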

#########################################################
#Conversion routines
#########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)

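
# Editor's sketch: convert a (hypothetical) 100-triangle result file
# './run_size100.sww' into obj files, one per timestep.
def _example_sww2obj():
    sww2obj('run', 100)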

def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from anuga.config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float,tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        lines = open(data_dir+os.sep+filename,'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError, 'Hmmmm'



        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float,tokens)

            for j in range(3):
                z[i,j] = values[j]

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)


def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read NetCDF filename1, pick timesteps first:step:last and save to
    NetCDF file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()

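
# Editor's sketch: thin out a (hypothetical) result file by keeping every
# 10th timestep.
def _example_filter_netcdf():
    filter_netcdf('run.sww', 'run_decimated.sww', first=0, step=10)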

#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of the Data_format class matching domain.format
    """

    cls = eval('Data_format_%s' %domain.format)
    return cls(domain, mode)

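# Editor's sketch: an eval-free equivalent of the dispatch above, for the
# three formats defined in this module.
def _example_get_dataobject(domain):
    writers = {'sww': Data_format_sww,
               'cpt': Data_format_cpt,
               'xya': Data_format_xya}
    return writers[domain.format](domain, 'w')
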
#FIXME move into geospatial.  There should only be one method that
# reads xya, and writes pts.
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Get NetCDF
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print 'Reading xya points from %s' %(root + ext)

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        try:
            assert len(fields) == 3
        except:
            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line)

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except:
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    write_ptsfile(ptsname, points, attribute, attribute_name)


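
# Editor's sketch: convert a (hypothetical) 'survey.xya' to 'survey.pts',
# keeping every 2nd point and flipping depths to elevations via z_func.
def _example_xya2pts():
    xya2pts('survey', stride=2, z_func=lambda z: -z, verbose=True)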

######Obsoleted by export_points in load_mesh
def write_ptsfile(ptsname, points, attribute, attribute_name = None,
                  zone=None, xllcorner=None, yllcorner=None):
    """Write points and associated attribute to pts (NetCDF) format
    """

    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'

    from Numeric import Float

    if attribute_name is None:
        attribute_name = 'attribute'


    from Scientific.IO.NetCDF import NetCDFFile

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')


    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'


    #Georeferencing
    from anuga.coordinate_transforms.geo_reference import Geo_reference
    if zone is None:
        assert xllcorner is None, 'xllcorner must be None'
        assert yllcorner is None, 'yllcorner must be None'
        Geo_reference().write_NetCDF(outfile)
    else:
        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)



    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    # Get handles to the variables
    nc_points = outfile.variables['points']
    nc_attribute = outfile.variables[attribute_name]

    #Store data
    nc_points[:, :] = points
    nc_attribute[:] = attribute

    outfile.close()


def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False,):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result

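
# Editor's sketch: clip the (hypothetical) DEM from the docstring example
# to a sub-region and cache the conversion.
def _example_dem2pts():
    dem2pts('perth_dem', basename_out='perth_subset',
            easting_min=722000, easting_max=730000,
            northing_min=5893000, northing_max=5900000,
            use_cache=True, verbose=True)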

def _dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                          'of spatial point data'
    #assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # calculating number of NODATA_values for each row in clipped region
    #FIXME: use array operations to do faster
    nn = 0
    k = 0
    i1_0 = 0
    j1_0 = 0
    thisj = 0
    thisi = 0
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    index1 = j1_0
    index2 = thisj

    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints-nn

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]

    if verbose:
        print 'There are %d values in the elevation' %totalnopoints
        print 'There are %d values in the clipped elevation' %clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' %nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1
    #Store data
    global_index = 0
    #for i in range(nrows):
    for i in range(i1_0,thisi+1,1):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index

        v = dem_elevation_r[i,index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA #ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv #ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        #for j in range(ncols):
        for j in range(j1_0,index2+1,1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] <> NODATA_value:
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()



def _read_hecras_cross_sections(lines):
    """Return block of surface lines for each cross section
    Starts with SURFACE LINE,
    Ends with END CROSS-SECTION
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if lines[i].strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if lines[i].strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])




1615def hecras_cross_sections2pts(basename_in,
1616                              basename_out=None,
1617                              verbose=False):
1618    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1619
1620    Example:
1621
1622
1623# RAS export file created on Mon 15Aug2005 11:42
1624# by HEC-RAS Version 3.1.1
1625
1626BEGIN HEADER:
1627  UNITS: METRIC
1628  DTM TYPE: TIN
1629  DTM: v:\1\cit\perth_topo\river_tin
1630  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1631  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1632  MAP PROJECTION: UTM
1633  PROJECTION ZONE: 50
1634  DATUM: AGD66
1635  VERTICAL DATUM:
1636  NUMBER OF REACHES:  19
1637  NUMBER OF CROSS-SECTIONS:  14206
1638END HEADER:
1639
1640
1641Only the SURFACE LINE data of the following form will be utilised
1642
1643  CROSS-SECTION:
1644    STREAM ID:Southern-Wungong
1645    REACH ID:Southern-Wungong
1646    STATION:19040.*
1647    CUT LINE:
1648      405548.671603161 , 6438142.7594925
1649      405734.536092045 , 6438326.10404912
1650      405745.130459356 , 6438331.48627354
1651      405813.89633823 , 6438368.6272789
1652    SURFACE LINE:
1653     405548.67,   6438142.76,   35.37
1654     405552.24,   6438146.28,   35.41
1655     405554.78,   6438148.78,   35.44
1656     405555.80,   6438149.79,   35.44
1657     405559.37,   6438153.31,   35.45
1658     405560.88,   6438154.81,   35.44
1659     405562.93,   6438156.83,   35.42
1660     405566.50,   6438160.35,   35.38
1661     405566.99,   6438160.83,   35.37
1662     ...
1663   END CROSS-SECTION
1664
1665    Convert to NetCDF pts format which is
1666
1667    points:  (Nx2) Float array
1668    elevation: N Float array
1669    """
1670
1671    #FIXME: Can this be written feasibly using write_pts?
1672
1673    import os
1674    from Scientific.IO.NetCDF import NetCDFFile
1675    from Numeric import Float, zeros, reshape
1676
1677    root = basename_in
1678
1679    #Get ASCII file
1680    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1681
1682    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1683
1684    lines = infile.readlines()
1685    infile.close()
1686
1687    if verbose: print 'Converting to pts format'
1688
1689    i = 0
1690    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1691        i += 1
1692
1693    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1694    i += 1
1695
1696    assert lines[i].strip().upper().startswith('UNITS:')
1697    units = lines[i].strip().split()[1]
1698    i += 1
1699
1700    assert lines[i].strip().upper().startswith('DTM TYPE:')
1701    i += 1
1702
1703    assert lines[i].strip().upper().startswith('DTM:')
1704    i += 1
1705
1706    assert lines[i].strip().upper().startswith('STREAM')
1707    i += 1
1708
1709    assert lines[i].strip().upper().startswith('CROSS')
1710    i += 1
1711
1712    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
    projection = lines[i].strip().split(':')[1].strip()
1714    i += 1
1715
1716    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1717    zone = int(lines[i].strip().split(':')[1])
1718    i += 1
1719
1720    assert lines[i].strip().upper().startswith('DATUM:')
    datum = lines[i].strip().split(':')[1].strip()
1722    i += 1
1723
1724    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1725    i += 1
1726
1727    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1728    i += 1
1729
1730    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1731    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1732    i += 1
1733
1734
1735    #Now read all points
1736    points = []
1737    elevation = []
1738    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1739        for k, entry in enumerate(entries):
1740            points.append(entry[:2])
1741            elevation.append(entry[2])
1742
1743
    msg = 'Actual number of cross sections %d does not match reported number %d'\
          %(j+1, number_of_cross_sections)
    assert j+1 == number_of_cross_sections, msg
1747
1748    #Get output file
    if basename_out is None:
1750        ptsname = root + '.pts'
1751    else:
1752        ptsname = basename_out + '.pts'
1753
1754    #FIXME (DSG-ON): use loadASCII export_points_file
1755    if verbose: print 'Store to NetCDF file %s' %ptsname
1756    # NetCDF file definition
1757    outfile = NetCDFFile(ptsname, 'w')
1758
1759    #Create new file
1760    outfile.institution = 'Geoscience Australia'
1761    outfile.description = 'NetCDF pts format for compact and portable ' +\
1762                          'storage of spatial point data derived from HEC-RAS'
1763
1764    #Georeferencing
1765    outfile.zone = zone
1766    outfile.xllcorner = 0.0
1767    outfile.yllcorner = 0.0
1768    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
1769    outfile.false_northing = 1000000
1770
1771    outfile.projection = projection
1772    outfile.datum = datum
1773    outfile.units = units
1774
1775
1776    outfile.createDimension('number_of_points', len(points))
1777    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1778
1779    # variable definitions
1780    outfile.createVariable('points', Float, ('number_of_points',
1781                                             'number_of_dimensions'))
1782    outfile.createVariable('elevation', Float, ('number_of_points',))
1783
    # Store data
1785    outfile.variables['points'][:] = points
1786    outfile.variables['elevation'][:] = elevation
1787
1788    outfile.close()
1789
1790
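# Example usage (illustrative sketch; 'perth_xsections' is a hypothetical
# basename for a HEC-RAS .sdf export of the form shown in the docstring above):
def _example_hecras_cross_sections2pts():
    hecras_cross_sections2pts('perth_xsections',
                              basename_out='perth_elevation',
                              verbose=True)
    #Reads perth_xsections.sdf and writes perth_elevation.pts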
1791
1792def sww2dem(basename_in, basename_out = None,
1793            quantity = None,
1794            timestep = None,
1795            reduction = None,
1796            cellsize = 10,
1797            NODATA_value = -9999,
1798            easting_min = None,
1799            easting_max = None,
1800            northing_min = None,
1801            northing_max = None,
1802            verbose = False,
1803            origin = None,
1804            datum = 'WGS84',
            format = 'ers'):
1806
1807    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)
1808
1809    Example (ASC):
1810
1811    ncols         3121
1812    nrows         1800
1813    xllcorner     722000
1814    yllcorner     5893000
1815    cellsize      25
1816    NODATA_value  -9999
1817    138.3698 137.4194 136.5062 135.5558 ..........
1818
    Also writes an accompanying file with the same basename_out but extension .prj
    used to fix the UTM zone, datum, false northings and eastings.
1821
1822    The prj format is assumed to be as
1823
1824    Projection    UTM
1825    Zone          56
1826    Datum         WGS84
1827    Zunits        NO
1828    Units         METERS
1829    Spheroid      WGS84
1830    Xshift        0.0000000000
1831    Yshift        10000000.0000000000
1832    Parameters
1833
1834
1835    The parameter quantity must be the name of an existing quantity or
1836    an expression involving existing quantities. The default is
1837    'elevation'.
1838
1839    if timestep (an index) is given, output quantity at that timestep
1840
1841    if reduction is given use that to reduce quantity over all timesteps.
1842
    datum sets the horizontal datum written to the output (default 'WGS84')
1844
1845    format can be either 'asc' or 'ers'
1846    """
1847
1848    import sys
1849    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1850    from Numeric import array2string
1851
1852    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
1853    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
1854
1855    msg = 'Format must be either asc or ers'
1856    assert format.lower() in ['asc', 'ers'], msg
1857
1858
1859    false_easting = 500000
1860    false_northing = 10000000
1861
1862    if quantity is None:
1863        quantity = 'elevation'
1864
1865    if reduction is None:
1866        reduction = max
1867
1868    if basename_out is None:
1869        basename_out = basename_in + '_%s' %quantity
1870
1871    swwfile = basename_in + '.sww'
1872    demfile = basename_out + '.' + format
    # Note the use of a .ers extension is optional (write_ermapper_grid will
    # deal with either option)
1875
1876    #Read sww file
1877    if verbose: print 'Reading from %s' %swwfile
1878    from Scientific.IO.NetCDF import NetCDFFile
1879    fid = NetCDFFile(swwfile)
1880
1881    #Get extent and reference
1882    x = fid.variables['x'][:]
1883    y = fid.variables['y'][:]
1884    volumes = fid.variables['volumes'][:]
1885    if timestep is not None:
1886        times = fid.variables['time'][timestep]
1887    else:
1888        times = fid.variables['time'][:]
1889
1890    number_of_timesteps = fid.dimensions['number_of_timesteps']
1891    number_of_points = fid.dimensions['number_of_points']
1892   
1893    if origin is None:
1894
1895        #Get geo_reference
1896        #sww files don't have to have a geo_ref
1897        try:
1898            geo_reference = Geo_reference(NetCDFObject=fid)
1899        except AttributeError, e:
1900            geo_reference = Geo_reference() #Default georef object
1901
1902        xllcorner = geo_reference.get_xllcorner()
1903        yllcorner = geo_reference.get_yllcorner()
1904        zone = geo_reference.get_zone()
1905    else:
1906        zone = origin[0]
1907        xllcorner = origin[1]
1908        yllcorner = origin[2]
1909
1910
1911
1912    #FIXME: Refactor using code from file_function.statistics
1913    #Something like print swwstats(swwname)
1914    if verbose:
1915        print '------------------------------------------------'
1916        print 'Statistics of SWW file:'
1917        print '  Name: %s' %swwfile
1918        print '  Reference:'
1919        print '    Lower left corner: [%f, %f]'\
1920              %(xllcorner, yllcorner)
1921        if timestep is not None:
1922            print '    Time: %f' %(times)
1923        else:
1924            print '    Start time: %f' %fid.starttime[0]
1925        print '  Extent:'
1926        print '    x [m] in [%f, %f], len(x) == %d'\
1927              %(min(x.flat), max(x.flat), len(x.flat))
1928        print '    y [m] in [%f, %f], len(y) == %d'\
1929              %(min(y.flat), max(y.flat), len(y.flat))
1930        if timestep is not None:
1931            print '    t [s] = %f, len(t) == %d' %(times, 1)
1932        else:
1933            print '    t [s] in [%f, %f], len(t) == %d'\
1934              %(min(times), max(times), len(times))
1935        print '  Quantities [SI units]:'
1936        for name in ['stage', 'xmomentum', 'ymomentum']:
1937            q = fid.variables[name][:].flat
1938            if timestep is not None:
1939                q = q[timestep*len(x):(timestep+1)*len(x)]
1940            if verbose: print '    %s in [%f, %f]' %(name, min(q), max(q))
1941        for name in ['elevation']:
1942            q = fid.variables[name][:].flat
1943            if verbose: print '    %s in [%f, %f]' %(name, min(q), max(q))
1944           
1945    # Get quantity and reduce if applicable
1946    if verbose: print 'Processing quantity %s' %quantity
1947
1948    # Turn NetCDF objects into Numeric arrays
1949    quantity_dict = {}
1950    for name in fid.variables.keys():
1951        quantity_dict[name] = fid.variables[name][:] 
1952
1953
1954    # Convert quantity expression to quantities found in sww file   
1955    q = apply_expression_to_dictionary(quantity, quantity_dict)
1956
1957    if len(q.shape) == 2:
1958        #q has a time component and needs to be reduced along
1959        #the temporal dimension
1960        if verbose: print 'Reducing quantity %s' %quantity
1961        q_reduced = zeros( number_of_points, Float )
1962       
1963        if timestep is not None:
1964            for k in range(number_of_points):
1965                q_reduced[k] = q[timestep,k] 
1966        else:
1967            for k in range(number_of_points):
1968                q_reduced[k] = reduction( q[:,k] )
1969
1970        q = q_reduced
1971
1972    #Post condition: Now q has dimension: number_of_points
1973    assert len(q.shape) == 1
1974    assert q.shape[0] == number_of_points
1975
1976
1977    if verbose:
1978        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1979
1980
1981    #Create grid and update xll/yll corner and x,y
1982
1983    #Relative extent
1984    if easting_min is None:
1985        xmin = min(x)
1986    else:
1987        xmin = easting_min - xllcorner
1988
1989    if easting_max is None:
1990        xmax = max(x)
1991    else:
1992        xmax = easting_max - xllcorner
1993
1994    if northing_min is None:
1995        ymin = min(y)
1996    else:
1997        ymin = northing_min - yllcorner
1998
1999    if northing_max is None:
2000        ymax = max(y)
2001    else:
2002        ymax = northing_max - yllcorner
2003
2004
2005
2006    if verbose: print 'Creating grid'
2007    ncols = int((xmax-xmin)/cellsize)+1
2008    nrows = int((ymax-ymin)/cellsize)+1
2009
2010
2011    #New absolute reference and coordinates
2012    newxllcorner = xmin+xllcorner
2013    newyllcorner = ymin+yllcorner
2014
2015    x = x+xllcorner-newxllcorner
2016    y = y+yllcorner-newyllcorner
2017   
2018    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2019    assert len(vertex_points.shape) == 2
2020
2021    grid_points = zeros ( (ncols*nrows, 2), Float )
2022
2023
2024    for i in xrange(nrows):
2025        if format.lower() == 'asc':
2026            yg = i*cellsize
        else:
            #This will flip the order of the y values for ers
            yg = (nrows-i)*cellsize
2030
2031        for j in xrange(ncols):
2032            xg = j*cellsize
2033            k = i*ncols + j
2034
2035            grid_points[k,0] = xg
2036            grid_points[k,1] = yg
2037
2038    #Interpolate
2039    #from least_squares import Interpolation
2040    from anuga.fit_interpolate.interpolate import Interpolate
2041
2042    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2043
2044    #Interpolate using quantity values
2045    if verbose: print 'Interpolating'
2046    grid_values = interp.interpolate(q, grid_points).flat
2047
2048
2049    if verbose:
2050        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
2051                                                      max(grid_values))
2052
2053    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2054    P = interp.mesh.get_boundary_polygon()
2055    outside_indices = outside_polygon(grid_points, P, closed=True)
2056
2057    for i in outside_indices:
2058        grid_values[i] = NODATA_value
2059
2060
2061
2062
2063    if format.lower() == 'ers':
2064        # setup ERS header information
2065        grid_values = reshape(grid_values,(nrows, ncols))
2066        header = {}
2067        header['datum'] = '"' + datum + '"'
2068        # FIXME The use of hardwired UTM and zone number needs to be made optional
2069        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
2070        header['projection'] = '"UTM-' + str(zone) + '"'
2071        header['coordinatetype'] = 'EN'
2072        if header['coordinatetype'] == 'LL':
2073            header['longitude'] = str(newxllcorner)
2074            header['latitude'] = str(newyllcorner)
2075        elif header['coordinatetype'] == 'EN':
2076            header['eastings'] = str(newxllcorner)
2077            header['northings'] = str(newyllcorner)
2078        header['nullcellvalue'] = str(NODATA_value)
2079        header['xdimension'] = str(cellsize)
2080        header['ydimension'] = str(cellsize)
2081        header['value'] = '"' + quantity + '"'
2082        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
2083
2084
2085        #Write
2086        if verbose: print 'Writing %s' %demfile
2087        import ermapper_grids
2088        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
2089
2090        fid.close()
2091    else:
2092        #Write to Ascii format
2093
2094        #Write prj file
2095        prjfile = basename_out + '.prj'
2096
2097        if verbose: print 'Writing %s' %prjfile
2098        prjid = open(prjfile, 'w')
2099        prjid.write('Projection    %s\n' %'UTM')
2100        prjid.write('Zone          %d\n' %zone)
2101        prjid.write('Datum         %s\n' %datum)
2102        prjid.write('Zunits        NO\n')
2103        prjid.write('Units         METERS\n')
2104        prjid.write('Spheroid      %s\n' %datum)
2105        prjid.write('Xshift        %d\n' %false_easting)
2106        prjid.write('Yshift        %d\n' %false_northing)
2107        prjid.write('Parameters\n')
2108        prjid.close()
2109
2110
2111
2112        if verbose: print 'Writing %s' %demfile
2113
2114        ascid = open(demfile, 'w')
2115
2116        ascid.write('ncols         %d\n' %ncols)
2117        ascid.write('nrows         %d\n' %nrows)
2118        ascid.write('xllcorner     %d\n' %newxllcorner)
2119        ascid.write('yllcorner     %d\n' %newyllcorner)
2120        ascid.write('cellsize      %f\n' %cellsize)
2121        ascid.write('NODATA_value  %d\n' %NODATA_value)
2122
2123
2124        #Get bounding polygon from mesh
2125        #P = interp.mesh.get_boundary_polygon()
2126        #inside_indices = inside_polygon(grid_points, P)
2127
2128        for i in range(nrows):
2129            if verbose and i%((nrows+10)/10)==0:
2130                print 'Doing row %d of %d' %(i, nrows)
2131
2132            base_index = (nrows-i-1)*ncols
2133
2134            slice = grid_values[base_index:base_index+ncols]
2135            s = array2string(slice, max_line_width=sys.maxint)
2136            ascid.write(s[1:-1] + '\n')
2137
2138
2139            #print
2140            #for j in range(ncols):
2141            #    index = base_index+j#
2142            #    print grid_values[index],
2143            #    ascid.write('%f ' %grid_values[index])
2144            #ascid.write('\n')
2145
2146        #Close
2147        ascid.close()
2148        fid.close()
2149
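# Example usage (illustrative sketch; the basename and easting bounds are made
# up): export the maximum stage over all timesteps from runup.sww as an
# ArcView ASCII grid with 5 m cells.
def _example_sww2dem():
    sww2dem('runup',
            basename_out='runup_max_stage',
            quantity='stage',            #Or an expression, e.g. 'stage-elevation'
            reduction=max,               #Collapse the time dimension
            cellsize=5,
            easting_min=308500, easting_max=310000,
            format='asc')
    #Writes runup_max_stage.asc and the accompanying runup_max_stage.prj
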
2150#Backwards compatibility
2151def sww2asc(basename_in, basename_out = None,
2152            quantity = None,
2153            timestep = None,
2154            reduction = None,
2155            cellsize = 10,
2156            verbose = False,
2157            origin = None):
    print 'sww2asc will soon be obsolete - please use sww2dem'
2159    sww2dem(basename_in,
2160            basename_out = basename_out,
2161            quantity = quantity,
2162            timestep = timestep,
2163            reduction = reduction,
2164            cellsize = cellsize,
2165            verbose = verbose,
2166            origin = origin,
            datum = 'WGS84',
            format = 'asc')
2169
2170def sww2ers(basename_in, basename_out = None,
2171            quantity = None,
2172            timestep = None,
2173            reduction = None,
2174            cellsize = 10,
2175            verbose = False,
2176            origin = None,
2177            datum = 'WGS84'):
    print 'sww2ers will soon be obsolete - please use sww2dem'
2179    sww2dem(basename_in,
2180            basename_out = basename_out,
2181            quantity = quantity,
2182            timestep = timestep,
2183            reduction = reduction,
2184            cellsize = cellsize,
2185            verbose = verbose,
2186            origin = origin,
2187            datum = datum,
2188            format = 'ers')
2189################################# END COMPATIBILITY ##############
2190
2191
2192
2193def sww2pts(basename_in, basename_out=None,
2194            data_points=None,
2195            quantity=None,
2196            timestep=None,
2197            reduction=None,
2198            NODATA_value=-9999,
2199            verbose=False,
2200            origin=None):
2201            #datum = 'WGS84')
2202
2203
2204    """Read SWW file and convert to interpolated values at selected points
2205
    The parameter 'quantity' must be the name of an existing quantity or
2207    an expression involving existing quantities. The default is
2208    'elevation'.
2209
2210    if timestep (an index) is given, output quantity at that timestep
2211
2212    if reduction is given use that to reduce quantity over all timesteps.
2213
    data_points (Nx2 array) gives the locations of points where the quantity is to be computed.
2215   
2216    """
2217
2218    import sys
2219    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
2220    from Numeric import array2string
2221
2222    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
2223    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
2224
2225    from anuga.geospatial_data.geospatial_data import Geospatial_data
2226
2227    if quantity is None:
2228        quantity = 'elevation'
2229
2230    if reduction is None:
2231        reduction = max
2232
2233    if basename_out is None:
2234        basename_out = basename_in + '_%s' %quantity
2235
2236    swwfile = basename_in + '.sww'
2237    ptsfile = basename_out + '.pts'
2238
2239    # Read sww file
2240    if verbose: print 'Reading from %s' %swwfile
2241    from Scientific.IO.NetCDF import NetCDFFile
2242    fid = NetCDFFile(swwfile)
2243
2244    # Get extent and reference
2245    x = fid.variables['x'][:]
2246    y = fid.variables['y'][:]
2247    volumes = fid.variables['volumes'][:]
2248
2249    number_of_timesteps = fid.dimensions['number_of_timesteps']
2250    number_of_points = fid.dimensions['number_of_points']
2251    if origin is None:
2252
2253        # Get geo_reference
2254        # sww files don't have to have a geo_ref
2255        try:
2256            geo_reference = Geo_reference(NetCDFObject=fid)
2257        except AttributeError, e:
2258            geo_reference = Geo_reference() #Default georef object
2259
2260        xllcorner = geo_reference.get_xllcorner()
2261        yllcorner = geo_reference.get_yllcorner()
2262        zone = geo_reference.get_zone()
2263    else:
2264        zone = origin[0]
2265        xllcorner = origin[1]
2266        yllcorner = origin[2]
2267
2268
2269
2270    # FIXME: Refactor using code from file_function.statistics
2271    # Something like print swwstats(swwname)
2272    if verbose:
2273        x = fid.variables['x'][:]
2274        y = fid.variables['y'][:]
2275        times = fid.variables['time'][:]
2276        print '------------------------------------------------'
2277        print 'Statistics of SWW file:'
2278        print '  Name: %s' %swwfile
2279        print '  Reference:'
2280        print '    Lower left corner: [%f, %f]'\
2281              %(xllcorner, yllcorner)
2282        print '    Start time: %f' %fid.starttime[0]
2283        print '  Extent:'
2284        print '    x [m] in [%f, %f], len(x) == %d'\
2285              %(min(x.flat), max(x.flat), len(x.flat))
2286        print '    y [m] in [%f, %f], len(y) == %d'\
2287              %(min(y.flat), max(y.flat), len(y.flat))
2288        print '    t [s] in [%f, %f], len(t) == %d'\
2289              %(min(times), max(times), len(times))
2290        print '  Quantities [SI units]:'
2291        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2292            q = fid.variables[name][:].flat
2293            print '    %s in [%f, %f]' %(name, min(q), max(q))
2294
2295
2296
2297    # Get quantity and reduce if applicable
2298    if verbose: print 'Processing quantity %s' %quantity
2299
2300    # Turn NetCDF objects into Numeric arrays
2301    quantity_dict = {}
2302    for name in fid.variables.keys():
2303        quantity_dict[name] = fid.variables[name][:]
2304
2305
2306
2307    # Convert quantity expression to quantities found in sww file   
2308    q = apply_expression_to_dictionary(quantity, quantity_dict)
2309
2310
2311
2312    if len(q.shape) == 2:
2313        # q has a time component and needs to be reduced along
2314        # the temporal dimension
2315        if verbose: print 'Reducing quantity %s' %quantity
2316        q_reduced = zeros( number_of_points, Float )
2317
2318        for k in range(number_of_points):
2319            q_reduced[k] = reduction( q[:,k] )
2320
2321        q = q_reduced
2322
2323    # Post condition: Now q has dimension: number_of_points
2324    assert len(q.shape) == 1
2325    assert q.shape[0] == number_of_points
2326
2327
2328    if verbose:
2329        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
2330
2331
2332    # Create grid and update xll/yll corner and x,y
2333    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2334    assert len(vertex_points.shape) == 2
2335
2336    # Interpolate
2337    from anuga.fit_interpolate.interpolate import Interpolate
2338    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2339
2340    # Interpolate using quantity values
2341    if verbose: print 'Interpolating'
2342    interpolated_values = interp.interpolate(q, data_points).flat
2343
2344
2345    if verbose:
2346        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
2347                                                      max(interpolated_values))
2348
2349    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2350    P = interp.mesh.get_boundary_polygon()
2351    outside_indices = outside_polygon(data_points, P, closed=True)
2352
2353    for i in outside_indices:
2354        interpolated_values[i] = NODATA_value
2355
2356
2357    # Store results   
2358    G = Geospatial_data(data_points=data_points,
2359                        attributes=interpolated_values)
2360
2361    G.export_points_file(ptsfile, absolute = True)
2362
2363    fid.close()
2364
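# Example usage (illustrative sketch; the basename and gauge locations are
# made up): interpolate the peak stage at two gauge points.
def _example_sww2pts():
    from Numeric import array
    gauges = array([[308620.0, 6189306.0],
                    [309200.0, 6189800.0]])
    sww2pts('runup',
            basename_out='runup_gauges',
            data_points=gauges,
            quantity='stage',
            reduction=max)
    #Writes runup_gauges.pts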
2365
2366def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2367                                  use_cache = False,
2368                                  verbose = False):
2369    """Read Digitial Elevation model from the following ASCII format (.asc)
2370
2371    Example:
2372
2373    ncols         3121
2374    nrows         1800
2375    xllcorner     722000
2376    yllcorner     5893000
2377    cellsize      25
2378    NODATA_value  -9999
2379    138.3698 137.4194 136.5062 135.5558 ..........
2380
2381    Convert basename_in + '.asc' to NetCDF format (.dem)
2382    mimicking the ASCII format closely.
2383
2384
2385    An accompanying file with same basename_in but extension .prj must exist
2386    and is used to fix the UTM zone, datum, false northings and eastings.
2387
2388    The prj format is assumed to be as
2389
2390    Projection    UTM
2391    Zone          56
2392    Datum         WGS84
2393    Zunits        NO
2394    Units         METERS
2395    Spheroid      WGS84
2396    Xshift        0.0000000000
2397    Yshift        10000000.0000000000
2398    Parameters
2399    """
2400
2401
2402
2403    kwargs = {'basename_out': basename_out, 'verbose': verbose}
2404
2405    if use_cache is True:
2406        from caching import cache
2407        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
2408                       dependencies = [basename_in + '.asc'],
2409                       verbose = verbose)
2410
2411    else:
2412        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
2413
2414    return result
2415
2416
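# Example usage (illustrative sketch; 'topography' is a hypothetical basename
# with topography.asc and topography.prj present): convert to topography.dem,
# caching the result so an unchanged input file is not reprocessed.
def _example_convert_dem_from_ascii2netcdf():
    convert_dem_from_ascii2netcdf('topography', use_cache=True, verbose=True)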
2417
2418
2419
2420
2421def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2422                                  verbose = False):
2423    """Read Digitial Elevation model from the following ASCII format (.asc)
2424
2425    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2426    """
2427
2428    import os
2429    from Scientific.IO.NetCDF import NetCDFFile
2430    from Numeric import Float, array
2431
2432    #root, ext = os.path.splitext(basename_in)
2433    root = basename_in
2434
2435    ###########################################
2436    # Read Meta data
2437    if verbose: print 'Reading METADATA from %s' %root + '.prj'
2438    metadatafile = open(root + '.prj')
2439    metalines = metadatafile.readlines()
2440    metadatafile.close()
2441
2442    L = metalines[0].strip().split()
2443    assert L[0].strip().lower() == 'projection'
2444    projection = L[1].strip()                   #TEXT
2445
2446    L = metalines[1].strip().split()
2447    assert L[0].strip().lower() == 'zone'
2448    zone = int(L[1].strip())
2449
2450    L = metalines[2].strip().split()
2451    assert L[0].strip().lower() == 'datum'
2452    datum = L[1].strip()                        #TEXT
2453
2454    L = metalines[3].strip().split()
2455    assert L[0].strip().lower() == 'zunits'     #IGNORE
2456    zunits = L[1].strip()                       #TEXT
2457
2458    L = metalines[4].strip().split()
2459    assert L[0].strip().lower() == 'units'
2460    units = L[1].strip()                        #TEXT
2461
2462    L = metalines[5].strip().split()
2463    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2464    spheroid = L[1].strip()                     #TEXT
2465
2466    L = metalines[6].strip().split()
2467    assert L[0].strip().lower() == 'xshift'
2468    false_easting = float(L[1].strip())
2469
2470    L = metalines[7].strip().split()
2471    assert L[0].strip().lower() == 'yshift'
2472    false_northing = float(L[1].strip())
2473
2474    #print false_easting, false_northing, zone, datum
2475
2476
2477    ###########################################
2478    #Read DEM data
2479
2480    datafile = open(basename_in + '.asc')
2481
2482    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2483    lines = datafile.readlines()
2484    datafile.close()
2485
2486    if verbose: print 'Got', len(lines), ' lines'
2487
2488    ncols = int(lines[0].split()[1].strip())
2489    nrows = int(lines[1].split()[1].strip())
2490    xllcorner = float(lines[2].split()[1].strip())
2491    yllcorner = float(lines[3].split()[1].strip())
2492    cellsize = float(lines[4].split()[1].strip())
2493    NODATA_value = int(lines[5].split()[1].strip())
2494
2495    assert len(lines) == nrows + 6
2496
2497
2498    ##########################################
2499
2500
    if basename_out is None:
2502        netcdfname = root + '.dem'
2503    else:
2504        netcdfname = basename_out + '.dem'
2505
2506    if verbose: print 'Store to NetCDF file %s' %netcdfname
2507    # NetCDF file definition
2508    fid = NetCDFFile(netcdfname, 'w')
2509
2510    #Create new file
2511    fid.institution = 'Geoscience Australia'
2512    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2513                      'of spatial point data'
2514
2515    fid.ncols = ncols
2516    fid.nrows = nrows
2517    fid.xllcorner = xllcorner
2518    fid.yllcorner = yllcorner
2519    fid.cellsize = cellsize
2520    fid.NODATA_value = NODATA_value
2521
2522    fid.zone = zone
2523    fid.false_easting = false_easting
2524    fid.false_northing = false_northing
2525    fid.projection = projection
2526    fid.datum = datum
2527    fid.units = units
2528
2529
2530    # dimension definitions
2531    fid.createDimension('number_of_rows', nrows)
2532    fid.createDimension('number_of_columns', ncols)
2533
2534    # variable definitions
2535    fid.createVariable('elevation', Float, ('number_of_rows',
2536                                            'number_of_columns'))
2537
2538    # Get handles to the variables
2539    elevation = fid.variables['elevation']
2540
2541    #Store data
2542    n = len(lines[6:])
2543    for i, line in enumerate(lines[6:]):
2544        fields = line.split()
2545        if verbose and i%((n+10)/10)==0:
2546            print 'Processing row %d of %d' %(i, nrows)
2547
2548        elevation[i, :] = array([float(x) for x in fields])
2549
2550    fid.close()
2551
2552
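# Illustrative sketch: the .dem file produced above is plain NetCDF, so its
# contents can be inspected directly (the filename is hypothetical).
def _example_inspect_dem():
    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile('topography.dem', 'r')
    elevation = fid.variables['elevation'][:] #(number_of_rows, number_of_columns)
    print 'Read elevation grid of shape', elevation.shape
    fid.close()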
2553
2554
2555
2556def ferret2sww(basename_in, basename_out = None,
2557               verbose = False,
2558               minlat = None, maxlat = None,
2559               minlon = None, maxlon = None,
2560               mint = None, maxt = None, mean_stage = 0,
2561               origin = None, zscale = 1,
2562               fail_on_NaN = True,
2563               NaN_filler = 0,
2564               elevation = None,
2565               inverted_bathymetry = True
2566               ): #FIXME: Bathymetry should be obtained
2567                                  #from MOST somehow.
2568                                  #Alternatively from elsewhere
2569                                  #or, as a last resort,
2570                                  #specified here.
2571                                  #The value of -100 will work
2572                                  #for the Wollongong tsunami
2573                                  #scenario but is very hacky
2574    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
2575    sww format native to abstract_2d_finite_volumes.
2576
2577    Specify only basename_in and read files of the form
2578    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2579    relative height, x-velocity and y-velocity, respectively.
2580
2581    Also convert latitude and longitude to UTM. All coordinates are
2582    assumed to be given in the GDA94 datum.
2583
    min's and max's: If omitted the full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees.
2587
2588    origin is a 3-tuple with geo referenced
2589    UTM coordinates (zone, easting, northing)
2590
2591    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2592    which means that longitude is the fastest
2593    varying dimension (row major order, so to speak)
2594
2595    ferret2sww uses grid points as vertices in a triangular grid
2596    counting vertices from lower left corner upwards, then right
2597    """
2598
2599    import os
2600    from Scientific.IO.NetCDF import NetCDFFile
2601    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2602    from Numeric import allclose, around
2603
2604    precision = Float
2605
2606    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2607
    if minlat is not None:
        assert -90 < minlat < 90, msg
    if maxlat is not None:
        assert -90 < maxlat < 90, msg
        if minlat is not None:
            assert maxlat > minlat
    if minlon is not None:
        assert -180 < minlon < 180, msg
    if maxlon is not None:
        assert -180 < maxlon < 180, msg
        if minlon is not None:
            assert maxlon > minlon
2620       
2621
2622
2623    #Get NetCDF data
2624    if verbose: print 'Reading files %s_*.nc' %basename_in
2625    #print "basename_in + '_ha.nc'",basename_in + '_ha.nc'
2626    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2627    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2628    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2629    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2630
2631    if basename_out is None:
2632        swwname = basename_in + '.sww'
2633    else:
2634        swwname = basename_out + '.sww'
2635
    # Get dimensions of file_h
2637    for dimension in file_h.dimensions.keys():
2638        if dimension[:3] == 'LON':
2639            dim_h_longitude = dimension
2640        if dimension[:3] == 'LAT':
2641            dim_h_latitude = dimension
2642        if dimension[:4] == 'TIME':
2643            dim_h_time = dimension
2644
2645#    print 'long:', dim_h_longitude
2646#    print 'lats:', dim_h_latitude
2647#    print 'times:', dim_h_time
2648
2649    times = file_h.variables[dim_h_time]
2650    latitudes = file_h.variables[dim_h_latitude]
2651    longitudes = file_h.variables[dim_h_longitude]
2652   
    # Get dimensions for file_e
2654    for dimension in file_e.dimensions.keys():
2655        if dimension[:3] == 'LON':
2656            dim_e_longitude = dimension
2657        if dimension[:3] == 'LAT':
2658            dim_e_latitude = dimension
2659
    # Get dimensions for file_u
2661    for dimension in file_u.dimensions.keys():
2662        if dimension[:3] == 'LON':
2663            dim_u_longitude = dimension
2664        if dimension[:3] == 'LAT':
2665            dim_u_latitude = dimension
2666        if dimension[:4] == 'TIME':
2667            dim_u_time = dimension
2668           
    # Get dimensions for file_v
2670    for dimension in file_v.dimensions.keys():
2671        if dimension[:3] == 'LON':
2672            dim_v_longitude = dimension
2673        if dimension[:3] == 'LAT':
2674            dim_v_latitude = dimension
2675        if dimension[:4] == 'TIME':
2676            dim_v_time = dimension
2677
2678
2679    #Precision used by most for lat/lon is 4 or 5 decimals
2680    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2681    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2682
2683    #Check that files are compatible
2684    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2685    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2686    assert allclose(latitudes, e_lat)
2687
2688    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2689    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2690    assert allclose(longitudes, e_lon)
2691
    if mint is None:
        jmin = 0
    else:
        jmin = searchsorted(times, mint)

    if maxt is None:
        jmax = len(times)
    else:
        jmax = searchsorted(times, maxt)
2701
2702    #print "latitudes[:]",latitudes[:]
2703    #print "longitudes[:]",longitudes [:]
2704    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],
2705                                                  longitudes[:],
2706                                                 minlat, maxlat,
2707                                                 minlon, maxlon)
2708
2709
2710#    print' j', jmin, jmax
2711    times = times[jmin:jmax]
2712    latitudes = latitudes[kmin:kmax]
2713    longitudes = longitudes[lmin:lmax]
2714
2715    #print "latitudes[:]",latitudes[:]
2716    #print "longitudes[:]",longitudes [:]
2717
2718    if verbose: print 'cropping'
2719    zname = 'ELEVATION'
2720
2721    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2722    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2723    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2724    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2725
2726    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2727    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2728    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2729    #        from Numeric import asarray
2730    #        elevations=elevations.tolist()
2731    #        elevations.reverse()
2732    #        elevations=asarray(elevations)
2733    #    else:
2734    #        from Numeric import asarray
2735    #        elevations=elevations.tolist()
2736    #        elevations.reverse()
2737    #        elevations=asarray(elevations)
2738    #        'print hmmm'
2739
2740
2741
2742    #Get missing values
2743    nan_ha = file_h.variables['HA'].missing_value[0]
2744    nan_ua = file_u.variables['UA'].missing_value[0]
2745    nan_va = file_v.variables['VA'].missing_value[0]
2746    if hasattr(file_e.variables[zname],'missing_value'):
2747        nan_e  = file_e.variables[zname].missing_value[0]
2748    else:
2749        nan_e = None
2750
2751    #Cleanup
2752    from Numeric import sometrue
2753
2754    missing = (amplitudes == nan_ha)
2755    if sometrue (missing):
2756        if fail_on_NaN:
2757            msg = 'NetCDFFile %s contains missing values'\
2758                  %(basename_in+'_ha.nc')
2759            raise DataMissingValuesError, msg
2760        else:
2761            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2762
2763    missing = (uspeed == nan_ua)
2764    if sometrue (missing):
2765        if fail_on_NaN:
2766            msg = 'NetCDFFile %s contains missing values'\
2767                  %(basename_in+'_ua.nc')
2768            raise DataMissingValuesError, msg
2769        else:
2770            uspeed = uspeed*(missing==0) + missing*NaN_filler
2771
2772    missing = (vspeed == nan_va)
2773    if sometrue (missing):
2774        if fail_on_NaN:
2775            msg = 'NetCDFFile %s contains missing values'\
2776                  %(basename_in+'_va.nc')
2777            raise DataMissingValuesError, msg
2778        else:
2779            vspeed = vspeed*(missing==0) + missing*NaN_filler
2780
2781
2782    missing = (elevations == nan_e)
2783    if sometrue (missing):
2784        if fail_on_NaN:
2785            msg = 'NetCDFFile %s contains missing values'\
2786                  %(basename_in+'_e.nc')
2787            raise DataMissingValuesError, msg
2788        else:
2789            elevations = elevations*(missing==0) + missing*NaN_filler
2790
2791    #######
2792
2793
2794
2795    number_of_times = times.shape[0]
2796    number_of_latitudes = latitudes.shape[0]
2797    number_of_longitudes = longitudes.shape[0]
2798
2799    assert amplitudes.shape[0] == number_of_times
2800    assert amplitudes.shape[1] == number_of_latitudes
2801    assert amplitudes.shape[2] == number_of_longitudes
2802
2803    if verbose:
2804        print '------------------------------------------------'
2805        print 'Statistics:'
2806        print '  Extent (lat/lon):'
2807        print '    lat in [%f, %f], len(lat) == %d'\
2808              %(min(latitudes.flat), max(latitudes.flat),
2809                len(latitudes.flat))
2810        print '    lon in [%f, %f], len(lon) == %d'\
2811              %(min(longitudes.flat), max(longitudes.flat),
2812                len(longitudes.flat))
2813        print '    t in [%f, %f], len(t) == %d'\
2814              %(min(times.flat), max(times.flat), len(times.flat))
2815
2816        q = amplitudes.flat
2817        name = 'Amplitudes (ha) [cm]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2819
2820        q = uspeed.flat
2821        name = 'Speeds (ua) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2823
2824        q = vspeed.flat
2825        name = 'Speeds (va) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2827
2828        q = elevations.flat
2829        name = 'Elevations (e) [m]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2831
2832
2833    #print number_of_latitudes, number_of_longitudes
2834    number_of_points = number_of_latitudes*number_of_longitudes
2835    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2836
2837
2838    file_h.close()
2839    file_u.close()
2840    file_v.close()
2841    file_e.close()
2842
2843
2844    # NetCDF file definition
2845    outfile = NetCDFFile(swwname, 'w')
2846
2847    #Create new file
2848    outfile.institution = 'Geoscience Australia'
2849    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2850                          %(basename_in + '_ha.nc',
2851                            basename_in + '_ua.nc',
2852                            basename_in + '_va.nc',
2853                            basename_in + '_e.nc')
2854
2855
2856    #For sww compatibility
2857    outfile.smoothing = 'Yes'
2858    outfile.order = 1
2859
2860    #Start time in seconds since the epoch (midnight 1/1/1970)
2861    outfile.starttime = starttime = times[0]
2862    times = times - starttime  #Store relative times
2863
2864    # dimension definitions
2865    outfile.createDimension('number_of_volumes', number_of_volumes)
2866
2867    outfile.createDimension('number_of_vertices', 3)
2868    outfile.createDimension('number_of_points', number_of_points)
2869
2870
    outfile.createDimension('number_of_timesteps', len(times))
2873
2874    # variable definitions
2875    outfile.createVariable('x', precision, ('number_of_points',))
2876    outfile.createVariable('y', precision, ('number_of_points',))
2877    outfile.createVariable('elevation', precision, ('number_of_points',))
2878
2879    #FIXME: Backwards compatibility
2880    outfile.createVariable('z', precision, ('number_of_points',))
2881    #################################
2882
2883    outfile.createVariable('volumes', Int, ('number_of_volumes',
2884                                            'number_of_vertices'))
2885
2886    outfile.createVariable('time', precision,
2887                           ('number_of_timesteps',))
2888
2889    outfile.createVariable('stage', precision,
2890                           ('number_of_timesteps',
2891                            'number_of_points'))
2892
2893    outfile.createVariable('xmomentum', precision,
2894                           ('number_of_timesteps',
2895                            'number_of_points'))
2896
2897    outfile.createVariable('ymomentum', precision,
2898                           ('number_of_timesteps',
2899                            'number_of_points'))
2900
2901
2902    #Store
2903    from anuga.coordinate_transforms.redfearn import redfearn
2904    x = zeros(number_of_points, Float)  #Easting
2905    y = zeros(number_of_points, Float)  #Northing
2906
2907
2908    if verbose: print 'Making triangular grid'
2909    #Check zone boundaries
2910    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2911
2912    vertices = {}
2913    i = 0
2914    for k, lat in enumerate(latitudes):       #Y direction
2915        for l, lon in enumerate(longitudes):  #X direction
2916
2917            vertices[l,k] = i
2918
2919            zone, easting, northing = redfearn(lat,lon)
2920
            msg = 'Zone boundary crossed at longitude = %f' %lon
2922            #assert zone == refzone, msg
2923            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2924            x[i] = easting
2925            y[i] = northing
2926            i += 1
2927
2928
2929    #Construct 2 triangles per 'rectangular' element
2930    volumes = []
2931    for l in range(number_of_longitudes-1):    #X direction
2932        for k in range(number_of_latitudes-1): #Y direction
2933            v1 = vertices[l,k+1]
2934            v2 = vertices[l,k]
2935            v3 = vertices[l+1,k+1]
2936            v4 = vertices[l+1,k]
2937
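            #Each rectangular cell is split along its diagonal v2-v3:
            #
            #    v1 ---- v3
            #    |      / |
            #    |    /   |
            #    |  /     |
            #    v2 ---- v4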
2938            volumes.append([v1,v2,v3]) #Upper element
2939            volumes.append([v4,v3,v2]) #Lower element
2940
2941    volumes = array(volumes)
2942
    if origin is None:
2944        zone = refzone
2945        xllcorner = min(x)
2946        yllcorner = min(y)
2947    else:
2948        zone = origin[0]
2949        xllcorner = origin[1]
2950        yllcorner = origin[2]
2951
2952
2953    outfile.xllcorner = xllcorner
2954    outfile.yllcorner = yllcorner
2955    outfile.zone = zone
2956
2957
2958    if elevation is not None:
2959        z = elevation
2960    else:
2961        if inverted_bathymetry:
2962            z = -1*elevations
2963        else:
2964            z = elevations
2965    #FIXME: z should be obtained from MOST and passed in here
2966
2967    from Numeric import resize
2968    z = resize(z,outfile.variables['z'][:].shape)
2969    outfile.variables['x'][:] = x - xllcorner
2970    outfile.variables['y'][:] = y - yllcorner
    outfile.variables['z'][:] = z             #FIXME HACK for backwards compat.
2972    outfile.variables['elevation'][:] = z
2973    outfile.variables['time'][:] = times   #Store time relative
2974    outfile.variables['volumes'][:] = volumes.astype(Int32) #For Opteron 64
2975
2976
2977
2978    #Time stepping
2979    stage = outfile.variables['stage']
2980    xmomentum = outfile.variables['xmomentum']
2981    ymomentum = outfile.variables['ymomentum']
2982
2983    if verbose: print 'Converting quantities'
2984    n = len(times)
2985    for j in range(n):
2986        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2987        i = 0
2988        for k in range(number_of_latitudes):      #Y direction
2989            for l in range(number_of_longitudes): #X direction
2990                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2991                stage[j,i] = w
2992                h = w - z[i]
2993                xmomentum[j,i] = uspeed[j,k,l]/100*h
2994                ymomentum[j,i] = vspeed[j,k,l]/100*h
2995                i += 1
2996
2997    #outfile.close()
2998
2999    #FIXME: Refactor using code from file_function.statistics
3000    #Something like print swwstats(swwname)
3001    if verbose:
3002        x = outfile.variables['x'][:]
3003        y = outfile.variables['y'][:]
3004        times = outfile.variables['time'][:]
3005        print '------------------------------------------------'
3006        print 'Statistics of output file:'
3007        print '  Name: %s' %swwname
3008        print '  Reference:'
3009        print '    Lower left corner: [%f, %f]'\
3010              %(xllcorner, yllcorner)
3011        print '    Start time: %f' %starttime
3012        print '  Extent:'
3013        print '    x [m] in [%f, %f], len(x) == %d'\
3014              %(min(x.flat), max(x.flat), len(x.flat))
3015        print '    y [m] in [%f, %f], len(y) == %d'\
3016              %(min(y.flat), max(y.flat), len(y.flat))
3017        print '    t [s] in [%f, %f], len(t) == %d'\
3018              %(min(times), max(times), len(times))
3019        print '  Quantities [SI units]:'
3020        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
3021            q = outfile.variables[name][:].flat
3022            print '    %s in [%f, %f]' %(name, min(q), max(q))
3023
3024
3025
3026    outfile.close()
3027
3028
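# Example usage (illustrative sketch; 'most_output' is a hypothetical basename
# with most_output_ha.nc, _ua.nc, _va.nc and _e.nc present): crop to a
# lat/lon window and raise the water level by a 1 m mean stage.
def _example_ferret2sww():
    ferret2sww('most_output',
               basename_out='most_scenario',
               minlat=-34.5, maxlat=-34.0,
               minlon=150.5, maxlon=151.0,
               mean_stage=1.0,
               verbose=True)
    #Writes most_scenario.sww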
3029
3030
3031
3032def timefile2netcdf(filename, quantity_names=None, time_as_seconds=False):
3033    """Template for converting typical text files with time series to
3034    NetCDF tms file.
3035
3036
3037    The file format is assumed to be either two fields separated by a comma:
3038
3039        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
3040
3041    E.g
3042
3043      31/08/04 00:00:00, 1.328223 0 0
3044      31/08/04 00:15:00, 1.292912 0 0
3045
3046    or time (seconds), value0 value1 value2 ...
3047   
3048      0.0, 1.328223 0 0
3049      0.1, 1.292912 0 0
3050
3051    will provide a time dependent function f(t) with three attributes
3052
    filename is assumed to be the rootname with extensions .txt and .tms
3054    """
3055
3056    import time, calendar
3057    from Numeric import array
3058    from anuga.config import time_format
3059    from anuga.utilities.numerical_tools import ensure_numeric
3060
3061
3062
3063    fid = open(filename + '.txt')
3064    line = fid.readline()
3065    fid.close()
3066
3067    fields = line.split(',')
    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
3069    assert len(fields) == 2, msg
3070
3071    if not time_as_seconds:
3072        try:
3073            starttime = calendar.timegm(time.strptime(fields[0], time_format))
3074        except ValueError:
3075            msg = 'First field in file %s must be' %filename
3076            msg += ' date-time with format %s.\n' %time_format
3077            msg += 'I got %s instead.' %fields[0]
3078            raise DataTimeError, msg
3079    else:
3080        try:
3081            starttime = float(fields[0])
        except ValueError:
3083            msg = "Bad time format"
3084            raise DataTimeError, msg
3085
3086
3087    #Split values
3088    values = []
3089    for value in fields[1].split():
3090        values.append(float(value))
3091
3092    q = ensure_numeric(values)
3093
3094    msg = 'ERROR: File must contain at least one independent value'
3095    assert len(q.shape) == 1, msg
3096
3097
3098
3099    #Read times proper
3100    from Numeric import zeros, Float, alltrue
3101    from anuga.config import time_format
3102    import time, calendar
3103
3104    fid = open(filename + '.txt')
3105    lines = fid.readlines()
3106    fid.close()
3107
3108    N = len(lines)
3109    d = len(q)
3110
3111    T = zeros(N, Float)       #Time
3112    Q = zeros((N, d), Float)  #Values
3113
3114    for i, line in enumerate(lines):
3115        fields = line.split(',')
3116        if not time_as_seconds:
3117            realtime = calendar.timegm(time.strptime(fields[0], time_format))
3118        else:
            realtime = float(fields[0])
3120        T[i] = realtime - starttime
3121
3122        for j, value in enumerate(fields[1].split()):
3123            Q[i, j] = float(value)
3124
    msg = 'File %s must list time as a monotonically ' %filename
3126    msg += 'increasing sequence'
3127    assert alltrue( T[1:] - T[:-1] > 0 ), msg
3128
3129    #Create NetCDF file
3130    from Scientific.IO.NetCDF import NetCDFFile
3131
3132    fid = NetCDFFile(filename + '.tms', 'w')
3133
3134
3135    fid.institution = 'Geoscience Australia'
3136    fid.description = 'Time series'
3137
3138
3139    #Reference point
3140    #Start time in seconds since the epoch (midnight 1/1/1970)
3141    #FIXME: Use Georef
3142    fid.starttime = starttime
3143
3144    # dimension definitions
3145    #fid.createDimension('number_of_volumes', self.number_of_volumes)
3146    #fid.createDimension('number_of_vertices', 3)
3147
3148
3149    fid.createDimension('number_of_timesteps', len(T))
3150
3151    fid.createVariable('time', Float, ('number_of_timesteps',))
3152
3153    fid.variables['time'][:] = T
3154
3155    for i in range(Q.shape[1]):
3156        try:
3157            name = quantity_names[i]
        except (TypeError, IndexError):
3159            name = 'Attribute%d'%i
3160
3161        fid.createVariable(name, Float, ('number_of_timesteps',))
3162        fid.variables[name][:] = Q[:,i]
3163
3164    fid.close()
3165
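# Example usage (illustrative sketch; 'gauge' is a hypothetical rootname for a
# gauge.txt file in the comma separated format shown in the docstring above):
def _example_timefile2netcdf():
    timefile2netcdf('gauge', quantity_names=['stage', 'xmomentum', 'ymomentum'])
    #Writes gauge.tms with one time series variable per column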
3166
3167def extent_sww(file_name):
3168    """
3169    Read in an sww file.
3170
    Input:
    file_name - the sww file

    Output:
    A list [min(x), max(x), min(y), max(y), min(stage), max(stage)]
    giving the spatial extent of the mesh and the range of the stored
    stage values.
3180    """
3181
3182
3183    from Scientific.IO.NetCDF import NetCDFFile
3184
3185    #Check contents
3186    #Get NetCDF
3187    fid = NetCDFFile(file_name, 'r')
3188
3189    # Get the variables
3190    x = fid.variables['x'][:]
3191    y = fid.variables['y'][:]
3192    stage = fid.variables['stage'][:]
3193    #print "stage",stage
3194    #print "stage.shap",stage.shape
3195    #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
3196    #print "min(stage)",min(stage)
3197
3198    fid.close()
3199
3200    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
3201
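# Example usage (illustrative sketch; the filename is hypothetical):
def _example_extent_sww():
    xmin, xmax, ymin, ymax, stage_min, stage_max = extent_sww('runup.sww')
    print 'x in [%f, %f], y in [%f, %f]' %(xmin, xmax, ymin, ymax)
    print 'stage in [%f, %f]' %(stage_min, stage_max)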
3202
def sww2domain(filename, boundary=None, t=None,
               fail_if_NaN=True, NaN_filler=0,
               verbose=False, very_verbose=False):
3206    """
3207    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
3208
3209    Boundary is not recommended if domain.smooth is not selected, as it
3210    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
3212    give a different final boundary, or crash.
3213    """
    NaN = 9.969209968386869e+036  #Initialise NaN (the default NetCDF fill value)
3216
3217    from Scientific.IO.NetCDF import NetCDFFile
3218    from shallow_water import Domain
3219    from Numeric import asarray, transpose, resize
3220
3221    if verbose: print 'Reading from ', filename
3222    fid = NetCDFFile(filename, 'r')    #Open existing file for read
3223    time = fid.variables['time']       #Timesteps
3224    if t is None:
3225        t = time[-1]
3226    time_interp = get_time_interp(time,t)
3227
3228    # Get the variables as Numeric arrays
3229    x = fid.variables['x'][:]             #x-coordinates of vertices
3230    y = fid.variables['y'][:]             #y-coordinates of vertices
3231    elevation = fid.variables['elevation']     #Elevation
3232    stage = fid.variables['stage']     #Water level
3233    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
3234    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
3235
3236    starttime = fid.starttime[0]
3237    volumes = fid.variables['volumes'][:] #Connectivity
3238    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
3239
3240    conserved_quantities = []
3241    interpolated_quantities = {}
3242    other_quantities = []
3243
3244    # get geo_reference
3245    #sww files don't have to have a geo_ref
3246    try:
3247        geo_reference = Geo_reference(NetCDFObject=fid)
    except AttributeError:
3249        geo_reference = None
3250
3251    if verbose: print '    getting quantities'
3252    for quantity in fid.variables.keys():
3253        dimensions = fid.variables[quantity].dimensions
3254        if 'number_of_timesteps' in dimensions:
3255            conserved_quantities.append(quantity)
3256            interpolated_quantities[quantity]=\
3257                  interpolated_quantity(fid.variables[quantity][:],time_interp)
3258        else: other_quantities.append(quantity)
3259
3260    other_quantities.remove('x')
3261    other_quantities.remove('y')
3262    other_quantities.remove('z')
3263    other_quantities.remove('volumes')
3264
3265    conserved_quantities.remove('time')
3266
3267    if verbose: print '    building domain'
3268    #    From domain.Domain:
3269    #    domain = Domain(coordinates, volumes,\
3270    #                    conserved_quantities = conserved_quantities,\
3271    #                    other_quantities = other_quantities,zone=zone,\
3272    #                    xllcorner=xllcorner, yllcorner=yllcorner)
3273
3274    #   From shallow_water.Domain:
3275    coordinates=coordinates.tolist()
3276    volumes=volumes.tolist()
3277    #FIXME:should this be in mesh?(peter row)
3278    if fid.smoothing == 'Yes': unique = False
3279    else: unique = True
3280    if unique:
3281        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
3282
3283
3284    try:
3285        domain = Domain(coordinates, volumes, boundary)
3286    except AssertionError, e:
3287        fid.close()
3288        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
3289        raise DataDomainError, msg
3290
    if boundary is not None:
3292        domain.boundary = boundary
3293
3294    domain.geo_reference = geo_reference
3295
3296    domain.starttime=float(starttime)+float(t)
3297    domain.time=0.0
3298
3299    for quantity in other_quantities:
3300        try:
3301            NaN = fid.variables[quantity].missing_value
        except AttributeError:
3303            pass #quantity has no missing_value number
3304        X = fid.variables[quantity][:]
3305        if very_verbose:
3306            print '       ',quantity
3307            print '        NaN =',NaN
3308            print '        max(X)'
3309            print '       ',max(X)
3310            print '        max(X)==NaN'
3311            print '       ',max(X)==NaN
3312            print ''
3313        if (max(X)==NaN) or (min(X)==NaN):
3314            if fail_if_NaN:
3315                msg = 'quantity "%s" contains no_data entry'%quantity
3316                raise DataMissingValuesError, msg
3317            else:
                data = (X != NaN)
                X = (X*data) + (data==0)*NaN_filler  # replace missing entries with NaN_filler
3320        if unique:
3321            X = resize(X,(len(X)/3,3))
3322        domain.set_quantity(quantity,X)
3323    #
3324    for quantity in conserved_quantities:
3325        try:
3326            NaN = fid.variables[quantity].missing_value
3327        except:
3328            pass #quantity has no missing_value number
3329        X = interpolated_quantities[quantity]
3330        if very_verbose:
3331            print '       ',quantity
3332            print '        NaN =',NaN
3333            print '        max(X)'
3334            print '       ',max(X)
3335            print '        max(X)==NaN'
3336            print '       ',max(X)==NaN
3337            print ''
3338        if (max(X)==NaN) or (min(X)==NaN):
3339            if fail_if_NaN:
3340                msg = 'quantity "%s" contains no_data entry'%quantity
3341                raise DataMissingValuesError, msg
3342            else:
                data = (X != NaN)
                X = (X*data) + (data==0)*NaN_filler  # replace missing entries with NaN_filler
3345        if unique:
3346            X = resize(X,(X.shape[0]/3,3))
3347        domain.set_quantity(quantity,X)
3348
3349    fid.close()
3350    return domain
3351
def interpolated_quantity(saved_quantity, time_interp):
    """Interpolate a saved quantity in time, given (index, ratio)."""
3355    index,ratio = time_interp
3356    Q = saved_quantity
3357    if ratio > 0:
3358        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
3359    else:
3360        q = Q[index]
3361    #Return vector of interpolated values
3362    return q
3363
def get_time_interp(time, t=None):
    """Find the (index, ratio) pair for time interpolation.
    Borrowed from previous abstract_2d_finite_volumes code.
    """
3367    if t is None:
3368        t=time[-1]
3369        index = -1
3370        ratio = 0.
3371    else:
3372        T = time
3373        tau = t
3374        index=0
3375        msg = 'Time interval derived from file %s [%s:%s]'\
3376            %('FIXMEfilename', T[0], T[-1])
3377        msg += ' does not match model time: %s' %tau
3378        if tau < time[0]: raise DataTimeError, msg
3379        if tau > time[-1]: raise DataTimeError, msg
3380        while tau > time[index]: index += 1
3381        while tau < time[index]: index -= 1
3382        if tau == time[index]:
3383            #Protect against case where tau == time[-1] (last time)
3384            # - also works in general when tau == time[i]
3385            ratio = 0
3386        else:
3387            #t is now between index and index+1
3388            ratio = (tau - time[index])/(time[index+1] - time[index])
3389    return (index,ratio)
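
# Illustrative sketch (a hypothetical helper, not used by the module):
# shows how get_time_interp and interpolated_quantity cooperate on a
# tiny f(t, x) time series.
def _example_time_interpolation():
    from Numeric import array, Float
    time = array([0.0, 10.0, 20.0], Float)
    Q = array([[0.0, 0.0], [10.0, 20.0], [20.0, 40.0]], Float)
    index, ratio = get_time_interp(time, t=15.0)      # -> (1, 0.5)
    return interpolated_quantity(Q, (index, ratio))   # -> [15.0, 30.0]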
3390
3391
def weed(coordinates, volumes, boundary=None):
    # Accept Numeric arrays as well as lists
    # (the old check type(x) == 'array' was never true)
    if not isinstance(coordinates, list):
        coordinates = coordinates.tolist()
    if not isinstance(volumes, list):
        volumes = volumes.tolist()
3397
3398    unique = False
3399    point_dict = {}
3400    same_point = {}
3401    for i in range(len(coordinates)):
3402        point = tuple(coordinates[i])
3403        if point_dict.has_key(point):
3404            unique = True
3405            same_point[i]=point
3406            #to change all point i references to point j
3407        else:
3408            point_dict[point]=i
3409            same_point[i]=point
3410
3411    coordinates = []
3412    i = 0
3413    for point in point_dict.keys():
3414        point = tuple(point)
3415        coordinates.append(list(point))
3416        point_dict[point]=i
3417        i+=1
3418
3419
3420    for volume in volumes:
3421        for i in range(len(volume)):
3422            index = volume[i]
3423            if index>-1:
3424                volume[i]=point_dict[same_point[index]]
3425
3426    new_boundary = {}
    if boundary is not None:
3428        for segment in boundary.keys():
3429            point0 = point_dict[same_point[segment[0]]]
3430            point1 = point_dict[same_point[segment[1]]]
3431            label = boundary[segment]
            #FIXME: should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')? (peter row)
3434
3435            if new_boundary.has_key((point0,point1)):
3436                new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
3437                                              #+','+label
3438
3439            elif new_boundary.has_key((point1,point0)):
3440                new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
3441                                              #+','+label
3442            else: new_boundary[(point0,point1)]=label
3443
3444        boundary = new_boundary
3445
3446    return coordinates,volumes,boundary
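
# Illustrative sketch (a hypothetical helper): weed() merging the
# duplicated vertices of two triangles over a unit square.
def _example_weed():
    coordinates = [[0.0,0.0], [1.0,0.0], [1.0,1.0],
                   [1.0,0.0], [1.0,1.0], [0.0,1.0]]  # points 3,4 duplicate 1,2
    volumes = [[0,1,2], [3,4,5]]
    coordinates, volumes, boundary = weed(coordinates, volumes)
    assert len(coordinates) == 4  # duplicates merged; volumes renumbered
    return coordinates, volumes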
3447
3448
3449def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
3450                 verbose=False):
3451    """Read Digitial Elevation model from the following NetCDF format (.dem)
3452
3453    Example:
3454
3455    ncols         3121
3456    nrows         1800
3457    xllcorner     722000
3458    yllcorner     5893000
3459    cellsize      25
3460    NODATA_value  -9999
3461    138.3698 137.4194 136.5062 135.5558 ..........
3462
3463    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3464    """
3465
3466    import os
3467    from Scientific.IO.NetCDF import NetCDFFile
3468    from Numeric import Float, zeros, sum, reshape, equal
3469
3470    root = basename_in
3471    inname = root + '.dem'
3472
3473    #Open existing netcdf file to read
3474    infile = NetCDFFile(inname, 'r')
3475    if verbose: print 'Reading DEM from %s' %inname
3476
3477    #Read metadata
3478    ncols = infile.ncols[0]
3479    nrows = infile.nrows[0]
3480    xllcorner = infile.xllcorner[0]
3481    yllcorner = infile.yllcorner[0]
3482    cellsize = infile.cellsize[0]
3483    NODATA_value = infile.NODATA_value[0]
3484    zone = infile.zone[0]
3485    false_easting = infile.false_easting[0]
3486    false_northing = infile.false_northing[0]
3487    projection = infile.projection
3488    datum = infile.datum
3489    units = infile.units
3490
3491    dem_elevation = infile.variables['elevation']
3492
3493    #Get output file name
3494    if basename_out == None:
3495        outname = root + '_' + repr(cellsize_new) + '.dem'
3496    else:
3497        outname = basename_out + '.dem'
3498
3499    if verbose: print 'Write decimated NetCDF file to %s' %outname
3500
3501    #Determine some dimensions for decimated grid
3502    (nrows_stencil, ncols_stencil) = stencil.shape
3503    x_offset = ncols_stencil / 2
3504    y_offset = nrows_stencil / 2
3505    cellsize_ratio = int(cellsize_new / cellsize)
3506    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3507    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3508
3509    #Open netcdf file for output
3510    outfile = NetCDFFile(outname, 'w')
3511
3512    #Create new file
3513    outfile.institution = 'Geoscience Australia'
3514    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3515                           'of spatial point data'
3516    #Georeferencing
3517    outfile.zone = zone
3518    outfile.projection = projection
3519    outfile.datum = datum
3520    outfile.units = units
3521
3522    outfile.cellsize = cellsize_new
3523    outfile.NODATA_value = NODATA_value
3524    outfile.false_easting = false_easting
3525    outfile.false_northing = false_northing
3526
3527    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3528    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3529    outfile.ncols = ncols_new
3530    outfile.nrows = nrows_new
3531
3532    # dimension definition
3533    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3534
3535    # variable definition
3536    outfile.createVariable('elevation', Float, ('number_of_points',))
3537
3538    # Get handle to the variable
3539    elevation = outfile.variables['elevation']
3540
3541    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3542
3543    #Store data
3544    global_index = 0
3545    for i in range(nrows_new):
3546        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3547        lower_index = global_index
3548        telev =  zeros(ncols_new, Float)
3549        local_index = 0
3550        trow = i * cellsize_ratio
3551
3552        for j in range(ncols_new):
3553            tcol = j * cellsize_ratio
3554            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3555
3556            #if dem contains 1 or more NODATA_values set value in
3557            #decimated dem to NODATA_value, else compute decimated
3558            #value using stencil
3559            if sum(sum(equal(tmp, NODATA_value))) > 0:
3560                telev[local_index] = NODATA_value
3561            else:
3562                telev[local_index] = sum(sum(tmp * stencil))
3563
3564            global_index += 1
3565            local_index += 1
3566
3567        upper_index = global_index
3568
3569        elevation[lower_index:upper_index] = telev
3570
3571    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3572
3573    infile.close()
3574    outfile.close()
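
# Illustrative sketch (hypothetical values): a 2x2 averaging stencil that
# would decimate a 25m DEM to 50m, with the new grid dimensions computed
# exactly as in decimate_dem above.  Usage would then be something like
# decimate_dem('my_dem', stencil, 50) for a (hypothetical) file my_dem.dem.
def _example_decimation_stencil():
    from Numeric import ones, Float
    stencil = ones((2,2), Float)/4.0       # each new cell = mean of a 2x2 block
    cellsize, cellsize_new = 25, 50
    cellsize_ratio = int(cellsize_new / cellsize)    # -> 2
    ncols, nrows = 3121, 1800                        # as in the docstring
    ncols_new = 1 + (ncols - 2) / cellsize_ratio     # -> 1560
    nrows_new = 1 + (nrows - 2) / cellsize_ratio     # -> 900
    return stencil, ncols_new, nrows_new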
3575
3576
3577
3578
def tsh2sww(filename, verbose=False): #test_tsh2sww
    """
    Convert a tsh/msh file to sww format, e.g. to check that the
    mesh 'looks' good.
    """
3583
3584
    if verbose: print 'Creating domain from', filename
    domain = pmesh_to_domain_instance(filename, Domain)
    if verbose: print "Number of triangles = ", len(domain)
3588
3589    domain.smooth = True
3590    domain.format = 'sww'   #Native netcdf visualisation format
3591    file_path, filename = path.split(filename)
3592    filename, ext = path.splitext(filename)
3593    domain.set_name(filename)   
3594    domain.reduction = mean
    if verbose: print "file_path", file_path
    if file_path == "": file_path = "."
3597    domain.set_datadir(file_path)
3598
    if verbose:
3600        print "Output written to " + domain.get_datadir() + sep + \
3601              domain.get_name() + "." + domain.format
3602    sww = get_dataobject(domain)
3603    sww.store_connectivity()
3604    sww.store_timestep('stage')
3605
3606
3607def asc_csiro2sww(bath_dir,
3608                  elevation_dir,
3609                  ucur_dir,
3610                  vcur_dir,
3611                  sww_file,
3612                  minlat = None, maxlat = None,
3613                  minlon = None, maxlon = None,
3614                  zscale=1,
3615                  mean_stage = 0,
3616                  fail_on_NaN = True,
3617                  elevation_NaN_filler = 0,
3618                  bath_prefix='ba',
3619                  elevation_prefix='el',
3620                  verbose=False):
    """
    Produce an sww boundary file from esri ascii data from CSIRO.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    Assume:
    All files are in esri ascii format.

    4 types of information:
    bathymetry
    elevation
    u velocity
    v velocity

    Assumptions:
    The metadata of all the files is the same.
    Each type is in a separate directory.
    One bath file with extension .000.
    The time period is less than 24hrs and uniform.
    """
3642    from Scientific.IO.NetCDF import NetCDFFile
3643
3644    from anuga.coordinate_transforms.redfearn import redfearn
3645
    precision = Float # So if we want to change the precision it's done here
3647
3648    # go in to the bath dir and load the only file,
3649    bath_files = os.listdir(bath_dir)
3650
3651    bath_file = bath_files[0]
3652    bath_dir_file =  bath_dir + os.sep + bath_file
3653    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3654
3655    #Use the date.time of the bath file as a basis for
3656    #the start time for other files
3657    base_start = bath_file[-12:]
3658
3659    #go into the elevation dir and load the 000 file
3660    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3661                         + base_start
3662
3663    elevation_files = os.listdir(elevation_dir)
3664    ucur_files = os.listdir(ucur_dir)
3665    vcur_files = os.listdir(vcur_dir)
3666    elevation_files.sort()
3667    # the first elevation file should be the
3668    # file with the same base name as the bath data
    assert elevation_files[0] == elevation_prefix + base_start
3670
3671    number_of_latitudes = bath_grid.shape[0]
3672    number_of_longitudes = bath_grid.shape[1]
3673    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3674
3675    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3676                  for x in range(number_of_longitudes)]
3677    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3678                 for y in range(number_of_latitudes)]
3679
    # Reverse order of lat, so the first lat represents the first grid row
3681    latitudes.reverse()
3682
3683    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],longitudes[:],
3684                                                 minlat=minlat, maxlat=maxlat,
3685                                                 minlon=minlon, maxlon=maxlon)
3686
3687
3688    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3689    latitudes = latitudes[kmin:kmax]
3690    longitudes = longitudes[lmin:lmax]
3691    number_of_latitudes = len(latitudes)
3692    number_of_longitudes = len(longitudes)
3693    number_of_times = len(os.listdir(elevation_dir))
3694    number_of_points = number_of_latitudes*number_of_longitudes
3695    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3696
3697    #Work out the times
3698    if len(elevation_files) > 1:
3699        # Assume: The time period is less than 24hrs.
3700        time_period = (int(elevation_files[1][-3:]) - \
3701                      int(elevation_files[0][-3:]))*60*60
3702        times = [x*time_period for x in range(len(elevation_files))]
3703    else:
3704        times = [0.0]
3705
3706
3707    if verbose:
3708        print '------------------------------------------------'
3709        print 'Statistics:'
3710        print '  Extent (lat/lon):'
3711        print '    lat in [%f, %f], len(lat) == %d'\
3712              %(min(latitudes), max(latitudes),
3713                len(latitudes))
3714        print '    lon in [%f, %f], len(lon) == %d'\
3715              %(min(longitudes), max(longitudes),
3716                len(longitudes))
3717        print '    t in [%f, %f], len(t) == %d'\
3718              %(min(times), max(times), len(times))
3719
3720    ######### WRITE THE SWW FILE #############
3721    # NetCDF file definition
3722    outfile = NetCDFFile(sww_file, 'w')
3723
3724    #Create new file
3725    outfile.institution = 'Geoscience Australia'
3726    outfile.description = 'Converted from XXX'
3727
3728
3729    #For sww compatibility
3730    outfile.smoothing = 'Yes'
3731    outfile.order = 1
3732
3733    #Start time in seconds since the epoch (midnight 1/1/1970)
3734    outfile.starttime = starttime = times[0]
3735
3736
3737    # dimension definitions
3738    outfile.createDimension('number_of_volumes', number_of_volumes)
3739
3740    outfile.createDimension('number_of_vertices', 3)
3741    outfile.createDimension('number_of_points', number_of_points)
3742    outfile.createDimension('number_of_timesteps', number_of_times)
3743
3744    # variable definitions
3745    outfile.createVariable('x', precision, ('number_of_points',))
3746    outfile.createVariable('y', precision, ('number_of_points',))
3747    outfile.createVariable('elevation', precision, ('number_of_points',))
3748
3749    #FIXME: Backwards compatibility
3750    outfile.createVariable('z', precision, ('number_of_points',))
3751    #################################
3752
3753    outfile.createVariable('volumes', Int, ('number_of_volumes',
3754                                            'number_of_vertices'))
3755
3756    outfile.createVariable('time', precision,
3757                           ('number_of_timesteps',))
3758
3759    outfile.createVariable('stage', precision,
3760                           ('number_of_timesteps',
3761                            'number_of_points'))
3762
3763    outfile.createVariable('xmomentum', precision,
3764                           ('number_of_timesteps',
3765                            'number_of_points'))
3766
3767    outfile.createVariable('ymomentum', precision,
3768                           ('number_of_timesteps',
3769                            'number_of_points'))
3770
3771    #Store
3772    from anuga.coordinate_transforms.redfearn import redfearn
3773    x = zeros(number_of_points, Float)  #Easting
3774    y = zeros(number_of_points, Float)  #Northing
3775
3776    if verbose: print 'Making triangular grid'
3777    #Get zone of 1st point.
3778    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
3779
3780    vertices = {}
3781    i = 0
3782    for k, lat in enumerate(latitudes):
3783        for l, lon in enumerate(longitudes):
3784
3785            vertices[l,k] = i
3786
3787            zone, easting, northing = redfearn(lat,lon)
3788
            msg = 'Zone boundary crossed at longitude = %s' %lon
            #assert zone == refzone, msg
3791            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
3792            x[i] = easting
3793            y[i] = northing
3794            i += 1
3795
3796
3797    #Construct 2 triangles per 'rectangular' element
3798    volumes = []
3799    for l in range(number_of_longitudes-1):    #X direction
3800        for k in range(number_of_latitudes-1): #Y direction
3801            v1 = vertices[l,k+1]
3802            v2 = vertices[l,k]
3803            v3 = vertices[l+1,k+1]
3804            v4 = vertices[l+1,k]
3805
            #Note, this is different to the ferret2sww code
            #since the order of the lats is reversed.
3808            volumes.append([v1,v3,v2]) #Upper element
3809            volumes.append([v4,v2,v3]) #Lower element
3810
3811    volumes = array(volumes)
3812
3813    geo_ref = Geo_reference(refzone,min(x),min(y))
3814    geo_ref.write_NetCDF(outfile)
3815
3816    # This will put the geo ref in the middle
3817    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
3818
3819
3820    if verbose:
3821        print '------------------------------------------------'
3822        print 'More Statistics:'
        print '  Extent (x/y):'
        print '    x in [%f, %f], len(x) == %d'\
              %(min(x), max(x),
                len(x))
        print '    y in [%f, %f], len(y) == %d'\
              %(min(y), max(y),
                len(y))
3830        print 'geo_ref: ',geo_ref
3831
3832    z = resize(bath_grid,outfile.variables['z'][:].shape)
3833    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
3834    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
3835    outfile.variables['z'][:] = z
3836    outfile.variables['elevation'][:] = z  #FIXME HACK
3837    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
3838
3839    stage = outfile.variables['stage']
3840    xmomentum = outfile.variables['xmomentum']
3841    ymomentum = outfile.variables['ymomentum']
3842
3843    outfile.variables['time'][:] = times   #Store time relative
3844
3845    if verbose: print 'Converting quantities'
3846    n = number_of_times
3847    for j in range(number_of_times):
3848        # load in files
3849        elevation_meta, elevation_grid = \
3850           _read_asc(elevation_dir + os.sep + elevation_files[j])
3851
3852        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
3853        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])
3854
3855        #cut matrix to desired size
3856        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
3857        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
3858        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
3859       
3860        # handle missing values
3861        missing = (elevation_grid == elevation_meta['NODATA_value'])
3862        if sometrue (missing):
3863            if fail_on_NaN:
3864                msg = 'File %s contains missing values'\
3865                      %(elevation_files[j])
3866                raise DataMissingValuesError, msg
3867            else:
3868                elevation_grid = elevation_grid*(missing==0) + \
3869                                 missing*elevation_NaN_filler
3870
3871
3872        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
3873        i = 0
3874        for k in range(number_of_latitudes):      #Y direction
3875            for l in range(number_of_longitudes): #X direction
3876                w = zscale*elevation_grid[k,l] + mean_stage
3877                stage[j,i] = w
3878                h = w - z[i]
3879                xmomentum[j,i] = u_momentum_grid[k,l]*h
3880                ymomentum[j,i] = v_momentum_grid[k,l]*h
3881                i += 1
3882    outfile.close()
3883
3884def _get_min_max_indexes(latitudes_ref,longitudes_ref,
3885                        minlat=None, maxlat=None,
3886                        minlon=None, maxlon=None):
    """
    Return min/max indexes (for slicing) of the lat and long arrays,
    covering the area specified with min/max lat/long.

    Think of the latitudes and longitudes describing a 2d surface.
    The area returned is, if possible, just big enough to cover the
    input max/min area. (This will not be possible if the max/min area
    has a section outside of the latitudes/longitudes area.)

    Assert longitudes are sorted,
    long - from low to high (west to east, eg 148 - 151)
    Assert latitudes are sorted, ascending or descending
    """
3900    latitudes = latitudes_ref[:]
3901    longitudes = longitudes_ref[:]
3902
3903    latitudes = ensure_numeric(latitudes)
3904    longitudes = ensure_numeric(longitudes)
3905   
3906    assert allclose(sort(longitudes), longitudes)
3907   
3908    lat_ascending = True
3909    if not allclose(sort(latitudes), latitudes):
3910        lat_ascending = False
3911        # reverse order of lat, so it's in ascending order         
3912        latitudes = latitudes[::-1]
3913        assert allclose(sort(latitudes), latitudes)
3914    #print "latitudes  in funct", latitudes
3915   
3916    largest_lat_index = len(latitudes)-1
3917    #Cut out a smaller extent.
3918    if minlat == None:
3919        lat_min_index = 0
3920    else:
3921        lat_min_index = searchsorted(latitudes, minlat)-1
3922        if lat_min_index <0:
3923            lat_min_index = 0
3924
3925
3926    if maxlat == None:
3927        lat_max_index = largest_lat_index #len(latitudes)
3928    else:
3929        lat_max_index = searchsorted(latitudes, maxlat)
3930        if lat_max_index > largest_lat_index:
3931            lat_max_index = largest_lat_index
3932
3933    if minlon == None:
3934        lon_min_index = 0
3935    else:
3936        lon_min_index = searchsorted(longitudes, minlon)-1
3937        if lon_min_index <0:
3938            lon_min_index = 0
3939
3940    if maxlon == None:
3941        lon_max_index = len(longitudes)
3942    else:
3943        lon_max_index = searchsorted(longitudes, maxlon)
3944
    # Reversing the indexes, if the lat array is descending
3946    if lat_ascending is False:
3947        lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
3948                                       largest_lat_index - lat_min_index
3949    lat_max_index = lat_max_index + 1 # taking into account how slicing works
3950    lon_max_index = lon_max_index + 1 # taking into account how slicing works
3951
3952    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
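
# Illustrative sketch (a hypothetical helper): slicing indexes for a small
# descending-latitude grid, as produced by asc_csiro2sww.
def _example_get_min_max_indexes():
    latitudes = [-20.0, -21.0, -22.0, -23.0]   # descending
    longitudes = [148.0, 149.0, 150.0]         # ascending
    kmin, kmax, lmin, lmax = _get_min_max_indexes(
        latitudes, longitudes,
        minlat=-21.5, maxlat=-20.5, minlon=148.2, maxlon=148.8)
    # -> (0, 3, 0, 2): latitudes[0:3] and longitudes[0:2] just cover the area
    return latitudes[kmin:kmax], longitudes[lmin:lmax]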
3953
3954
3955def _read_asc(filename, verbose=False):
3956    """Read esri file from the following ASCII format (.asc)
3957
3958    Example:
3959
3960    ncols         3121
3961    nrows         1800
3962    xllcorner     722000
3963    yllcorner     5893000
3964    cellsize      25
3965    NODATA_value  -9999
3966    138.3698 137.4194 136.5062 135.5558 ..........
3967
3968    """
3969
3970    datafile = open(filename)
3971
3972    if verbose: print 'Reading DEM from %s' %(filename)
3973    lines = datafile.readlines()
3974    datafile.close()
3975
3976    if verbose: print 'Got', len(lines), ' lines'
3977
3978    ncols = int(lines.pop(0).split()[1].strip())
3979    nrows = int(lines.pop(0).split()[1].strip())
3980    xllcorner = float(lines.pop(0).split()[1].strip())
3981    yllcorner = float(lines.pop(0).split()[1].strip())
3982    cellsize = float(lines.pop(0).split()[1].strip())
3983    NODATA_value = float(lines.pop(0).split()[1].strip())
3984
3985    assert len(lines) == nrows
3986
3987    #Store data
3988    grid = []
3989
3990    n = len(lines)
3991    for i, line in enumerate(lines):
3992        cells = line.split()
3993        assert len(cells) == ncols
3994        grid.append(array([float(x) for x in cells]))
3995    grid = array(grid)
3996
3997    return {'xllcorner':xllcorner,
3998            'yllcorner':yllcorner,
3999            'cellsize':cellsize,
4000            'NODATA_value':NODATA_value}, grid
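
# Illustrative sketch (a hypothetical helper): parse a minimal 2x3 esri
# ascii grid written to a temporary file.
def _example_read_asc():
    import tempfile
    name = tempfile.mktemp('.asc')
    fd = open(name, 'w')
    fd.write('ncols 3\n' 'nrows 2\n' 'xllcorner 722000\n'
             'yllcorner 5893000\n' 'cellsize 25\n' 'NODATA_value -9999\n'
             '1.0 2.0 3.0\n' '4.0 5.0 6.0\n')
    fd.close()
    metadata, grid = _read_asc(name)
    assert metadata['cellsize'] == 25 and grid.shape == (2, 3)
    return metadata, grid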
4001
4002
4003
4004    ####  URS 2 SWW  ###
4005
4006lon_name = 'LON'
4007lat_name = 'LAT'
4008time_name = 'TIME'
precision = Float # So if we want to change the precision it's done here
4010class Write_nc:
4011    """
4012    Write an nc file.
4013   
4014    Note, this should be checked to meet cdc netcdf conventions for gridded
4015    data. http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml
4016   
4017    """
4018    def __init__(self,
4019                 quantity_name,
4020                 file_name,
4021                 time_step_count,
4022                 time_step,
4023                 lon,
4024                 lat):
4025        """
4026        time_step_count is the number of time steps.
4027        time_step is the time step size
4028       
        pre-condition: quantity_name must be 'HA', 'UA' or 'VA'.
4030        """
4031        self.quantity_name = quantity_name
4032        quantity_units= {'HA':'CENTIMETERS',
4033                              'UA':'CENTIMETERS/SECOND',
4034                              'VA':'CENTIMETERS/SECOND'}
4035       
4036        #self.file_name = file_name
4037        self.time_step_count = time_step_count
4038        self.time_step = time_step
4039
4040        # NetCDF file definition
4041        self.outfile = NetCDFFile(file_name, 'w')
4042        outfile = self.outfile       
4043
4044        #Create new file
4045        nc_lon_lat_header(outfile, lon, lat)
4046   
4047        # TIME
4048        outfile.createDimension(time_name, None)
4049        outfile.createVariable(time_name, precision, (time_name,))
4050
4051        #QUANTITY
4052        outfile.createVariable(self.quantity_name, precision,
4053                               (time_name, lat_name, lon_name))
4054        outfile.variables[self.quantity_name].missing_value=-1.e+034
4055        outfile.variables[self.quantity_name].units= \
4056                                 quantity_units[self.quantity_name]
4057        outfile.variables[lon_name][:]= ensure_numeric(lon)
4058        outfile.variables[lat_name][:]= ensure_numeric(lat)
4059
4060        #Assume no one will be wanting to read this, while we are writing
4061        #outfile.close()
4062       
4063    def store_timestep(self, quantity_slice):
4064        """
4065        quantity_slice is the data to be stored at this time step
4066        """
4067       
4068        outfile = self.outfile
4069       
4070        # Get the variables
4071        time = outfile.variables[time_name]
4072        quantity = outfile.variables[self.quantity_name]
4073           
4074        i = len(time)
4075
4076        #Store time
4077        time[i] = i*self.time_step #self.domain.time
4078        quantity[i,:] = quantity_slice*100 # To convert from m to cm
4079                                           # And m/s to cm/sec
4080       
4081    def close(self):
4082        self.outfile.close()
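
# Illustrative usage sketch (writes a small, hypothetical file demo_ha.nc):
# one 'HA' slice per time step is handed to store_timestep, which converts
# m to cm and appends it along the (unlimited) time dimension.
def _example_write_nc():
    from Numeric import zeros, Float
    lon = [148.0, 149.0]
    lat = [-21.0, -20.0]
    nc = Write_nc('HA', 'demo_ha.nc', 2, 60.0, lon, lat)
    for i in range(2):
        nc.store_timestep(zeros((len(lat), len(lon)), Float) + i)
    nc.close()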
4083
4084def urs2sww(basename_in='o', basename_out=None, verbose=False,
4085            remove_nc_files=True,
4086            minlat=None, maxlat=None,
4087            minlon= None, maxlon=None,
4088            mint=None, maxt=None,
4089            mean_stage=0,
4090            origin = None,
4091            zscale=1,
4092            fail_on_NaN=True,
4093            NaN_filler=0,
4094            elevation=None,
4095            gridded=True):
    """
    Convert URS C binary format for wave propagation to
    sww format native to abstract_2d_finite_volumes.

    Specify only basename_in and read files of the form
    basefilename-z-mux, basefilename-e-mux and basefilename-n-mux containing
    relative height, x-velocity and y-velocity, respectively.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum. The latitude and longitude
    information is for a grid.

    min's and max's: If omitted - full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees.
    NOTE: minlon is the most east boundary.

    origin is a 3-tuple with geo referenced
    UTM coordinates (zone, easting, northing).
    It will be the origin of the sww file. This shouldn't be used,
    since all of anuga should be able to handle an arbitrary origin.


    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
    which means that latitude is the fastest
    varying dimension (row major order, so to speak).

    In URS C binary the latitudes and longitudes are in ascending order.
    """
    if gridded is False:
        pass
        #FIXME: should call urs_ungridded2sww() here
4128       
4129    if basename_out == None:
4130        basename_out = basename_in
4131    files_out = urs2nc(basename_in, basename_out)
4132    ferret2sww(basename_out,
4133               minlat=minlat,
4134               maxlat=maxlat,
4135               minlon=minlon,
4136               maxlon=maxlon,
4137               mint=mint,
4138               maxt=maxt,
4139               mean_stage=mean_stage,
4140               origin=origin,
4141               zscale=zscale,
4142               fail_on_NaN=fail_on_NaN,
4143               NaN_filler=NaN_filler,
4144               inverted_bathymetry=True,
4145               verbose=verbose)
4146    #print "files_out",files_out
4147    if remove_nc_files:
4148        for file_out in files_out:
4149            os.remove(file_out)
4150   
4151def urs2nc(basename_in = 'o', basename_out = 'urs'):
    """
    Convert the 3 urs files to 4 nc files.

    The urs file names must be:
    [basename_in]-z-mux
    [basename_in]-e-mux
    [basename_in]-n-mux
    """
4161    files_in = [basename_in+'-z-mux',
4162                basename_in+'-e-mux',
4163                basename_in+'-n-mux']
4164    files_out = [basename_out+'_ha.nc',
4165                 basename_out+'_ua.nc',
4166                 basename_out+'_va.nc']
4167    quantities = ['HA','UA','VA']
4168
4169    for file_name in files_in:
4170        if os.access(file_name, os.F_OK) == 0 :
4171            msg = 'File %s does not exist or is not accessible' %file_name
4172            raise IOError, msg
4173       
4174    hashed_elevation = None
4175    for file_in, file_out, quantity in map(None, files_in,
4176                                           files_out,
4177                                           quantities):
4178        lonlatdep, lon, lat, depth = _binary_c2nc(file_in,
4179                                         file_out,
4180                                         quantity)
4181        #print "lonlatdep", lonlatdep
4182        if hashed_elevation == None:
4183            elevation_file = basename_out+'_e.nc'
4184            write_elevation_nc(elevation_file,
4185                                lon,
4186                                lat,
4187                                depth)
4188            hashed_elevation = myhash(lonlatdep)
4189        else:
4190            msg = "The elevation information in the mux files is inconsistent"
4191            assert hashed_elevation == myhash(lonlatdep), msg
4192    files_out.append(elevation_file)
4193    return files_out
4194   
4195def _binary_c2nc(file_in, file_out, quantity):
4196    """
4197    Reads in a quantity urs file and writes a quantity nc file.
4198    additionally, returns the depth and lat, long info,
4199    so it can be written to a file.
4200    """
4201    columns = 3 # long, lat , depth
4202    mux_file = open(file_in, 'rb')
4203   
4204    # Number of points/stations
4205    (points_num,)= unpack('i',mux_file.read(4))
4206
4207    # nt, int - Number of time steps
4208    (time_step_count,)= unpack('i',mux_file.read(4))
4209
4210    #dt, float - time step, seconds
4211    (time_step,) = unpack('f', mux_file.read(4))
4212   
4213    msg = "Bad data in the mux file."
4214    if points_num < 0:
4215        mux_file.close()
4216        raise ANUGAError, msg
4217    if time_step_count < 0:
4218        mux_file.close()
4219        raise ANUGAError, msg
4220    if time_step < 0:
4221        mux_file.close()
4222        raise ANUGAError, msg
4223   
4224    lonlatdep = p_array.array('f')
4225    lonlatdep.read(mux_file, columns * points_num)
4226    lonlatdep = array(lonlatdep, typecode=Float)   
4227    lonlatdep = reshape(lonlatdep, (points_num, columns))
4228   
4229    lon, lat, depth = lon_lat2grid(lonlatdep)
4230    lon_sorted = list(lon)
4231    lon_sorted.sort()
4232
    if list(lon) != lon_sorted:
        msg = "Longitudes in mux file are not in ascending order"
        raise IOError, msg
    lat_sorted = list(lat)
    lat_sorted.sort()

    if list(lat) != lat_sorted:
        msg = "Latitudes in mux file are not in ascending order"
        raise IOError, msg

4242    nc_file = Write_nc(quantity,
4243                       file_out,
4244                       time_step_count,
4245                       time_step,
4246                       lon,
4247                       lat)
4248
4249    for i in range(time_step_count):
4250        #Read in a time slice  from mux file 
4251        hz_p_array = p_array.array('f')
4252        hz_p_array.read(mux_file, points_num)
4253        hz_p = array(hz_p_array, typecode=Float)
4254        hz_p = reshape(hz_p, (len(lon), len(lat)))
4255        hz_p = transpose(hz_p) #mux has lat varying fastest, nc has long v.f.
4256
4257        #write time slice to nc file
4258        nc_file.store_timestep(hz_p)
4259    mux_file.close()
4260    nc_file.close()
4261
4262    return lonlatdep, lon, lat, depth
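
# Illustrative sketch (a hypothetical helper): the mux header read above
# is just three packed values - points_num (int), time_step_count (int)
# and time_step (float).
def _example_mux_header():
    from struct import pack, unpack
    header = pack('i', 5) + pack('i', 10) + pack('f', 0.5)
    (points_num,) = unpack('i', header[0:4])
    (time_step_count,) = unpack('i', header[4:8])
    (time_step,) = unpack('f', header[8:12])
    return points_num, time_step_count, time_step  # -> (5, 10, 0.5)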
4263   
4264
4265def write_elevation_nc(file_out, lon, lat, depth_vector):
4266    """
4267    Write an nc elevation file.
4268    """
4269   
4270    # NetCDF file definition
4271    outfile = NetCDFFile(file_out, 'w')
4272
4273    #Create new file
4274    nc_lon_lat_header(outfile, lon, lat)
4275   
4276    # ELEVATION
4277    zname = 'ELEVATION'
4278    outfile.createVariable(zname, precision, (lat_name, lon_name))
4279    outfile.variables[zname].units='CENTIMETERS'
4280    outfile.variables[zname].missing_value=-1.e+034
4281
4282    outfile.variables[lon_name][:]= ensure_numeric(lon)
4283    outfile.variables[lat_name][:]= ensure_numeric(lat)
4284
4285    depth = reshape(depth_vector, ( len(lat), len(lon)))
4286    outfile.variables[zname][:]= depth
4287   
4288    outfile.close()
4289   
4290def nc_lon_lat_header(outfile, lon, lat):
4291    """
4292    outfile is the netcdf file handle.
4293    lon - a list/array of the longitudes
4294    lat - a list/array of the latitudes
4295    """
4296   
4297    outfile.institution = 'Geoscience Australia'
4298    outfile.description = 'Converted from URS binary C'
4299   
4300    # Longitude
4301    outfile.createDimension(lon_name, len(lon))
4302    outfile.createVariable(lon_name, precision, (lon_name,))
4303    outfile.variables[lon_name].point_spacing='uneven'
4304    outfile.variables[lon_name].units='degrees_east'
4305    outfile.variables[lon_name].assignValue(lon)
4306
4307
4308    # Latitude
4309    outfile.createDimension(lat_name, len(lat))
4310    outfile.createVariable(lat_name, precision, (lat_name,))
4311    outfile.variables[lat_name].point_spacing='uneven'
4312    outfile.variables[lat_name].units='degrees_north'
4313    outfile.variables[lat_name].assignValue(lat)
4314
4315
4316   
4317def lon_lat2grid(long_lat_dep):
    """
    Given a list of points that are assumed to be on a grid,
    return the long's and lat's of the grid.
    long_lat_dep is an array where each row is a position.
    The first column is longitudes.
    The second column is latitudes.

    The latitude is the fastest varying dimension - in mux files.
    """
4327    LONG = 0
4328    LAT = 1
4329    QUANTITY = 2
4330
4331    long_lat_dep = ensure_numeric(long_lat_dep, Float)
4332   
4333    num_points = long_lat_dep.shape[0]
4334    this_rows_long = long_lat_dep[0,LONG]
4335
    # Count the length of unique latitudes
    # (check i < num_points first, to avoid indexing past the end)
    i = 0
    while i < num_points and long_lat_dep[i,LONG] == this_rows_long:
        i += 1
    # Determine the lats and longs from the grid
4341    lat = long_lat_dep[:i, LAT]       
4342    long = long_lat_dep[::i, LONG]
4343   
4344    lenlong = len(long)
4345    lenlat = len(lat)
4346    #print 'len lat', lat, len(lat)
4347    #print 'len long', long, len(long)
4348         
4349    msg = 'Input data is not gridded'     
4350    assert num_points % lenlat == 0, msg
4351    assert num_points % lenlong == 0, msg
4352         
4353    # Test that data is gridded       
4354    for i in range(lenlong):
4355        msg = 'Data is not gridded.  It must be for this operation'
4356        first = i*lenlat
4357        last = first + lenlat
4358               
4359        assert allclose(long_lat_dep[first:last,LAT], lat), msg
4360        assert allclose(long_lat_dep[first:last,LONG], long[i]), msg
4361   
4362   
4363#    print 'range long', min(long), max(long)
4364#    print 'range lat', min(lat), max(lat)
4365#    print 'ref long', min(long_lat_dep[:,0]), max(long_lat_dep[:,0])
4366#    print 'ref lat', min(long_lat_dep[:,1]), max(long_lat_dep[:,1])
4367   
4368   
4369   
    msg = 'Out of range latitudes/longitudes'
    for l in lat: assert -90 < l < 90, msg
    for l in long: assert -180 < l < 180, msg
4373
4374    # Changing quantity from lat being the fastest varying dimension to
4375    # long being the fastest varying dimension
4376    # FIXME - make this faster/do this a better way
4377    # use numeric transpose, after reshaping the quantity vector
4378#    quantity = zeros(len(long_lat_dep), Float)
4379    quantity = zeros(num_points, Float)
4380   
4381#    print 'num',num_points
4382    for lat_i, _ in enumerate(lat):
4383        for long_i, _ in enumerate(long):
4384            q_index = lat_i*lenlong+long_i
4385            lld_index = long_i*lenlat+lat_i
4386#            print 'lat_i', lat_i, 'long_i',long_i, 'q_index', q_index, 'lld_index', lld_index
4387            temp = long_lat_dep[lld_index, QUANTITY]
4388            quantity[q_index] = temp
4389           
4390    return long, lat, quantity
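
# Illustrative sketch (a hypothetical helper): six points on a
# 2 (long) x 3 (lat) grid with latitude varying fastest, as in mux files.
def _example_lon_lat2grid():
    from Numeric import array, Float
    long_lat_dep = array([[148.0, -21.0, 1.0],
                          [148.0, -20.5, 2.0],
                          [148.0, -20.0, 3.0],
                          [149.0, -21.0, 4.0],
                          [149.0, -20.5, 5.0],
                          [149.0, -20.0, 6.0]], Float)
    long, lat, quantity = lon_lat2grid(long_lat_dep)
    # long -> [148, 149], lat -> [-21, -20.5, -20],
    # quantity -> [1, 4, 2, 5, 3, 6]  (longitude now varying fastest)
    return long, lat, quantity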
4391
4392    ####  END URS 2 SWW  ###
4393
4394    #### URS UNGRIDDED 2 SWW ###
4395
4396    ### PRODUCING THE POINTS NEEDED FILE ###
4397LL_LAT = -50.0
4398LL_LONG = 80.0
4399GRID_SPACING = 1.0/60.0
4400LAT_AMOUNT = 4800
4401LONG_AMOUNT = 3600
4402def URS_points_needed_to_file(file_name, boundary_polygon,
4403                              ll_lat=LL_LAT, ll_long=LL_LONG,
4404                              grid_spacing=GRID_SPACING, 
4405                              lat_amount=LAT_AMOUNT, long_amount=LONG_AMOUNT,
4406                              zone=None, export_csv=False, use_cache=False,
4407                              verbose=False):
    """
    file_name - name of the urs file produced for David.
    boundary_polygon - a list of points that describes a polygon.
                      The last point is assumed to join the first point.
                      This is in UTM (lat long would be better though)

    ll_lat - lower left latitude, in decimal degrees
    ll_long - lower left longitude, in decimal degrees
    grid_spacing - in decimal degrees


    Don't add the file extension.  It will be added.
    """
4421    geo = URS_points_needed(boundary_polygon, ll_lat, ll_long, grid_spacing, 
4422                      lat_amount, long_amount, zone, use_cache, verbose)
4423    if not file_name[-4:] == ".urs":
4424        file_name += ".urs"
4425    geo.export_points_file(file_name)
4426    if export_csv:
4427        if file_name[-4:] == ".urs":
4428            file_name = file_name[:-4] + ".csv"
4429        geo.export_points_file(file_name)
4430
4431def URS_points_needed(boundary_polygon, ll_lat=LL_LAT,
4432                      ll_long=LL_LONG, grid_spacing=GRID_SPACING, 
4433                      lat_amount=LAT_AMOUNT, long_amount=LONG_AMOUNT,
4434                      zone=None, use_cache=False, verbose=False):
    args = (boundary_polygon,) # Note the trailing comma: args must be a tuple
4436    kwargs = {'ll_lat': ll_lat,
4437              'll_long': ll_long,
4438              'grid_spacing': grid_spacing,
4439              'lat_amount': lat_amount,
4440              'long_amount': long_amount,
4441              'zone': zone} 
    if use_cache is True:
        try:
            from anuga.caching import cache
        except ImportError:
            msg = 'Caching was requested, but caching module '+\
                  'could not be imported'
            raise ImportError, msg
4449
4450
4451        geo = cache(_URS_points_needed,
4452                  args, kwargs,
4453                  verbose=verbose,
4454                  compression=False)
4455    else:
        #Calling apply(_URS_points_needed, args, kwargs) gave
        #'got multiple values for keyword argument' errors when args
        #was not a tuple, so call directly instead.
4458        geo = _URS_points_needed(boundary_polygon, ll_lat,
4459                      ll_long, grid_spacing, 
4460                      lat_amount, long_amount,
4461                      zone)
4462
    return geo

4464def _URS_points_needed(boundary_polygon, ll_lat=LL_LAT,
4465                      ll_long=LL_LONG, grid_spacing=GRID_SPACING, 
4466                      lat_amount=LAT_AMOUNT, long_amount=LONG_AMOUNT,
4467                      zone=None):
    """
    boundary_polygon - a list of points that describes a polygon.
                      The last point is assumed to join the first point.
                      This is in UTM (lat long would be better though)

    ll_lat - lower left latitude, in decimal degrees
    ll_long - lower left longitude, in decimal degrees
    grid_spacing - in decimal degrees

    """
4479    #FIXME cache this function!
4480   
4481    from sets import ImmutableSet
4482   
    msg = "grid_spacing can not be zero"
    assert grid_spacing != 0, msg
4485    a = boundary_polygon
    # List of segments.  Each segment is two points.
    # (a[i-1] with i == 0 wraps around to the last point, closing the polygon.)
    segs = [[a[i-1], a[i]] for i in range(len(a))]
4488
4489    # convert the segs to Lat's and longs.
4490    if zone is None:
4491        # Assume the zone of the segments is the same as the lower left
4492        # corner of the lat long data
4493        zone, _, _ = redfearn(ll_lat, ll_long)
4494    lat_long_set = ImmutableSet()
4495    for seg in segs:
4496        points_lat_long = points_needed(seg, ll_lat, ll_long, grid_spacing, 
4497                      lat_amount, long_amount, zone)
4498        lat_long_set |= ImmutableSet(points_lat_long)
4499    #print "lat_long_set",lat_long_set
4500    geo = Geospatial_data(data_points=list(lat_long_set),
4501                              points_are_lats_longs=True)
4502    return geo
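
# Illustrative sketch (a hypothetical helper): the wrap-around segment
# construction used in _URS_points_needed above.
def _example_boundary_segments():
    a = [[0.0, 0.0], [1.0, 0.0], [1.0, 1.0]]  # a triangle, in UTM
    segs = [[a[i-1], a[i]] for i in range(len(a))]
    # -> [[[1,1],[0,0]], [[0,0],[1,0]], [[1,0],[1,1]]]: a closed polygon
    return segs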
4503   
4504def points_needed(seg, ll_lat, ll_long, grid_spacing, 
4505                  lat_amount, long_amount, zone):
4506    """
4507    return a list of the points, in lats and longs that are needed to
4508    interpolate any point on the segment.
4509    """
4510    from math import sqrt
4511    #print "zone",zone
4512    geo_reference = Geo_reference(zone=zone)
4513    #print "seg",seg
4514    geo = Geospatial_data(seg,geo_reference=geo_reference)
4515    seg_lat_long = geo.get_data_points(as_lat_long=True)
4516    #print "seg_lat_long", seg_lat_long
4517    # 1.415 = 2^0.5, rounded up....
4518    sqrt_2_rounded_up = 1.415
4519    buffer = sqrt_2_rounded_up * grid_spacing
4520   
4521    #
4522   
4523    max_lat = max(seg_lat_long[0][0], seg_lat_long[1][0]) + buffer
4524    max_long = max(seg_lat_long[0][1], seg_lat_long[1][1]) + buffer
4525    min_lat = min(seg_lat_long[0][0], seg_lat_long[1][0]) - buffer
4526    min_long = min(seg_lat_long[0][1], seg_lat_long[1][1]) - buffer
4527
4528    first_row = (min_long - ll_long)/grid_spacing
4529    # To round up
4530    first_row_long = int(round(first_row + 0.5))
4531    #print "first_row", first_row_long
4532
4533    last_row = (max_long - ll_long)/grid_spacing # round down
4534    last_row_long = int(round(last_row))
4535    #print "last_row",last_row _long
4536   
4537    first_row = (min_lat - ll_lat)/grid_spacing
4538    # To round up
4539    first_row_lat = int(round(first_row + 0.5))
4540    #print "first_row", first_row_lat
4541
4542    last_row = (max_lat - ll_lat)/grid_spacing # round down
4543    last_row_lat = int(round(last_row))
4544    #print "last_row",last_row_lat
4545
    # Work out the max distance:
    # 111120 m is roughly the distance spanned by 1 degree of latitude,
    # and 157147.4112 = sqrt(2) * 111120.
    #max_distance = sqrt_2_rounded_up * 111120 * grid_spacing
    max_distance = 157147.4112 * grid_spacing
4550    #print "max_distance", max_distance #2619.12 m for 1 minute
4551    points_lat_long = []
4552    # Create a list of the lat long points to include.
4553    for index_lat in range(first_row_lat, last_row_lat + 1):
4554        for index_long in range(first_row_long, last_row_long + 1):
4555            lat = ll_lat + index_lat*grid_spacing
4556            long = ll_long + index_long*grid_spacing
4557
4558            #filter here to keep good points
4559            if keep_point(lat, long, seg, max_distance):
4560                points_lat_long.append((lat, long)) #must be hashable
4561    #print "points_lat_long", points_lat_long
4562
    # Points too far from the segment have already been filtered out
    # by keep_point above.
    return points_lat_long
4565
4566def keep_point(lat, long, seg, max_distance):
4567    """
4568    seg is two points, UTM
4569    """
4570    from math import sqrt
4571    _ , x0, y0 = redfearn(lat, long)
4572    x1 = seg[0][0]
4573    y1 = seg[0][1]
4574    x2 = seg[1][0]
4575    y2 = seg[1][1]
4576
    x2_1 = x2-x1
    y2_1 = y2-y1
    # Perpendicular distance from (x0, y0) to the line through the segment
    d = abs((x2_1)*(y1-y0)-(x1-x0)*(y2_1))/sqrt((x2_1)*(x2_1)+(y2_1)*(y2_1))
    return d <= max_distance
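
# Illustrative sketch (plain numbers instead of redfearn output): the
# perpendicular distance from (0, 1) to the line through (0, 0) and (2, 0)
# is 1, so such a point is kept whenever max_distance >= 1.
def _example_point_to_line_distance():
    from math import sqrt
    x0, y0 = 0.0, 1.0
    x1, y1, x2, y2 = 0.0, 0.0, 2.0, 0.0
    x2_1, y2_1 = x2-x1, y2-y1
    d = abs(x2_1*(y1-y0) - (x1-x0)*y2_1)/sqrt(x2_1*x2_1 + y2_1*y2_1)
    return d  # -> 1.0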
4584   
4585    #### CONVERTING UNGRIDDED URS DATA TO AN SWW FILE ####
4586def urs_ungridded2sww_link(basename_in='o', basename_out=None, verbose=False,
4587            remove_nc_files=True,
4588            minlat=None, maxlat=None,
4589            minlon= None, maxlon=None,
4590            mint=None, maxt=None,
4591            mean_stage=0,
4592            origin = None,
4593            zscale=1,
4594            fail_on_NaN=True,
4595            NaN_filler=0,
4596            elevation=None):
    """
    Link stub: parameters are accepted but not validated or used yet.
    """
4600    pass
4601   
4602def urs_ungridded2sww(basename_in='o', basename_out=None, verbose=False,
4603            mint=None, maxt=None,
4604            mean_stage=0,
4605            origin = None,
4606            zscale=1):
    """
    Convert URS C binary format for wave propagation to
    sww format native to abstract_2d_finite_volumes.

    Specify only basename_in and read files of the form
    basefilename-z-mux, basefilename-e-mux and basefilename-n-mux containing
    relative height, x-velocity and y-velocity, respectively.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum. The latitude and longitude
    information is assumed to be ungridded.

    min's and max's: If omitted - full extent is used.
    To include a value min and max may equal it.
    Lat and lon are assumed to be in decimal degrees.

    origin is a 3-tuple with geo referenced
    UTM coordinates (zone, easting, northing).
    It will be the origin of the sww file. This shouldn't be used,
    since all of anuga should be able to handle an arbitrary origin.


    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
    which means that latitude is the fastest
    varying dimension (row major order, so to speak).

    In URS C binary the latitudes and longitudes are in ascending order.
    """
4635    from anuga.pmesh.mesh import Mesh
4636
4637    files_in = [basename_in+'-z-mux',
4638                basename_in+'-e-mux',
4639                basename_in+'-n-mux']
4640    quantities = ['HA','UA','VA']
4641
    # Instantiate Urs_points for the three mux files.
4643    mux = {}
4644    for quantity, file in map(None, quantities, files_in):
4645        mux[quantity] = Urs_points(file)
4646       
4647    # Could check that the depth is the same. (hashing)
4648
4649    # handle to a mux file to do depth stuff
4650    a_mux = mux[quantities[0]]
4651   
4652    # Convert to utm
4653    lat = a_mux.lonlatdep[:,1]
4654    long = a_mux.lonlatdep[:,0]
4655    points_utm, zone = convert_from_latlon_to_utm( \
4656        latitudes=lat, longitudes=long)
4657    #print "points_utm", points_utm
4658    #print "zone", zone
4659
    elevation = a_mux.lonlatdep[:,2] * -1 # convert depth to elevation
4661   
4662    # grid ( create a mesh from the selected points)
    # This mesh has a problem.  Triangles are stretched over ungridded areas.
4664    #  If these areas could be described as holes in pmesh, that would be great
4665   
4666    mesh = Mesh()
4667    mesh.add_vertices(points_utm)
4668    mesh.auto_segment()
4669    mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
4670    mesh_dic = mesh.Mesh2MeshList()
4671
4672    #mesh.export_mesh_file(basename_in + '.tsh')
4673    # These are the times of the mux file
4674    mux_times = []
4675    for i in range(a_mux.time_step_count):
4676        mux_times.append(a_mux.time_step * i) 
4677    mux_times_start_i, mux_times_fin_i = mux2sww_time(mux_times, mint, maxt)
4678    times = mux_times[mux_times_start_i:mux_times_fin_i]
4679   
4680    if mux_times_start_i == mux_times_fin_i:
4681        # Close the mux files
4682        for quantity, file in map(None, quantities, files_in):
4683            mux[quantity].close()
        msg = "Due to mint and maxt there's no time info in the boundary SWW."
        raise DataTimeError, msg

    # If this raise is removed there are currently no downstream errors
4688           
4689    points_utm=ensure_numeric(points_utm)
4690    #print "mesh_dic['generatedpointlist']", mesh_dic['generatedpointlist']
4691    #print "points_utm", points_utm
    assert allclose(ensure_numeric(mesh_dic['generatedpointlist']),
                    ensure_numeric(points_utm))
4694   
4695    volumes = mesh_dic['generatedtrianglelist']
4696   
4697    # write sww intro and grid stuff.   
4698    if basename_out is None:
4699        swwname = basename_in + '.sww'
4700    else:
4701        swwname = basename_out + '.sww'
4702
4703    outfile = NetCDFFile(swwname, 'w')
4704    # For a different way of doing this, check out tsh2sww
4705    # work out sww_times and the index range this covers
4706    write_sww_header(outfile, times, len(volumes), len(points_utm))
4707    outfile.mean_stage = mean_stage
4708    outfile.zscale = zscale
4709    write_sww_triangulation(outfile, points_utm, volumes,
4710                            elevation, zone,  origin=origin, verbose=verbose)
4711   
4712    if verbose: print 'Converting quantities'
4713    j = 0
4714    # Read in a time slice from each mux file and write it to the sww file
4715    for HA, UA, VA in map(None, mux['HA'], mux['UA'], mux['VA']):
4716        if j >= mux_times_start_i and j < mux_times_fin_i:
4717            write_sww_time_slice(outfile, HA, UA, VA,  elevation,
4718                                 j - mux_times_start_i,
4719                                 mean_stage=mean_stage, zscale=zscale,
4720                                 verbose=verbose)
4721        j += 1
4722    outfile.close()
4723    #
4724    # Do some conversions while writing the sww file
4725def mux2sww_time(mux_times, mint, maxt):
    """
    Return the (start, finish) indices into mux_times that cover
    the interval [mint, maxt].
    """
4728
4729    if mint == None:
4730        mux_times_start_i = 0
4731    else:
4732        mux_times_start_i = searchsorted(mux_times, mint)
4733       
4734    if maxt == None:
4735        mux_times_fin_i = len(mux_times)
4736    else:
4737        maxt += 0.5 # so if you specify a time where there is
4738                    # data that time will be included
4739        mux_times_fin_i = searchsorted(mux_times, maxt)
4740
4741    return mux_times_start_i, mux_times_fin_i
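
# Illustrative sketch (a hypothetical helper): note that maxt is padded by
# 0.5s above, so a slice lying exactly at maxt is included.
def _example_mux2sww_time():
    mux_times = [0.0, 10.0, 20.0, 30.0]
    start_i, fin_i = mux2sww_time(mux_times, mint=5.0, maxt=20.0)
    # -> (1, 3); mux_times[1:3] == [10.0, 20.0]
    return start_i, fin_i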
4742
def write_sww_header(outfile, times, number_of_volumes, number_of_points):
    """
    outfile - handle to the open NetCDF file that will be written to
    times - A list of the time slice times
    number_of_volumes - the number of triangles
    number_of_points - the number of vertices
    """
4749    number_of_times = len(times)
4750   
4751    outfile.institution = 'Geoscience Australia'
4752    outfile.description = 'Converted from XXX'
4753
4754
4755    #For sww compatibility
4756    outfile.smoothing = 'Yes'
4757    outfile.order = 1
4758
4759    #Start time in seconds since the epoch (midnight 1/1/1970)
4760    if len(times) == 0:
4761        outfile.starttime = starttime = 0
4762    else:
4763        outfile.starttime = starttime = times[0]
4764
4765
4766    # dimension definitions
4767    outfile.createDimension('number_of_volumes', number_of_volumes)
4768
4769    outfile.createDimension('number_of_vertices', 3)
4770    outfile.createDimension('number_of_points', number_of_points)
4771    outfile.createDimension('number_of_timesteps', number_of_times)
4772
4773    # variable definitions
4774    outfile.createVariable('x', precision, ('number_of_points',))
4775    outfile.createVariable('y', precision, ('number_of_points',))
4776    outfile.createVariable('elevation', precision, ('number_of_points',))
4777
4778    #FIXME: Backwards compatibility
4779    outfile.createVariable('z', precision, ('number_of_points',))
4780    #################################
4781
4782    outfile.createVariable('volumes', Int, ('number_of_volumes',
4783                                            'number_of_vertices'))
4784
4785    outfile.createVariable('time', precision,
4786                           ('number_of_timesteps',))
4787
4788    outfile.createVariable('stage', precision,
4789                           ('number_of_timesteps',
4790                            'number_of_points'))
4791
4792    outfile.createVariable('xmomentum', precision,
4793                           ('number_of_timesteps',
4794                            'number_of_points'))
4795
4796    outfile.createVariable('ymomentum', precision,
4797                           ('number_of_timesteps',
4798                            'number_of_points'))
4799   
4800    outfile.variables['time'][:] = times   
4801
4802
def write_sww_triangulation(outfile, points_utm, volumes,
                            elevation, zone, origin=None, verbose=False):

    number_of_points = len(points_utm)
    volumes = array(volumes)

    if origin is not None:
        if isinstance(origin, Geo_reference):
            geo_ref = origin
        else:
            geo_ref = apply(Geo_reference, origin)
    else:
        geo_ref = Geo_reference(zone, min(points_utm[:,0]),
                                min(points_utm[:,1]))
    geo_ref.write_NetCDF(outfile)

    # This would put the geo ref in the middle instead:
    #geo_ref = Geo_reference(refzone, (max(x)+min(x))/2.0, (max(y)+min(y))/2.0)
    x = points_utm[:,0]
    y = points_utm[:,1]
    #FIXME the w.r.t time is lost now..
    if verbose:
        print '------------------------------------------------'
        print 'More Statistics:'
        print '  Extent:'
        print '    x in [%f, %f], len(x) == %d'\
              %(min(x), max(x), len(x))
        print '    y in [%f, %f], len(y) == %d'\
              %(min(y), max(y), len(y))
        print '    z in [%f, %f], len(z) == %d'\
              %(min(elevation), max(elevation), len(elevation))
        print 'geo_ref: ', geo_ref
        print '------------------------------------------------'

    outfile.variables['x'][:] = points_utm[:,0] - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = points_utm[:,1] - geo_ref.get_yllcorner()
    outfile.variables['z'][:] = elevation
    outfile.variables['elevation'][:] = elevation  #FIXME HACK
    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64

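# Note on the 'origin' argument above (an illustrative sketch; the numbers
# are made up): it may be either a Geo_reference instance or a tuple of
# Geo_reference constructor arguments (zone, xllcorner, yllcorner), e.g.
#
#   write_sww_triangulation(outfile, points_utm, volumes, elevation,
#                           zone=56, origin=(56, 308500.0, 6180000.0))
#
# If origin is None, the lower-left corner of the point set is used.
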
def write_sww_time_slices(outfile, has, uas, vas, elevation,
                          mean_stage=0, zscale=1,
                          verbose=False):
    #Time stepping
    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    n = len(has)
    j = 0
    for ha, ua, va in map(None, has, uas, vas):
        if verbose and j%((n+10)/10) == 0: print '  Doing %d of %d' %(j, n)
        w = zscale*ha + mean_stage   # stage (absolute water level)
        stage[j] = w
        h = w - elevation            # water depth
        xmomentum[j] = ua*h          # velocity * depth = momentum
        ymomentum[j] = va*h
        j += 1

def write_sww_time_slice(outfile, ha, ua, va, elevation, slice_index,
                         mean_stage=0, zscale=1,
                         verbose=False):
    #Time stepping
    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    w = zscale*ha + mean_stage   # stage (absolute water level)
    stage[slice_index] = w
    h = w - elevation            # water depth
    xmomentum[slice_index] = ua*h
    ymomentum[slice_index] = va*h

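# Worked example of the slice arithmetic above (made-up numbers): with
# zscale=1, mean_stage=0.5 and ha=0.2, the stored stage is
# w = 1*0.2 + 0.5 = 0.7 m.  Over a bed at elevation=-10.0 m the depth is
# h = 0.7 - (-10.0) = 10.7 m, so a velocity ua = 0.3 m/s is stored as
# xmomentum = 0.3*10.7 = 3.21 m^2/s.
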
def urs2txt(basename_in, location_index=None):
    """
    Not finished or tested
    """
    files_in = [basename_in + '-z-mux',
                basename_in + '-e-mux',
                basename_in + '-n-mux']
    quantities = ['HA', 'UA', 'VA']

    d = ","

    # Instantiate a Urs_points object for each of the three mux files.
    mux = {}
    for quantity, filename in map(None, quantities, files_in):
        mux[quantity] = Urs_points(filename)

    # Could check that the depth is the same. (hashing)

    # Handle to a mux file to do depth stuff
    a_mux = mux[quantities[0]]

    # Convert to utm
    latitudes = a_mux.lonlatdep[:,1]
    longitudes = a_mux.lonlatdep[:,0]
    points_utm, zone = convert_from_latlon_to_utm( \
        latitudes=latitudes, longitudes=longitudes)
    depths = a_mux.lonlatdep[:,2]

    fid = open(basename_in + '.txt', 'w')

    fid.write("zone: " + str(zone) + "\n")

    if location_index is not None:
        #Title
        li = location_index
        fid.write('location_index' +d+ 'lat' +d+ 'long' +d+ 'Easting' +d+ \
                  'Northing' + "\n")
        fid.write(str(li) +d+ str(latitudes[li]) +d+ \
                  str(longitudes[li]) +d+ str(points_utm[li][0]) +d+ \
                  str(points_utm[li][1]) + "\n")

    # The non-time dependent stuff
    #Title
    fid.write('location_index' +d+ 'lat' +d+ 'long' +d+ 'Easting' +d+ \
              'Northing' +d+ 'depth m' + "\n")
    i = 0
    for depth, point_utm, lat, lon in map(None, depths,
                                          points_utm, latitudes,
                                          longitudes):
        fid.write(str(i) +d+ str(lat) +d+ str(lon) +d+ str(point_utm[0]) +d+ \
                  str(point_utm[1]) +d+ str(depth) + "\n")
        i += 1

    #Time dependent
    if location_index is not None:
        time_step = a_mux.time_step
        i = 0
        #Title
        fid.write('time' +d+ 'HA height m' +d+ \
                  'UA velocity East m/sec' +d+ 'VA velocity North m/sec' \
                  + "\n")
        for HA, UA, VA in map(None, mux['HA'], mux['UA'], mux['VA']):
            fid.write(str(i*time_step) +d+ str(HA[location_index]) +d+ \
                      str(UA[location_index]) +d+ str(VA[location_index]) \
                      + "\n")
            i += 1

    fid.close()

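# Intended use of urs2txt (a sketch; 'swampy' is a made-up basename and the
# function is flagged above as not finished or tested).  It reads
# swampy-z-mux, swampy-e-mux and swampy-n-mux and writes a comma separated
# summary to swampy.txt; passing location_index also writes the time
# series at that station:
#
#   urs2txt('swampy', location_index=0)
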
class Urs_points:
    """
    Read the info in URS mux files.

    For the quantities, the correspondence between the file names and
    what they contain is:
    z-mux is height above sea level, m
    e-mux is velocity in the Eastern direction, m/s
    n-mux is velocity in the Northern direction, m/s
    """
    def __init__(self, urs_file):
        self.iterated = False
        columns = 3 # long, lat, depth
        mux_file = open(urs_file, 'rb')

        # Number of points/stations
        (self.points_num,) = unpack('i', mux_file.read(4))

        # nt, int - Number of time steps
        (self.time_step_count,) = unpack('i', mux_file.read(4))

        # dt, float - time step, seconds
        (self.time_step,) = unpack('f', mux_file.read(4))

        msg = "Bad data in the urs file."
        if self.points_num < 0:
            mux_file.close()
            raise ANUGAError, msg
        if self.time_step_count < 0:
            mux_file.close()
            raise ANUGAError, msg
        if self.time_step < 0:
            mux_file.close()
            raise ANUGAError, msg

        # The depth is in metres; it is the distance from the ocean
        # surface to the sea bottom.
        lonlatdep = p_array.array('f')
        lonlatdep.read(mux_file, columns * self.points_num)
        lonlatdep = array(lonlatdep, typecode=Float)
        lonlatdep = reshape(lonlatdep, (self.points_num, columns))
        self.lonlatdep = lonlatdep

        self.mux_file = mux_file

    def __iter__(self):
        """
        Iterate over the quantity data, which is with respect to time.

        Note: You can only iterate once over an object.

        Returns quantity information for each time slice.
        """
        msg = "You can only iterate once over a urs file."
        assert not self.iterated, msg
        self.iter_time_step = 0
        self.iterated = True
        return self

    def next(self):
        if self.time_step_count == self.iter_time_step:
            self.close()
            raise StopIteration
        # Read in a time slice from the mux file
        hz_p_array = p_array.array('f')
        hz_p_array.read(self.mux_file, self.points_num)
        hz_p = array(hz_p_array, typecode=Float)
        self.iter_time_step += 1

        return hz_p

    def close(self):
        self.mux_file.close()

    #### END URS UNGRIDDED 2 SWW ###

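# Illustrative use of Urs_points (a sketch; the file name matches the
# sample used in the __main__ block below): collect every time slice of
# one quantity into a list of Numeric arrays.
#
#   e_mux = Urs_points('urs2sww_test/o_velocity-e-mux')
#   slices = [hz_p for hz_p in e_mux]  # len(slices) == e_mux.time_step_count
#
# The iterator closes the underlying file when exhausted, and an instance
# can only be iterated once.
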
#-------------------------------------------------------------
if __name__ == "__main__":
    # Simple smoke test: iterate over a sample mux file, printing each
    # time slice.
    points = Urs_points('urs2sww_test/o_velocity-e-mux')
    for i in points:
        print 'i', i
