source: trunk/anuga_core/source/anuga/file/sww.py @ 8209

Last change on this file since 8209 was 8209, checked in by steve, 13 years ago

Committing quite a few changes to operators and parallel stuff

File size: 42.6 KB
Line 
1""" Classes to read an SWW file.
2"""
3
import exceptions


# Exception hierarchy used by the SWW reading/writing classes in this module.
# (Fix: DataMissingValuesError was previously defined twice; the redundant
# second definition has been removed.)
class DataFileNotOpenError(exceptions.Exception):
    """Raised when a data file cannot be (re)opened, e.g. for append."""
    pass

class DataMissingValuesError(exceptions.Exception):
    """Raised when expected values are missing from a data file."""
    pass

class NewQuantity(exceptions.Exception):
    """Raised when a required quantity was not supplied by the caller."""
    pass

class DataDomainError(exceptions.Exception):
    """Raised for inconsistencies between a data file and a domain."""
    pass

class DataTimeError(exceptions.Exception):
    """Raised for problems with the time data in a data file."""
    pass
11
12
13from anuga.coordinate_transforms.geo_reference import Geo_reference
14from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a
15from anuga.config import netcdf_float, netcdf_float32, netcdf_int
16from anuga.config import max_float
17from anuga.utilities.numerical_tools import ensure_numeric
18import anuga.utilities.log as log
19from Scientific.IO.NetCDF import NetCDFFile
20
21from sts import Write_sts
22
23from anuga.coordinate_transforms.geo_reference import \
24        ensure_geo_reference
25
26from file_utils import create_filename
27import numpy as num
28
class Data_format:
    """Generic interface to data formats
    """

    def __init__(self, domain, extension, mode=netcdf_mode_w):
        # Only read, write and append make sense for a data file.
        msg = "Mode %s must be either:\n" % mode
        msg += "   'w' (write)\n"
        msg += "   'r' (read)\n"
        msg += "   'a' (append)"
        assert mode[0] in ['r', 'w', 'a'], msg

        # Derive the output filename from the domain's data directory,
        # simulation name and the requested extension.
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        self.domain = domain
        self.timestep = 0

        # Probably should exclude ghosts in case this is a parallel domain
        self.number_of_nodes = domain.number_of_nodes
        self.number_of_volumes = domain.number_of_triangles

        #FIXME: Should we have a general set_precision function?
55
class SWW_file(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """

    def __init__(self, domain, 
                 mode=netcdf_mode_w, max_size=2000000000, recursion=False):
        # domain    - the (possibly parallel) domain whose output is stored
        # mode      - NetCDF open mode; 'w' creates the header from scratch
        # max_size  - rollover threshold in bytes (see store_timestep)
        # recursion - rollover generation counter; False/0 for the first file
        from Scientific.IO.NetCDF import NetCDFFile

        self.precision = netcdf_float32 # Use single precision for quantities
        self.recursion = recursion
        self.mode = mode
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size # File size max is 2Gig
        else:
            self.max_size = max_size
        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            # NOTE(review): default_minimum_storable_height is not defined or
            # imported anywhere in the visible part of this module - verify it
            # is provided elsewhere, otherwise this branch raises NameError.
            self.minimum_storable_height = default_minimum_storable_height

        # Call parent constructor
        Data_format.__init__(self, domain, 'sww', mode)

        # Get static and dynamic quantities from domain.
        # Flag 1 = static (stored once), flag 2 = dynamic (per timestep).
        static_quantities = []
        dynamic_quantities = []

        for q in domain.quantities_to_be_stored:
            flag = domain.quantities_to_be_stored[q]

            msg = 'Quantity %s is requested to be stored ' % q
            msg += 'but it does not exist in domain.quantities'
            assert q in domain.quantities, msg

            assert flag in [1,2]
            if flag == 1: static_quantities.append(q)
            if flag == 2: dynamic_quantities.append(q)               


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)
        if mode[0] == 'w':
            description = 'Output from anuga.abstract_2d_finite_volumes ' \
                          'suitable for plotting'

            # The writer handles the actual NetCDF layout and I/O
            self.writer = Write_sww(static_quantities, dynamic_quantities)
            self.writer.store_header(fid,
                                     domain.starttime,
                                     self.number_of_volumes,
                                     self.domain.number_of_nodes,
                                     description=description,
                                     smoothing=domain.smooth,
                                     order=domain.default_order,
                                     sww_precision=self.precision)

            # Extra optional information
            if hasattr(domain, 'texture'):
                fid.texture = domain.texture

            # Create variables for extrema monitoring, if requested
            if domain.quantities_to_be_monitored is not None:
                fid.createDimension('singleton', 1)
                fid.createDimension('two', 2)

                poly = domain.monitor_polygon
                if poly is not None:
                    N = len(poly)
                    fid.createDimension('polygon_length', N)
                    fid.createVariable('extrema.polygon',
                                       self.precision,
                                       ('polygon_length', 'two'))
                    fid.variables['extrema.polygon'][:] = poly

                interval = domain.monitor_time_interval
                if interval is not None:
                    fid.createVariable('extrema.time_interval',
                                       self.precision,
                                       ('two',))
                    fid.variables['extrema.time_interval'][:] = interval

                for q in domain.quantities_to_be_monitored:
                    fid.createVariable(q + '.extrema', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.min_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.max_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.min_time', self.precision,
                                       ('singleton',))
                    fid.createVariable(q + '.max_time', self.precision,
                                       ('singleton',))

        fid.close()

    def store_connectivity(self):
        """Store information about nodes, triangles and static quantities

        Writes x,y coordinates of triangles and their connectivity.

        Store also any quantity that has been identified as static.
        """

        # FIXME: Change name to reflect the fact that this function
        # stores both connectivity (triangulation) and static quantities

        from Scientific.IO.NetCDF import NetCDFFile

        domain = self.domain

        # append to the NetCDF file
        fid = NetCDFFile(self.filename, netcdf_mode_a)

        # Get X, Y from one (any) of the quantities
        Q = domain.quantities.values()[0]
        X,Y,_,V = Q.get_vertex_values(xy=True, precision=self.precision)

        # store the connectivity data (points as an Nx2 array)
        points = num.concatenate((X[:,num.newaxis],Y[:,num.newaxis]), axis=1)
        self.writer.store_triangulation(fid,
                                        points,
                                        V.astype(num.float32),
                                        points_georeference=\
                                            domain.geo_reference)


        # Get names of static quantities
        static_quantities = {}
        for name in self.writer.static_quantities:
            Q = domain.quantities[name]
            A, _ = Q.get_vertex_values(xy=False, 
                                       precision=self.precision)
            static_quantities[name] = A

        # Store static quantities       
        self.writer.store_static_quantities(fid, **static_quantities)

        fid.close()

    def store_timestep(self):
        """Store time and time dependent quantities

        Appends domain.time and the current values of all dynamic
        quantities to the sww file.  If the file would grow beyond
        max_size, output rolls over into a new file (via recursion).
        """

        from Scientific.IO.NetCDF import NetCDFFile
        import types  # NOTE(review): appears unused in this method
        from time import sleep
        from os import stat

        # Get NetCDF; retry for a while in case the file is locked by a reader
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                # Open existing file
                fid = NetCDFFile(self.filename, netcdf_mode_a)
            except IOError:
                # This could happen if someone was reading the file.
                # In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened' \
                      % self.filename
                msg += ' - trying step %s again' % self.domain.time
                log.critical(msg)
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' % self.filename
            raise DataFileNotOpenError, msg

        # Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time) + 1
        file_size = stat(self.filename)[6]
        # Estimate per-timestep growth as the average size so far
        file_size_increase = file_size / i
        if file_size + file_size_increase > self.max_size * 2**self.recursion:
            # In order to get the file name and start time correct,
            # I change the domain.filename and domain.starttime.
            # This is the only way to do this without changing
            # other modules (I think).

            # Write a filename addon that won't break the anuga viewers
            # (10.sww is bad)
            filename_ext = '_time_%s' % self.domain.time
            filename_ext = filename_ext.replace('.', '_')

            # Remember the old filename, then give domain a
            # name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename + filename_ext)

            # Temporarily change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.get_time()

            # Build a new data_structure.
            next_data_structure = SWW_file(self.domain, mode=self.mode,
                                           max_size=self.max_size,
                                           recursion=self.recursion+1)
            if not self.recursion:
                log.critical('    file_size = %s' % file_size)
                log.critical('    saving file to %s'
                             % next_data_structure.filename) 

            # Set up the new data_structure
            self.domain.writer = next_data_structure

            # Store connectivity and first timestep in the new file
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep()
            fid.sync()
            fid.close()

            # Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            i = len(time)

            if 'stage' in self.writer.dynamic_quantities:           
                # Select only those values for stage,
                # xmomentum and ymomentum (if stored) where
                # depth exceeds minimum_storable_height
                #
                # In this branch it is assumed that elevation
                # is also available as a quantity           

                Q = domain.quantities['stage']
                w, _ = Q.get_vertex_values(xy=False)

                Q = domain.quantities['elevation']
                z, _ = Q.get_vertex_values(xy=False)               

                # Boolean mask: True where the cell is "wet enough" to store
                storable_indices = (w-z >= self.minimum_storable_height)
            else:
                # Very unlikely branch
                storable_indices = None # This means take all


            # Now store dynamic quantities
            dynamic_quantities = {}
            for name in self.writer.dynamic_quantities:
                netcdf_array = fid.variables[name]

                Q = domain.quantities[name]
                A, _ = Q.get_vertex_values(xy=False,
                                           precision=self.precision)

                if storable_indices is not None:
                    if name == 'stage':
                        # Where the mask is False (dry), store the bed
                        # elevation z instead of the stage value
                        A = num.choose(storable_indices, (z, A))

                    if name in ['xmomentum', 'ymomentum']:
                        # Get xmomentum where depth exceeds
                        # minimum_storable_height

                        # Define a zero vector of same size and type as A
                        # for use with momenta
                        null = num.zeros(num.size(A), A.dtype.char)
                        A = num.choose(storable_indices, (null, A))

                dynamic_quantities[name] = A


            # Store dynamic quantities
            self.writer.store_quantities(fid,
                                         time=self.domain.time,
                                         sww_precision=self.precision,
                                         **dynamic_quantities)


            # Update extrema if requested
            domain = self.domain
            if domain.quantities_to_be_monitored is not None:
                for q, info in domain.quantities_to_be_monitored.items():
                    if info['min'] is not None:
                        fid.variables[q + '.extrema'][0] = info['min']
                        fid.variables[q + '.min_location'][:] = \
                                        info['min_location']
                        fid.variables[q + '.min_time'][0] = info['min_time']

                    if info['max'] is not None:
                        fid.variables[q + '.extrema'][1] = info['max']
                        fid.variables[q + '.max_location'][:] = \
                                        info['max_location']
                        fid.variables[q + '.max_time'][0] = info['max_time']

            # Flush and close
            fid.sync()
            fid.close()
358
359
class Read_sww:
    """Reader for a NetCDF sww file: mesh, time slices and quantities."""

    def __init__(self, source):
        """The source parameter is assumed to be a NetCDF sww file.
        """
        self.source = source
        self.frame_number = 0

        # Read the time axis up front so frame bookkeeping is available
        fid = NetCDFFile(self.source, 'r')
        self.time = num.array(fid.variables['time'], num.float)
        fid.close()

        self.last_frame_number = self.time.shape[0] - 1
        self.frames = num.arange(self.last_frame_number + 1)

        self.read_mesh()

        self.quantities = {}
        self.read_quantities()

    def read_mesh(self):
        """ Read and store the mesh data contained within this sww file.
        """
        fid = NetCDFFile(self.source, 'r')

        self.vertices = num.array(fid.variables['volumes'], num.int)
        self.x = num.array(fid.variables['x'], num.float)
        self.y = num.array(fid.variables['y'], num.float)

        fid.close()

        assert len(self.x) == len(self.y)

        # Cache the bounding box of the mesh
        self.xmin = num.min(self.x)
        self.xmax = num.max(self.x)
        self.ymin = num.min(self.y)
        self.ymax = num.max(self.y)

    def read_quantities(self, frame_number=0):
        """
        Read the quantities contained in this file.
        frame_number is the time index to load.
        """
        assert 0 <= frame_number <= self.last_frame_number

        self.frame_number = frame_number

        # Three vertices per triangle
        M = len(self.x) // 3

        fid = NetCDFFile(self.source, 'r')

        # Everything except coordinates, connectivity, time and the
        # auxiliary *_range variables is a quantity.
        skip = ('x', 'y', 'time', 'volumes')
        names = [n for n in fid.variables.keys()
                 if n not in skip and '_range' not in n]

        for q in names:
            var = fid.variables[q]
            if len(var.shape) == 1:
                # Not a time-varying quantity
                values = num.ravel(num.array(var, num.float))
                self.quantities[q] = values.reshape(M, 3)
            else:
                # Time-varying; take the current timestep only
                frame = num.array(var[self.frame_number], num.float)
                self.quantities[q] = frame.reshape(M, 3)

        fid.close()
        return self.quantities

    def get_bounds(self):
        """
            Get the bounding rect around the mesh.
        """
        return [self.xmin, self.xmax, self.ymin, self.ymax]

    def get_last_frame_number(self):
        """
            Return the last loaded frame index.
        """
        return self.last_frame_number

    def get_time(self):
        """
            Get time at the current frame num, in secs.
        """
        return self.time[self.frame_number]
446
447
class Write_sww(Write_sts):
    """
        A class to write an SWW file.

        It is domain agnostic, and requires all the data to be fed in
        manually.
    """

    def __init__(self, static_quantities, dynamic_quantities):
        """Initialise Write_sww with two lists of quantity names:

        static_quantities (e.g. elevation or friction):
            Stored once at the beginning of the simulation in a 1D array
            of length number_of_points
        dynamic_quantities (e.g stage):
            Stored every timestep in a 2D array with
            dimensions number_of_points X number_of_timesteps
        """
        self.static_quantities = static_quantities
        self.dynamic_quantities = dynamic_quantities

    def store_header(self,
                     outfile,
                     times,
                     number_of_volumes,
                     number_of_points,
                     description='Generated by ANUGA',
                     smoothing=True,
                     order=1,
                     sww_precision=netcdf_float32,
                     verbose=False):
        """Write an SWW file header.

        Writes the first section of the .sww file.

        outfile - the open file that will be written
        times - A list of the time slice times OR a start time
        Note, if a list is given the info will be made relative.
        number_of_volumes - the number of triangles
        number_of_points - the number of vertices in the mesh
        """

        from anuga.abstract_2d_finite_volumes.util \
            import get_revision_number

        outfile.institution = 'Geoscience Australia'
        outfile.description = description

        # For sww compatibility
        if smoothing is True:
            # Smoothing to be depreciated
            outfile.smoothing = 'Yes'
            outfile.vertices_are_stored_uniquely = 'False'
        else:
            # Smoothing to be depreciated
            outfile.smoothing = 'No'
            outfile.vertices_are_stored_uniquely = 'True'
        outfile.order = order

        try:
            revision_number = get_revision_number()
        except Exception:
            # Triggered if the system cannot get the SVN revision number.
            # (Was a bare 'except:', which also swallowed KeyboardInterrupt.)
            revision_number = None
        # Allow None to be stored as a string
        outfile.revision_number = str(revision_number)

        # Distinguish a list/array of time slices from a single start time.
        if isinstance(times, (list, num.ndarray)):
            number_of_times = len(times)
            times = ensure_numeric(times)
            if number_of_times == 0:
                starttime = 0
            else:
                starttime = times[0]
                times = times - starttime  # Store relative times
        else:
            number_of_times = 0
            starttime = times

        outfile.starttime = starttime

        # dimension definitions
        outfile.createDimension('number_of_volumes', number_of_volumes)
        outfile.createDimension('number_of_vertices', 3)
        outfile.createDimension('numbers_in_range', 2)

        if smoothing is True:
            outfile.createDimension('number_of_points', number_of_points)
            # FIXME(Ole): This will cause sww files for parallel domains to
            # have ghost nodes stored (but not used by triangles).
            # To clean this up, we have to change get_vertex_values and
            # friends in quantity.py (but I can't be bothered right now)
        else:
            # Unsmoothed output stores each triangle's vertices separately
            outfile.createDimension('number_of_points', 3*number_of_volumes)

        outfile.createDimension('number_of_timesteps', number_of_times)

        # variable definitions
        outfile.createVariable('x', sww_precision, ('number_of_points',))
        outfile.createVariable('y', sww_precision, ('number_of_points',))

        outfile.createVariable('volumes', netcdf_int, ('number_of_volumes',
                                                       'number_of_vertices'))

        for q in self.static_quantities:
            outfile.createVariable(q, sww_precision,
                                   ('number_of_points',))

            outfile.createVariable(q + Write_sww.RANGE, sww_precision,
                                   ('numbers_in_range',))

            # Initialise ranges with small and large sentinels.
            # If this was in pure Python we could have used None sensibly
            outfile.variables[q + Write_sww.RANGE][0] = max_float  # Min
            outfile.variables[q + Write_sww.RANGE][1] = -max_float # Max

        # Dynamic quantity variables (and ranges) are created by the base
        # class helper defined in Write_sts.
        self.write_dynamic_quantities(outfile, self.dynamic_quantities, times,
                                      precis=sww_precision)

    def store_triangulation(self,
                            outfile,
                            points_utm,
                            volumes,
                            zone=None, 
                            new_origin=None,
                            points_georeference=None, 
                            verbose=False):
        """Store triangulation data in the underlying file.

        Stores the points and triangle indices in the sww file.

        outfile - open handle to underlying file
        points_utm - a list or array of the points in UTM
        volumes - triangle connectivity (indices into the points)
        zone - UTM zone used if a new georeference must be created
        new_origin - georeference that the points can be set to
        points_georeference - the georeference of points_utm
        verbose - True if this function is to be verbose

        Georeference handling:
        - If both new_origin and points_georeference are given, the points
          are converted from the old georeference to the new one.
        - If only points_georeference is given, the points are relative and
          are stored as-is.
        - If only new_origin is given, the points are absolute and are
          converted to relative.
        - If neither is given, a new georeference is created from the
          minima of points_utm (using 'zone', or the default zone).

        precon:
            store_header has been called.
        """

        volumes = num.array(volumes)
        points_utm = num.array(points_utm)

        # Given the two geo_refs and the points, do the conversion
        # described in the docstring.
        # If this is needed elsewhere, pull out as a function.
        points_georeference = ensure_geo_reference(points_georeference)
        new_origin = ensure_geo_reference(new_origin)
        if new_origin is None and points_georeference is not None:
            points = points_utm
            geo_ref = points_georeference
        else:
            if new_origin is None:
                new_origin = Geo_reference(zone, min(points_utm[:,0]),
                                                 min(points_utm[:,1]))
            points = new_origin.change_points_geo_ref(points_utm,
                                                      points_georeference)
            geo_ref = new_origin

        # At this stage we have a georef and points relative to it
        geo_ref.write_NetCDF(outfile)

        # This will put the geo ref in the middle
        #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)

        x = points[:,0]
        y = points[:,1]

        if verbose:
            log.critical('------------------------------------------------')
            log.critical('More Statistics:')
            log.critical('  Extent (/lon):')
            log.critical('    x in [%f, %f], len(lat) == %d'
                         % (min(x), max(x), len(x)))
            log.critical('    y in [%f, %f], len(lon) == %d'
                         % (min(y), max(y), len(y)))
            #log.critical('    z in [%f, %f], len(z) == %d'
            #             % (min(elevation), max(elevation), len(elevation)))
            log.critical('geo_ref: %s' % str(geo_ref))
            log.critical('------------------------------------------------')

        outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner()
        outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner()
        outfile.variables['volumes'][:] = volumes.astype(num.int32) #On Opteron 64

    def store_static_quantities(self, 
                                outfile, 
                                sww_precision=num.float32,
                                verbose=False, 
                                **quant):
        """
        Write the static quantity info.

        **quant is extra keyword arguments passed in. These must be
          the numpy arrays to be stored in the sww file at each timestep.

        The argument sww_precision allows for storing as either
        * single precision (default): num.float32
        * double precision: num.float64 or num.float

        Raises NewQuantity if a declared static quantity is missing
        from quant.

        Precondition:
            store_triangulation and
            store_header have been called.
        """

        # The dictionary quant must contain numpy arrays for each name.
        # These will typically be quantities from Domain such as friction
        #
        # Arrays not listed in static_quantities will be ignored, silently.
        #
        # This method will also write the ranges for each quantity,
        # e.g. stage_range, xmomentum_range and ymomentum_range
        for q in self.static_quantities:
            if q not in quant:
                msg = 'Values for quantity %s was not specified in ' % q
                msg += 'store_quantities so they cannot be stored.'
                raise NewQuantity(msg)
            else:
                q_values = ensure_numeric(quant[q])

                x = q_values.astype(sww_precision)
                outfile.variables[q][:] = x

                # This populates the _range values
                outfile.variables[q + Write_sww.RANGE][0] = num.min(x)
                outfile.variables[q + Write_sww.RANGE][1] = num.max(x)

        # FIXME: Hack for backwards compatibility with old viewer
        #if 'elevation' in self.static_quantities:
        #    outfile.variables['z'][:] = outfile.variables['elevation'][:]

    def store_quantities(self, 
                         outfile, 
                         sww_precision=num.float32,
                         slice_index=None,
                         time=None,
                         verbose=False, 
                         **quant):
        """
        Write the quantity info at each timestep.

        **quant is extra keyword arguments passed in. These must be
          the numpy arrays to be stored in the sww file at each timestep.

        if the time array is already been built, use the slice_index
        to specify the index.

        Otherwise, use time to increase the time dimension

        Maybe make this general, but the viewer assumes these quantities,
        so maybe we don't want it general - unless the viewer is general

        The argument sww_precision allows for storing as either
        * single precision (default): num.float32
        * double precision: num.float64 or num.float

        Raises NewQuantity if a declared dynamic quantity is missing
        from quant.

        Precondition:
            store_triangulation and
            store_header have been called.
        """

        if time is not None:
            # Append a new time slice at the end of the time variable
            file_time = outfile.variables['time']
            slice_index = len(file_time)
            file_time[slice_index] = time
        else:
            slice_index = int(slice_index) # Has to be cast in case it was numpy.int

        # Write the named dynamic quantities
        # The dictionary quant must contain numpy arrays for each name.
        # These will typically be the conserved quantities from Domain
        # (Typically stage,  xmomentum, ymomentum).
        #
        # Arrays not listed in dynamic_quantities will be ignored, silently.
        #
        # This method will also write the ranges for each quantity,
        # e.g. stage_range, xmomentum_range and ymomentum_range
        for q in self.dynamic_quantities:
            if q not in quant:
                msg = 'Values for quantity %s was not specified in ' % q
                msg += 'store_quantities so they cannot be stored.'
                raise NewQuantity(msg)
            else:
                q_values = ensure_numeric(quant[q])

                q_retyped = q_values.astype(sww_precision)
                outfile.variables[q][slice_index] = q_retyped

                # This updates the _range values
                q_range = outfile.variables[q + Write_sww.RANGE][:]
                q_values_min = num.min(q_values)
                if q_values_min < q_range[0]:
                    outfile.variables[q + Write_sww.RANGE][0] = q_values_min
                q_values_max = num.max(q_values)
                if q_values_max > q_range[1]:
                    outfile.variables[q + Write_sww.RANGE][1] = q_values_max

    def verbose_quantities(self, outfile):
        """Log the stored [min, max] range of each dynamic quantity."""
        log.critical('------------------------------------------------')
        log.critical('More Statistics:')
        for q in self.dynamic_quantities:
            # Fix: the format string below was missing its opening quote,
            # which made this method a syntax error.
            log.critical('    %s in [%f, %f]'
                         % (q, outfile.variables[q + Write_sww.RANGE][0],
                            outfile.variables[q + Write_sww.RANGE][1]))
        log.critical('------------------------------------------------')
790
791    def verbose_quantities(self, outfile):
792        log.critical('------------------------------------------------')
793        log.critical('More Statistics:')
794        for q in self.dynamic_quantities:
795            log.critical(%s in [%f, %f]'
796                         % (q, outfile.variables[q+Write_sww.RANGE][0],
797                            outfile.variables[q+Write_sww.RANGE][1]))
798        log.critical('------------------------------------------------')
799
800
801
802
def extent_sww(file_name):
    """Determine the spatial and stage extents of an sww file.

    file_name: path of the sww (NetCDF) file to inspect.

    Returns the list:
        [min(x), max(x), min(y), max(y), min(stage), max(stage)]
    """
    from Scientific.IO.NetCDF import NetCDFFile

    # Open the NetCDF file read-only
    fid = NetCDFFile(file_name, netcdf_mode_r)

    # Pull out the coordinate and stage arrays
    xs = fid.variables['x'][:]
    ys = fid.variables['y'][:]
    stages = fid.variables['stage'][:]

    fid.close()

    extents = [min(xs), max(xs),
               min(ys), max(ys),
               num.min(stages), num.max(stages)]
    return extents
826
827
def load_sww_as_domain(filename, boundary=None, t=None,
               fail_if_NaN=True, NaN_filler=0,
               verbose=False, very_verbose=False):
    """
    Load an sww file into a domain.

    Usage: domain = load_sww_as_domain('file.sww',
                        t=time (default = last time in file))

    filename       name of the sww (NetCDF) file to read
    boundary       optional boundary dictionary
    t              model time to load (quantities are interpolated to t);
                   defaults to the last stored timestep
    fail_if_NaN    if True, raise DataMissingValuesError when a quantity
                   contains missing values; otherwise replace them
    NaN_filler     replacement value used when fail_if_NaN is False
    verbose        log progress messages
    very_verbose   log per-quantity NaN diagnostics

    Returns a shallow_water Domain object.

    Raises DataDomainError if the Domain cannot be constructed and
    DataMissingValuesError on missing data (when fail_if_NaN is True).

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatable with the coordinates, and will
    give a different final boundary, or crash.
    """

    from Scientific.IO.NetCDF import NetCDFFile
    from anuga.shallow_water.shallow_water_domain import Domain

    # initialise NaN.
    # NOTE(review): this looks like the netCDF default float32 fill value,
    # used as a missing-data sentinel; it may be overridden per variable
    # by the missing_value attribute below.
    NaN = 9.969209968386869e+036

    if verbose: log.critical('Reading from %s' % filename)

    fid = NetCDFFile(filename, netcdf_mode_r)    # Open existing file for read
    time = fid.variables['time']       # Timesteps
    if t is None:
        t = time[-1]
    # (index, ratio) pair used to interpolate time-dependent quantities at t
    time_interp = get_time_interp(time,t)

    # Get the variables as numeric arrays
    x = fid.variables['x'][:]                   # x-coordinates of vertices
    y = fid.variables['y'][:]                   # y-coordinates of vertices
    elevation = fid.variables['elevation']      # Elevation
    stage = fid.variables['stage']              # Water level
    xmomentum = fid.variables['xmomentum']      # Momentum in the x-direction
    ymomentum = fid.variables['ymomentum']      # Momentum in the y-direction

    starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:]       # Connectivity
    coordinates = num.transpose(num.asarray([x.tolist(), y.tolist()]))
    # FIXME (Ole): Something like this might be better:
    #                 concatenate((x, y), axis=1)
    # or              concatenate((x[:,num.newaxis], x[:,num.newaxis]), axis=1)

    conserved_quantities = []
    interpolated_quantities = {}
    other_quantities = []

    # get geo_reference
    try:                             # sww files don't have to have a geo_ref
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: # AttributeError, e:
        geo_reference = None

    if verbose: log.critical('    getting quantities')

    # Classify file variables: anything carrying a time dimension is treated
    # as a conserved quantity (and interpolated to time t); the remainder are
    # static quantities.
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity] = \
                  interpolated_quantity(fid.variables[quantity][:], time_interp)
        else:
            other_quantities.append(quantity)

    # Remove bookkeeping variables that are not domain quantities
    other_quantities.remove('x')
    other_quantities.remove('y')
    #other_quantities.remove('z')
    other_quantities.remove('volumes')
    try:
        # The *_range variables are optional in older files
        other_quantities.remove('stage_range')
        other_quantities.remove('xmomentum_range')
        other_quantities.remove('ymomentum_range')
        other_quantities.remove('elevation_range')
    except:
        pass

    conserved_quantities.remove('time')

    if verbose: log.critical('    building domain')

    #    From domain.Domain:
    #    domain = Domain(coordinates, volumes,\
    #                    conserved_quantities = conserved_quantities,\
    #                    other_quantities = other_quantities,zone=zone,\
    #                    xllcorner=xllcorner, yllcorner=yllcorner)

    # From shallow_water.Domain:
    coordinates = coordinates.tolist()
    volumes = volumes.tolist()
    # FIXME:should this be in mesh? (peter row)
    if fid.smoothing == 'Yes':
        unique = False
    else:
        unique = True
    if unique:
        # Unsmoothed files store one vertex per triangle corner; merge
        # duplicate points and remap triangle/boundary indices accordingly.
        coordinates, volumes, boundary = weed(coordinates, volumes,boundary)



    try:
        # The new domain starts at the file's starttime offset by t
        domain = Domain(coordinates, volumes, boundary, starttime=(float(starttime) + float(t)))
    except AssertionError, e:
        fid.close()
        msg = 'Domain could not be created: %s. ' \
              'Perhaps use "fail_if_NaN=False and NaN_filler = ..."' % e
        raise DataDomainError, msg

    if not boundary is None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    # Static quantities (e.g. elevation)
    for quantity in other_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass                       # quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            log.critical('       %s' % str(quantity))
            log.critical('        NaN = %s' % str(NaN))
            log.critical('        max(X)')
            log.critical('       %s' % str(max(X)))
            log.critical('        max(X)==NaN')
            log.critical('       %s' % str(max(X)==NaN))
            log.critical('')
        if max(X) == NaN or min(X) == NaN:
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry' % quantity
                raise DataMissingValuesError, msg
            else:
                # Zero out missing entries and substitute NaN_filler
                data = (X != NaN)
                X = (X*data) + (data==0)*NaN_filler
        if unique:
            # Reshape flat per-vertex values to one row of 3 per triangle
            X = num.resize(X, (len(X)/3, 3))
        domain.set_quantity(quantity, X)
    #
    # Time-dependent quantities, already interpolated to time t above
    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass                       # quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            log.critical('       %s' % str(quantity))
            log.critical('        NaN = %s' % str(NaN))
            log.critical('        max(X)')
            log.critical('       %s' % str(max(X)))
            log.critical('        max(X)==NaN')
            log.critical('       %s' % str(max(X)==NaN))
            log.critical('')
        if max(X) == NaN or min(X) == NaN:
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry' % quantity
                raise DataMissingValuesError, msg
            else:
                # Zero out missing entries and substitute NaN_filler
                data = (X != NaN)
                X = (X*data) + (data==0)*NaN_filler
        if unique:
            # Reshape flat per-vertex values to one row of 3 per triangle
            X = num.resize(X, (X.shape[0]/3, 3))
        domain.set_quantity(quantity, X)

    fid.close()

    return domain
994
995
def get_mesh_and_quantities_from_file(filename,
                                      quantities=None,
                                      verbose=False):
    """Get and rebuild mesh structure and associated quantities from sww file

    Input:
        filename - Name of sww file
        quantities - Names of quantities to load

    Output:
        mesh - instance of class Interpolate
               (including mesh and interpolation functionality)
        quantities - arrays with quantity values at each mesh node
        time - vector of stored timesteps

    NOTE(review): the quantities argument is currently ignored - the
    returned dictionary always contains elevation, stage, xmomentum and
    ymomentum regardless of what is passed in.

    This function is used by e.g.:
        get_interpolated_quantities_at_polyline_midpoints
    """

    # FIXME (Ole): Maybe refactor filefunction using this more fundamental code.

    import types
    from Scientific.IO.NetCDF import NetCDFFile
    from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh

    if verbose: log.critical('Reading from %s' % filename)

    fid = NetCDFFile(filename, netcdf_mode_r)    # Open existing file for read
    time = fid.variables['time'][:]    # Time vector
    # Shift relative timesteps to absolute time
    time += fid.starttime[0]

    # Get the variables as numeric arrays
    x = fid.variables['x'][:]                   # x-coordinates of nodes
    y = fid.variables['y'][:]                   # y-coordinates of nodes
    elevation = fid.variables['elevation'][:]   # Elevation
    stage = fid.variables['stage'][:]           # Water level
    xmomentum = fid.variables['xmomentum'][:]   # Momentum in the x-direction
    ymomentum = fid.variables['ymomentum'][:]   # Momentum in the y-direction

    # Mesh (nodes (Mx2), triangles (Nx3))
    nodes = num.concatenate((x[:,num.newaxis], y[:,num.newaxis]), axis=1)
    triangles = fid.variables['volumes'][:]

    # Get geo_reference
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: #AttributeError, e:
        # Sww files don't have to have a geo_ref
        geo_reference = None

    if verbose: log.critical('    building mesh from sww file %s' % filename)

    boundary = None

    #FIXME (Peter Row): Should this be in mesh?
    if fid.smoothing != 'Yes':
        # Unsmoothed files duplicate vertices per triangle; merge them
        nodes = nodes.tolist()
        triangles = triangles.tolist()
        nodes, triangles, boundary = weed(nodes, triangles, boundary)

    try:
        mesh = Mesh(nodes, triangles, boundary, geo_reference=geo_reference)
    except AssertionError, e:
        fid.close()
        msg = 'Domain could not be created: %s. "' % e
        raise DataDomainError, msg

    # NOTE(review): any caller-supplied quantities value is discarded here
    quantities = {}
    quantities['elevation'] = elevation
    quantities['stage'] = stage
    quantities['xmomentum'] = xmomentum
    quantities['ymomentum'] = ymomentum

    fid.close()

    return mesh, quantities, time
1072
1073
def get_time_interp(time, t=None):
    """Find the index and ratio for linear interpolation in time.

    time    array of stored time steps (assumed monotonically increasing)
    t       the sample time; defaults to the last stored time

    Returns a tuple (index, ratio) such that a stored quantity Q can be
    sampled at t as (1-ratio)*Q[index] + ratio*Q[index+1].  ratio == 0
    means t coincides exactly with time[index].

    Raises DataTimeError if t lies outside [time[0], time[-1]].
    """
    if t is None:
        # Default: the final stored timestep, exactly on a sample point.
        # (The original also assigned t = time[-1] here, but the value was
        # never used.)
        return (-1, 0.)

    msg = 'Time interval derived from file %s [%s:%s]' \
          % ('FIXMEfilename', time[0], time[-1])
    msg += ' does not match model time: %s' % t

    # Reject sample times outside the stored interval.
    # Use the version-agnostic callable raise form; the original
    # 'raise DataTimeError, msg' statement was Python-2-only syntax.
    if t < time[0] or t > time[-1]:
        raise DataTimeError(msg)

    # Locate index such that time[index] <= t <= time[index+1].
    # The second loop backs up (at most once) when the first loop
    # overshoots onto a strictly larger timestep.
    index = 0
    while t > time[index]:
        index += 1
    while t < time[index]:
        index -= 1

    if t == time[index]:
        # Exact hit - also protects the case t == time[-1], where
        # index+1 would be out of range.
        ratio = 0
    else:
        # t lies strictly between time[index] and time[index+1]
        ratio = (t - time[index])/(time[index+1] - time[index])

    return (index, ratio)
1104
1105
1106
def interpolated_quantity(saved_quantity, time_interp):
    """Linearly interpolate a stored quantity in time.

    saved_quantity  the quantity values, indexed by timestep
    time_interp     tuple (index, ratio) as produced by get_time_interp

    Returns the interpolated value(s) at the requested time.
    """

    index, ratio = time_interp
    values = saved_quantity

    if ratio <= 0:
        # Exact timestep hit - no blending required
        return values[index]

    # Weighted average of the two bracketing timesteps
    return (1 - ratio)*values[index] + ratio*values[index + 1]
1127
1128
def weed(coordinates, volumes, boundary=None):
    """ Excise all duplicate points.

    coordinates  list (or array) of [x, y] vertex coordinates
    volumes      list (or array) of triangles as triples of vertex indices;
                 modified in place and also returned
    boundary     optional dict mapping (vertex_index, vertex_index) segments
                 to boundary tags

    Returns (coordinates, volumes, boundary) with duplicate vertices merged
    and all vertex indices remapped to the weeded numbering.

    NOTE(review): the order of the returned coordinates follows dict key
    ordering and is therefore not guaranteed to match the input order.
    """
    if isinstance(coordinates, num.ndarray):
        coordinates = coordinates.tolist()
    if isinstance(volumes, num.ndarray):
        volumes = volumes.tolist()

    # 'unique' is set to True when at least one duplicate point is found
    unique = False
    point_dict = {}     # point tuple -> index of its first occurrence
    same_point = {}     # original index -> point tuple
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        if point_dict.has_key(point):
            unique = True
            same_point[i] = point
            #to change all point i references to point j
        else:
            point_dict[point] = i
            same_point[i] = point

    # Rebuild the coordinate list with one entry per distinct point and
    # renumber point_dict so it maps point -> new (weeded) index
    coordinates = []
    i = 0
    for point in point_dict.keys():
        point = tuple(point)
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1

    # Remap triangle vertex indices (in place) to the weeded numbering;
    # negative indices are left untouched
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    # Remap boundary segment endpoints to the weeded numbering
    new_boundary = {}
    if not boundary is None:
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME should the bounday attributes be concaterated
            #('exterior, pond') or replaced ('pond')(peter row)

            # NOTE(review): both branches below re-assign an existing entry
            # to itself (a no-op), so the first label seen for a segment is
            # kept regardless of orientation.
            if new_boundary.has_key((point0, point1)):
                new_boundary[(point0,point1)] = new_boundary[(point0, point1)]

            elif new_boundary.has_key((point1, point0)):
                new_boundary[(point1,point0)] = new_boundary[(point1, point0)]
            else: new_boundary[(point0, point1)] = label

        boundary = new_boundary

    return coordinates, volumes, boundary
1183
1184
1185
Note: See TracBrowser for help on using the repository browser.