source: trunk/anuga_core/source/anuga/file/sww.py @ 7861

Last change on this file since 7861 was 7861, checked in by hudson, 12 years ago

Fixed AABB parameter passing bug.

File size: 43.1 KB
Line 
1""" Classes to read an SWW file.
2"""
3
import numpy as num

from Scientific.IO.NetCDF import NetCDFFile

from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a
from anuga.config import netcdf_float, netcdf_float32, netcdf_int
from anuga.config import max_float
from anuga.config import minimum_storable_height as \
        default_minimum_storable_height
from anuga.coordinate_transforms.geo_reference import Geo_reference
from anuga.coordinate_transforms.geo_reference import ensure_geo_reference
from anuga.utilities.numerical_tools import ensure_numeric
import anuga.utilities.log as log

from sts import Write_sts
from file_utils import create_filename
19
##
# @brief Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """

    ##
    # @brief Instantiate this instance.
    # @param domain The domain providing data directory, name and mesh counts.
    # @param extension File extension of the underlying data file (e.g. 'sww').
    # @param mode The mode of the underlying file ('r', 'w' or 'a').
    def __init__(self, domain, extension, mode=netcdf_mode_w):
        msg = ("Mode %s must be either:\n"
               "   'w' (write)\n"
               "   'r' (read)\n"
               "   'a' (append)" % mode)
        assert mode[0] in ['r', 'w', 'a'], msg

        # Derive the output filename from the domain's data dir and name
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        self.timestep = 0
        self.domain = domain

        # Exclude ghosts in case this is a parallel domain
        self.number_of_nodes = domain.number_of_full_nodes
        self.number_of_volumes = domain.number_of_full_triangles

        #FIXME: Should we have a general set_precision function?
52
class SWW_file(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """

    ##
    # @brief Instantiate this instance.
    # @param domain Domain whose quantities are to be stored.
    # @param mode Mode of the underlying data file ('r', 'w' or 'a').
    # @param max_size Rollover threshold in bytes: once the file would grow
    #        beyond this, store_timestep() starts a continuation file.
    # @param recursion Internal rollover counter: False for the first file,
    #        then 1, 2, ... for continuation files.
    # @note Prepare the underlying data file if mode starts with 'w'.
    def __init__(self, domain,
                 mode=netcdf_mode_w, max_size=2000000000, recursion=False):
        from Scientific.IO.NetCDF import NetCDFFile

        self.precision = netcdf_float32 # Use single precision for quantities
        self.recursion = recursion
        self.mode = mode

        # Domain attributes override the defaults if present
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size # File size max is 2Gig
        else:
            self.max_size = max_size
        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            # Default imported from anuga.config at the top of this module
            self.minimum_storable_height = default_minimum_storable_height

        # Call parent constructor (sets self.filename and the node/volume
        # counts used below)
        Data_format.__init__(self, domain, 'sww', mode)

        # Split requested quantities into static (flag 1, stored once)
        # and dynamic (flag 2, stored every timestep)
        static_quantities = []
        dynamic_quantities = []

        for q in domain.quantities_to_be_stored:
            flag = domain.quantities_to_be_stored[q]

            msg = 'Quantity %s is requested to be stored ' % q
            msg += 'but it does not exist in domain.quantities'
            assert q in domain.quantities, msg

            assert flag in [1,2]
            if flag == 1: static_quantities.append(q)
            if flag == 2: dynamic_quantities.append(q)

        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)
        if mode[0] == 'w':
            description = 'Output from anuga.abstract_2d_finite_volumes ' \
                          'suitable for plotting'

            self.writer = Write_sww(static_quantities, dynamic_quantities)
            self.writer.store_header(fid,
                                     domain.starttime,
                                     self.number_of_volumes,
                                     self.domain.number_of_full_nodes,
                                     description=description,
                                     smoothing=domain.smooth,
                                     order=domain.default_order,
                                     sww_precision=self.precision)

            # Extra optional information
            if hasattr(domain, 'texture'):
                fid.texture = domain.texture

            # Create variables used for extrema monitoring, if requested
            if domain.quantities_to_be_monitored is not None:
                fid.createDimension('singleton', 1)
                fid.createDimension('two', 2)

                poly = domain.monitor_polygon
                if poly is not None:
                    N = len(poly)
                    fid.createDimension('polygon_length', N)
                    fid.createVariable('extrema.polygon',
                                       self.precision,
                                       ('polygon_length', 'two'))
                    fid.variables['extrema.polygon'][:] = poly

                interval = domain.monitor_time_interval
                if interval is not None:
                    fid.createVariable('extrema.time_interval',
                                       self.precision,
                                       ('two',))
                    fid.variables['extrema.time_interval'][:] = interval

                # Per-quantity min/max values, locations and times
                for q in domain.quantities_to_be_monitored:
                    fid.createVariable(q + '.extrema', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.min_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.max_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.min_time', self.precision,
                                       ('singleton',))
                    fid.createVariable(q + '.max_time', self.precision,
                                       ('singleton',))

        fid.close()

    ##
    # @brief Store connectivity data into the underlying data file.
    def store_connectivity(self):
        """Store information about nodes, triangles and static quantities

        Writes x,y coordinates of triangles and their connectivity.

        Store also any quantity that has been identified as static.
        """

        # FIXME: Change name to reflect the fact that this function
        # stores both connectivity (triangulation) and static quantities

        from Scientific.IO.NetCDF import NetCDFFile

        domain = self.domain

        # append to the NetCDF file
        fid = NetCDFFile(self.filename, netcdf_mode_a)

        # Get X, Y from one (any) of the quantities
        Q = domain.quantities.values()[0]
        X,Y,_,V = Q.get_vertex_values(xy=True, precision=self.precision)

        # store the connectivity data
        points = num.concatenate((X[:,num.newaxis],Y[:,num.newaxis]), axis=1)
        self.writer.store_triangulation(fid,
                                        points,
                                        V.astype(num.float32),
                                        points_georeference=\
                                            domain.geo_reference)

        # Collect vertex values of each static quantity
        static_quantities = {}
        for name in self.writer.static_quantities:
            Q = domain.quantities[name]
            A, _ = Q.get_vertex_values(xy=False,
                                       precision=self.precision)
            static_quantities[name] = A

        # Store static quantities
        self.writer.store_static_quantities(fid, **static_quantities)

        fid.close()

    ##
    # @brief Store time and time dependent quantities
    # to the underlying data file.
    def store_timestep(self):
        """Store time and time dependent quantities

        If storing this step would push the file past self.max_size,
        a new continuation .sww file (named after the current time) is
        created and further output is redirected there.
        """

        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep
        from os import stat

        # Open the file for append, retrying for up to 10 seconds in case
        # another process (e.g. a viewer) is holding it open
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                # Open existing file
                fid = NetCDFFile(self.filename, netcdf_mode_a)
            except IOError:
                # This could happen if someone was reading the file.
                # In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened' \
                      % self.filename
                msg += ' - trying step %s again' % self.domain.time
                log.critical(msg)
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            # NOTE(review): DataFileNotOpenError is not imported in this
            # module - confirm it is brought into scope at runtime.
            msg = 'File %s could not be opened for append' % self.filename
            raise DataFileNotOpenError(msg)

        # Check to see if the file is already too big:
        # estimate the growth per step as the current average step size
        time = fid.variables['time']
        i = len(time) + 1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size / i
        # Note: 2**self.recursion == 1 for the first file (recursion False)
        if file_size + file_size_increase > self.max_size * 2**self.recursion:
            # In order to get the file name and start time correct,
            # I change the domain.filename and domain.starttime.
            # This is the only way to do this without changing
            # other modules (I think).

            # Write a filename addon that won't break the anuga viewers
            # (10.sww is bad)
            filename_ext = '_time_%s' % self.domain.time
            filename_ext = filename_ext.replace('.', '_')

            # Remember the old filename, then give domain a
            # name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename + filename_ext)

            # Temporarily change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.get_time()

            # Build a new data_structure.
            next_data_structure = SWW_file(self.domain, mode=self.mode,
                                           max_size=self.max_size,
                                           recursion=self.recursion+1)
            if not self.recursion:
                log.critical('    file_size = %s' % file_size)
                log.critical('    saving file to %s'
                             % next_data_structure.filename)

            # Set up the new data_structure
            self.domain.writer = next_data_structure

            # Store connectivity and first timestep
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep()
            fid.sync()
            fid.close()

            # Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            i = len(time)

            if 'stage' in self.writer.dynamic_quantities:
                # Select only those values for stage,
                # xmomentum and ymomentum (if stored) where
                # depth exceeds minimum_storable_height
                #
                # In this branch it is assumed that elevation
                # is also available as a quantity

                Q = domain.quantities['stage']
                w, _ = Q.get_vertex_values(xy=False)

                Q = domain.quantities['elevation']
                z, _ = Q.get_vertex_values(xy=False)

                storable_indices = (w-z >= self.minimum_storable_height)
            else:
                # Very unlikely branch
                storable_indices = None # This means take all

            # Now store dynamic quantities
            dynamic_quantities = {}
            for name in self.writer.dynamic_quantities:
                netcdf_array = fid.variables[name]

                Q = domain.quantities[name]
                A, _ = Q.get_vertex_values(xy=False,
                                           precision=self.precision)

                if storable_indices is not None:
                    if name == 'stage':
                        # Where depth is too small, store elevation
                        # (i.e. zero depth) instead of stage
                        A = num.choose(storable_indices, (z, A))

                    if name in ['xmomentum', 'ymomentum']:
                        # Get xmomentum where depth exceeds
                        # minimum_storable_height

                        # Define a zero vector of same size and type as A
                        # for use with momenta
                        null = num.zeros(num.size(A), A.dtype.char)
                        A = num.choose(storable_indices, (null, A))

                dynamic_quantities[name] = A

            # Store dynamic quantities
            self.writer.store_quantities(fid,
                                         time=self.domain.time,
                                         sww_precision=self.precision,
                                         **dynamic_quantities)

            # Update extrema if requested
            domain = self.domain
            if domain.quantities_to_be_monitored is not None:
                for q, info in domain.quantities_to_be_monitored.items():
                    if info['min'] is not None:
                        fid.variables[q + '.extrema'][0] = info['min']
                        fid.variables[q + '.min_location'][:] = \
                                        info['min_location']
                        fid.variables[q + '.min_time'][0] = info['min_time']

                    if info['max'] is not None:
                        fid.variables[q + '.extrema'][1] = info['max']
                        fid.variables[q + '.max_location'][:] = \
                                        info['max_location']
                        fid.variables[q + '.max_time'][0] = info['max_time']

            # Flush and close
            fid.sync()
            fid.close()
367
368
##
# @brief Class to open an sww file so that domain can be populated with quantity values
class Read_sww:

    def __init__(self, source):
        """The source parameter is assumed to be a NetCDF sww file.
        """
        self.source = source
        self.frame_number = 0

        # Read the time axis up front so we know how many frames exist
        fid = NetCDFFile(self.source, 'r')
        self.time = num.array(fid.variables['time'], num.float)
        self.last_frame_number = self.time.shape[0] - 1
        self.frames = num.arange(self.last_frame_number + 1)
        fid.close()

        self.read_mesh()

        self.quantities = {}
        self.read_quantities()

    def read_mesh(self):
        """Read and store the mesh data contained within this sww file.
        """
        fid = NetCDFFile(self.source, 'r')

        self.vertices = num.array(fid.variables['volumes'], num.int)

        self.x = num.array(fid.variables['x'], num.float)
        self.y = num.array(fid.variables['y'], num.float)

        assert len(self.x) == len(self.y)

        # Cache the bounding box of the mesh
        self.xmin = num.min(self.x)
        self.xmax = num.max(self.x)
        self.ymin = num.min(self.y)
        self.ymax = num.max(self.y)

        fid.close()

    def read_quantities(self, frame_number=0):
        """
        Read the quantities contained in this file.
        frame_number is the time index to load.
        """
        assert 0 <= frame_number <= self.last_frame_number

        self.frame_number = frame_number

        # Number of triangles (three vertex values per triangle)
        M = len(self.x) // 3

        fid = NetCDFFile(self.source, 'r')

        # Every variable except coordinates, time, connectivity and
        # the *_range bookkeeping arrays is a quantity
        skip = ('x', 'y', 'time', 'volumes')
        names = [n for n in fid.variables.keys()
                 if n not in skip and '_range' not in n]

        for q in names:
            var = fid.variables[q]
            if len(var.shape) == 1:
                # Not a time-varying quantity
                self.quantities[q] = \
                    num.ravel(num.array(var, num.float)).reshape(M, 3)
            else:
                # Time-varying, get the current timestep data
                self.quantities[q] = \
                    num.array(var[self.frame_number], num.float).reshape(M, 3)
        fid.close()
        return self.quantities

    def get_bounds(self):
        """
            Get the bounding rect around the mesh.
        """
        return [self.xmin, self.xmax, self.ymin, self.ymax]

    def get_last_frame_number(self):
        """
            Return the last loaded frame index.
        """
        return self.last_frame_number

    def get_time(self):
        """
            Get time at the current frame num, in secs.
        """
        return self.time[self.frame_number]
457
458
class Write_sww(Write_sts):
    """
        A class to write an SWW file.

        It is domain agnostic, and requires all the data to be fed in
        manually.
    """

    def __init__(self, static_quantities, dynamic_quantities):
        """Initialise Write_sww with two lists of quantity names:

        static_quantities (e.g. elevation or friction):
            Stored once at the beginning of the simulation in a 1D array
            of length number_of_points
        dynamic_quantities (e.g stage):
            Stored every timestep in a 2D array with
            dimensions number_of_points X number_of_timesteps
        """
        self.static_quantities = static_quantities
        self.dynamic_quantities = dynamic_quantities

    def store_header(self,
                     outfile,
                     times,
                     number_of_volumes,
                     number_of_points,
                     description='Generated by ANUGA',
                     smoothing=True,
                     order=1,
                     sww_precision=netcdf_float32,
                     verbose=False):
        """Write an SWW file header.

        Writes the first section of the .sww file.

        outfile - the open file that will be written
        times - A list of the time slice times OR a start time
        Note, if a list is given the info will be made relative.
        number_of_volumes - the number of triangles
        number_of_points - the number of vertices in the mesh
        """

        from anuga.abstract_2d_finite_volumes.util \
            import get_revision_number

        outfile.institution = 'Geoscience Australia'
        outfile.description = description

        # For sww compatibility
        if smoothing is True:
            # Smoothing to be depreciated
            outfile.smoothing = 'Yes'
            outfile.vertices_are_stored_uniquely = 'False'
        else:
            # Smoothing to be depreciated
            outfile.smoothing = 'No'
            outfile.vertices_are_stored_uniquely = 'True'
        outfile.order = order

        try:
            revision_number = get_revision_number()
        except Exception:
            # This will be triggered if the system cannot get the SVN
            # revision number.
            revision_number = None
        # Allow None to be stored as a string
        outfile.revision_number = str(revision_number)

        # This is being used to seperate one number from a list.
        # what it is actually doing is sorting lists from numeric arrays.
        if isinstance(times, (list, num.ndarray)):
            number_of_times = len(times)
            times = ensure_numeric(times)
            if number_of_times == 0:
                starttime = 0
            else:
                starttime = times[0]
                times = times - starttime  #Store relative times
        else:
            number_of_times = 0
            starttime = times

        outfile.starttime = starttime

        # dimension definitions
        outfile.createDimension('number_of_volumes', number_of_volumes)
        outfile.createDimension('number_of_vertices', 3)
        outfile.createDimension('numbers_in_range', 2)

        if smoothing is True:
            outfile.createDimension('number_of_points', number_of_points)
            # FIXME(Ole): This will cause sww files for parallel domains to
            # have ghost nodes stored (but not used by triangles).
            # To clean this up, we have to change get_vertex_values and
            # friends in quantity.py (but I can't be bothered right now)
        else:
            # Without smoothing every triangle stores its own three vertices
            outfile.createDimension('number_of_points', 3*number_of_volumes)

        outfile.createDimension('number_of_timesteps', number_of_times)

        # variable definitions
        outfile.createVariable('x', sww_precision, ('number_of_points',))
        outfile.createVariable('y', sww_precision, ('number_of_points',))

        outfile.createVariable('volumes', netcdf_int, ('number_of_volumes',
                                                       'number_of_vertices'))

        for q in self.static_quantities:

            outfile.createVariable(q, sww_precision,
                                   ('number_of_points',))

            outfile.createVariable(q + Write_sww.RANGE, sww_precision,
                                   ('numbers_in_range',))

            # Initialise ranges with small and large sentinels.
            # If this was in pure Python we could have used None sensibly
            outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
            outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max

        # Doing dynamic quantities here allows us to also do the 'time'
        # variable (inherited from Write_sts)
        self.write_dynamic_quantities(outfile, self.dynamic_quantities, times)

    def store_triangulation(self,
                            outfile,
                            points_utm,
                            volumes,
                            zone=None,
                            new_origin=None,
                            points_georeference=None,
                            verbose=False):
        """
        Store triangulation data in the underlying file.

        Stores the points and triangle indices in the sww file

        outfile Open handle to underlying file.

        new_origin georeference that the points can be set to.

        points_georeference The georeference of the points_utm.

        verbose True if this function is to be verbose.

        new_origin - qa georeference that the points can be set to. (Maybe
        do this before calling this function.)

        points_utm - currently a list or array of the points in UTM.
        points_georeference - the georeference of the points_utm

        How about passing new_origin and current_origin.
        If you get both, do a convertion from the old to the new.

        If you only get new_origin, the points are absolute,
        convert to relative

        if you only get the current_origin the points are relative, store
        as relative.

        if you get no georefs create a new georef based on the minimums of
        points_utm.  (Another option would be to default to absolute)

        Yes, and this is done in another part of the code.
        Probably geospatial.

        If you don't supply either geo_refs, then supply a zone. If not
        the default zone will be used.

        precon:
            header has been called.
        """

        number_of_points = len(points_utm)
        volumes = num.array(volumes)
        points_utm = num.array(points_utm)

        # Given the two geo_refs and the points, do the stuff
        # described in the method header
        # if this is needed else where, pull out as a function
        points_georeference = ensure_geo_reference(points_georeference)
        new_origin = ensure_geo_reference(new_origin)
        if new_origin is None and points_georeference is not None:
            points = points_utm
            geo_ref = points_georeference
        else:
            if new_origin is None:
                # Default origin: lower-left corner of the point cloud
                new_origin = Geo_reference(zone, min(points_utm[:,0]),
                                                 min(points_utm[:,1]))
            points = new_origin.change_points_geo_ref(points_utm,
                                                      points_georeference)
            geo_ref = new_origin

        # At this stage I need a georef and points
        # the points are relative to the georef
        geo_ref.write_NetCDF(outfile)

        x =  points[:,0]
        y =  points[:,1]

        if verbose:
            log.critical('------------------------------------------------')
            log.critical('More Statistics:')
            log.critical('  Extent (/lon):')
            log.critical('    x in [%f, %f], len(lat) == %d'
                         % (min(x), max(x), len(x)))
            log.critical('    y in [%f, %f], len(lon) == %d'
                         % (min(y), max(y), len(y)))
            log.critical('geo_ref: %s' % str(geo_ref))
            log.critical('------------------------------------------------')

        outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner()
        outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner()
        outfile.variables['volumes'][:] = volumes.astype(num.int32) #On Opteron 64

    def store_static_quantities(self,
                                outfile,
                                sww_precision=num.float32,
                                verbose=False,
                                **quant):
        """
        Write the static quantity info.

        **quant is extra keyword arguments passed in. These must be
          the numpy arrays to be stored in the sww file at each timestep.

        The argument sww_precision allows for storing as either
        * single precision (default): num.float32
        * double precision: num.float64 or num.float

        Precondition:
            store_triangulation and
            store_header have been called.
        """

        # The dictionary quant must contain numpy arrays for each name.
        # These will typically be quantities from Domain such as friction
        #
        # Arrays not listed in static_quantities will be ignored, silently.
        #
        # This method will also write the ranges for each quantity,
        # e.g. stage_range, xmomentum_range and ymomentum_range
        for q in self.static_quantities:
            if q not in quant:
                # NOTE(review): NewQuantity is not imported in this module
                # - confirm it is brought into scope at runtime.
                msg = 'Values for quantity %s was not specified in ' % q
                msg += 'store_quantities so they cannot be stored.'
                raise NewQuantity(msg)
            else:
                q_values = ensure_numeric(quant[q])

                x = q_values.astype(sww_precision)
                outfile.variables[q][:] = x

                # This populates the _range values
                outfile.variables[q + Write_sww.RANGE][0] = num.min(x)
                outfile.variables[q + Write_sww.RANGE][1] = num.max(x)

        # FIXME: Hack for backwards compatibility with old viewer
        #if 'elevation' in self.static_quantities:
        #    outfile.variables['z'][:] = outfile.variables['elevation'][:]

    def store_quantities(self,
                         outfile,
                         sww_precision=num.float32,
                         slice_index=None,
                         time=None,
                         verbose=False,
                         **quant):
        """
        Write the quantity info at each timestep.

        **quant is extra keyword arguments passed in. These must be
          the numpy arrays to be stored in the sww file at each timestep.

        if the time array is already been built, use the slice_index
        to specify the index.

        Otherwise, use time to increase the time dimension

        Maybe make this general, but the viewer assumes these quantities,
        so maybe we don't want it general - unless the viewer is general

        The argument sww_precision allows for storing as either
        * single precision (default): num.float32
        * double precision: num.float64 or num.float

        Precondition:
            store_triangulation and
            store_header have been called.
        """

        # Exactly one of time/slice_index must be supplied; if both are
        # None the int() cast below will fail.
        if time is not None:
            file_time = outfile.variables['time']
            slice_index = len(file_time)
            file_time[slice_index] = time
        else:
            slice_index = int(slice_index) # Has to be cast in case it was numpy.int

        # Write the named dynamic quantities
        # The dictionary quant must contain numpy arrays for each name.
        # These will typically be the conserved quantities from Domain
        # (Typically stage,  xmomentum, ymomentum).
        #
        # Arrays not listed in dynamic_quantities will be ignored, silently.
        #
        # This method will also write the ranges for each quantity,
        # e.g. stage_range, xmomentum_range and ymomentum_range
        for q in self.dynamic_quantities:
            if q not in quant:
                # NOTE(review): NewQuantity is not imported in this module
                # - confirm it is brought into scope at runtime.
                msg = 'Values for quantity %s was not specified in ' % q
                msg += 'store_quantities so they cannot be stored.'
                raise NewQuantity(msg)
            else:
                q_values = ensure_numeric(quant[q])

                x = q_values.astype(sww_precision)
                outfile.variables[q][slice_index] = x

                # This updates the _range values
                q_range = outfile.variables[q + Write_sww.RANGE][:]
                q_values_min = num.min(q_values)
                if q_values_min < q_range[0]:
                    outfile.variables[q + Write_sww.RANGE][0] = q_values_min
                q_values_max = num.max(q_values)
                if q_values_max > q_range[1]:
                    outfile.variables[q + Write_sww.RANGE][1] = q_values_max

    ##
    # @brief Print the quantities in the underlying file.
    # @param outfile Open handle to the underlying file.
    def verbose_quantities(self, outfile):
        log.critical('------------------------------------------------')
        log.critical('More Statistics:')
        for q in self.dynamic_quantities:
            # Fixed: original line was missing the opening quote of the
            # format string, which is a syntax error
            log.critical('%s in [%f, %f]'
                         % (q, outfile.variables[q+Write_sww.RANGE][0],
                            outfile.variables[q+Write_sww.RANGE][1]))
        log.critical('------------------------------------------------')
812
813
814
815
def extent_sww(file_name):
    """Return the spatial and stage extents of an sww file.

    Input:
    file_name - the sww file

    Output:
    A list: [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
    """

    from Scientific.IO.NetCDF import NetCDFFile

    # Open the NetCDF file read-only and pull out the coordinate
    # and stage arrays.
    fid = NetCDFFile(file_name, netcdf_mode_r)

    xs = fid.variables['x'][:]
    ys = fid.variables['y'][:]
    stages = fid.variables['stage'][:]

    fid.close()

    return [min(xs), max(xs),
            min(ys), max(ys),
            num.min(stages), num.max(stages)]
839
840
def load_sww_as_domain(filename, boundary=None, t=None,
               fail_if_NaN=True, NaN_filler=0,
               verbose=False, very_verbose=False):
    """
    Load an sww file into a domain.

    Usage: domain = load_sww_as_domain('file.sww',
                        t=time (default = last time in file))

    filename       sww file holding x, y, volumes, time and quantity arrays.
    boundary       optional boundary dictionary to attach to the new domain.
    t              model time at which time-dependent quantities are
                   (linearly) interpolated; defaults to the last stored time.
    fail_if_NaN    if True, raise DataMissingValuesError when a quantity
                   contains its missing_value; otherwise those entries are
                   replaced with NaN_filler.
    verbose,
    very_verbose   progress / per-quantity diagnostic logging.

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatable with the coordinates, and will
    give a different final boundary, or crash.
    """
   
    from Scientific.IO.NetCDF import NetCDFFile
    from anuga.shallow_water.shallow_water_domain import Domain

    # initialise NaN.
    # NOTE(review): this looks like the NetCDF default float fill value,
    # used as the sentinel for missing data — confirm against file writer.
    NaN = 9.969209968386869e+036

    if verbose: log.critical('Reading from %s' % filename)

    fid = NetCDFFile(filename, netcdf_mode_r)    # Open existing file for read
    time = fid.variables['time']       # Timesteps
    if t is None:
        t = time[-1]
    # (index, ratio) pair used to interpolate stored timesteps to time t.
    time_interp = get_time_interp(time,t)

    # Get the variables as numeric arrays
    x = fid.variables['x'][:]                   # x-coordinates of vertices
    y = fid.variables['y'][:]                   # y-coordinates of vertices
    elevation = fid.variables['elevation']      # Elevation
    stage = fid.variables['stage']              # Water level
    xmomentum = fid.variables['xmomentum']      # Momentum in the x-direction
    ymomentum = fid.variables['ymomentum']      # Momentum in the y-direction

    starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:]       # Connectivity
    coordinates = num.transpose(num.asarray([x.tolist(), y.tolist()]))
    # FIXME (Ole): Something like this might be better:
    #                 concatenate((x, y), axis=1)
    # or              concatenate((x[:,num.newaxis], x[:,num.newaxis]), axis=1)

    # Classify file variables into time-varying (conserved) quantities,
    # which get interpolated to time t, and static (other) quantities.
    conserved_quantities = []
    interpolated_quantities = {}
    other_quantities = []

    # get geo_reference
    try:                             # sww files don't have to have a geo_ref
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: # AttributeError, e:
        geo_reference = None

    if verbose: log.critical('    getting quantities')

    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity] = \
                  interpolated_quantity(fid.variables[quantity][:], time_interp)
        else:
            other_quantities.append(quantity)

    # Drop bookkeeping variables that are not model quantities.
    other_quantities.remove('x')
    other_quantities.remove('y')
    #other_quantities.remove('z')
    other_quantities.remove('volumes')
    try:
        # Range variables are only present in newer files; best-effort removal.
        other_quantities.remove('stage_range')
        other_quantities.remove('xmomentum_range')
        other_quantities.remove('ymomentum_range')
        other_quantities.remove('elevation_range')
    except:
        pass

    conserved_quantities.remove('time')

    if verbose: log.critical('    building domain')

    #    From domain.Domain:
    #    domain = Domain(coordinates, volumes,\
    #                    conserved_quantities = conserved_quantities,\
    #                    other_quantities = other_quantities,zone=zone,\
    #                    xllcorner=xllcorner, yllcorner=yllcorner)

    # From shallow_water.Domain:
    coordinates = coordinates.tolist()
    volumes = volumes.tolist()
    # FIXME:should this be in mesh? (peter row)
    # Smoothed files store one value per unique node; un-smoothed files
    # store per-triangle-vertex values, so duplicates must be weeded out.
    if fid.smoothing == 'Yes':
        unique = False
    else:
        unique = True
    if unique:
        coordinates, volumes, boundary = weed(coordinates, volumes,boundary)

     
   
    try:
        domain = Domain(coordinates, volumes, boundary)
    except AssertionError, e:
        fid.close()
        msg = 'Domain could not be created: %s. ' \
              'Perhaps use "fail_if_NaN=False and NaN_filler = ..."' % e
        raise DataDomainError, msg

    if not boundary is None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    # Model time restarts at 0 relative to the chosen snapshot time.
    domain.starttime = float(starttime) + float(t)
    domain.time = 0.0

    for quantity in other_quantities:
        try:
            # Prefer the file's own missing_value sentinel when present.
            # NOTE(review): NaN is carried over from the previous quantity
            # if this one lacks missing_value — presumably intentional.
            NaN = fid.variables[quantity].missing_value
        except:
            pass                       # quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            log.critical('       %s' % str(quantity))
            log.critical('        NaN = %s' % str(NaN))
            log.critical('        max(X)')
            log.critical('       %s' % str(max(X)))
            log.critical('        max(X)==NaN')
            log.critical('       %s' % str(max(X)==NaN))
            log.critical('')
        if max(X) == NaN or min(X) == NaN:
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry' % quantity
                raise DataMissingValuesError, msg
            else:
                # Mask out sentinel entries and substitute NaN_filler.
                data = (X != NaN)
                X = (X*data) + (data==0)*NaN_filler
        if unique:
            # Fold the flat per-vertex array back to (n_triangles, 3).
            # NOTE: relies on Python 2 integer division in len(X)/3.
            X = num.resize(X, (len(X)/3, 3))
        domain.set_quantity(quantity, X)
    #
    # Same treatment for the time-interpolated (conserved) quantities.
    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass                       # quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            log.critical('       %s' % str(quantity))
            log.critical('        NaN = %s' % str(NaN))
            log.critical('        max(X)')
            log.critical('       %s' % str(max(X)))
            log.critical('        max(X)==NaN')
            log.critical('       %s' % str(max(X)==NaN))
            log.critical('')
        if max(X) == NaN or min(X) == NaN:
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry' % quantity
                raise DataMissingValuesError, msg
            else:
                data = (X != NaN)
                X = (X*data) + (data==0)*NaN_filler
        if unique:
            X = num.resize(X, (X.shape[0]/3, 3))
        domain.set_quantity(quantity, X)

    fid.close()

    return domain
1010
1011
1012def get_mesh_and_quantities_from_file(filename,
1013                                      quantities=None,
1014                                      verbose=False):
1015    """Get and rebuild mesh structure and associated quantities from sww file
1016
1017    Input:
1018        filename - Name os sww file
1019        quantities - Names of quantities to load
1020
1021    Output:
1022        mesh - instance of class Interpolate
1023               (including mesh and interpolation functionality)
1024        quantities - arrays with quantity values at each mesh node
1025        time - vector of stored timesteps
1026
1027    This function is used by e.g.:
1028        get_interpolated_quantities_at_polyline_midpoints
1029    """
1030
1031    # FIXME (Ole): Maybe refactor filefunction using this more fundamental code.
1032
1033    import types
1034    from Scientific.IO.NetCDF import NetCDFFile
1035    from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh
1036
1037    if verbose: log.critical('Reading from %s' % filename)
1038
1039    fid = NetCDFFile(filename, netcdf_mode_r)    # Open existing file for read
1040    time = fid.variables['time'][:]    # Time vector
1041    time += fid.starttime[0]
1042
1043    # Get the variables as numeric arrays
1044    x = fid.variables['x'][:]                   # x-coordinates of nodes
1045    y = fid.variables['y'][:]                   # y-coordinates of nodes
1046    elevation = fid.variables['elevation'][:]   # Elevation
1047    stage = fid.variables['stage'][:]           # Water level
1048    xmomentum = fid.variables['xmomentum'][:]   # Momentum in the x-direction
1049    ymomentum = fid.variables['ymomentum'][:]   # Momentum in the y-direction
1050
1051    # Mesh (nodes (Mx2), triangles (Nx3))
1052    nodes = num.concatenate((x[:,num.newaxis], y[:,num.newaxis]), axis=1)
1053    triangles = fid.variables['volumes'][:]
1054
1055    # Get geo_reference
1056    try:
1057        geo_reference = Geo_reference(NetCDFObject=fid)
1058    except: #AttributeError, e:
1059        # Sww files don't have to have a geo_ref
1060        geo_reference = None
1061
1062    if verbose: log.critical('    building mesh from sww file %s' % filename)
1063
1064    boundary = None
1065
1066    #FIXME (Peter Row): Should this be in mesh?
1067    if fid.smoothing != 'Yes':
1068        nodes = nodes.tolist()
1069        triangles = triangles.tolist()
1070        nodes, triangles, boundary = weed(nodes, triangles, boundary)
1071
1072    try:
1073        mesh = Mesh(nodes, triangles, boundary, geo_reference=geo_reference)
1074    except AssertionError, e:
1075        fid.close()
1076        msg = 'Domain could not be created: %s. "' % e
1077        raise DataDomainError, msg
1078
1079    quantities = {}
1080    quantities['elevation'] = elevation
1081    quantities['stage'] = stage
1082    quantities['xmomentum'] = xmomentum
1083    quantities['ymomentum'] = ymomentum
1084
1085    fid.close()
1086
1087    return mesh, quantities, time
1088
1089
def get_time_interp(time, t=None):
    """Find the index and ratio for interpolating in a time vector.

    time is an array of stored timesteps; t is the sample time.
    Returns a tuple (index, ratio): the sample lies between time[index]
    and time[index+1], a fraction 'ratio' of the way along the interval.
    With t omitted the last stored timestep is selected exactly.
    """
    if t is None:
        # Default: the final timestep, exactly (no interpolation needed).
        return (-1, 0.)

    # Reject sample times outside the stored interval.
    if t < time[0] or t > time[-1]:
        msg = 'Time interval derived from file %s [%s:%s]' \
              % ('FIXMEfilename', time[0], time[-1])
        msg += ' does not match model time: %s' % t
        raise DataTimeError(msg)

    # Walk forward past t, then back to the bracketing index.
    index = 0
    while t > time[index]:
        index += 1
    while t < time[index]:
        index -= 1

    if t == time[index]:
        # Exact hit - also covers the t == time[-1] (last time) case.
        ratio = 0
    else:
        # t lies strictly between index and index+1.
        ratio = (t - time[index])/(time[index+1] - time[index])

    return (index, ratio)
1120
1121
1122
1123##
1124# @brief Interpolate a quantity wrt time.
1125# @param saved_quantity The quantity to interpolate.
1126# @param time_interp (index, ratio)
1127# @return A vector of interpolated values.
1128def interpolated_quantity(saved_quantity, time_interp):
1129    '''Given an index and ratio, interpolate quantity with respect to time.'''
1130
1131    index, ratio = time_interp
1132
1133    Q = saved_quantity
1134
1135    if ratio > 0:
1136        q = (1-ratio)*Q[index] + ratio*Q[index+1]
1137    else:
1138        q = Q[index]
1139
1140    #Return vector of interpolated values
1141    return q
1142
1143
def weed(coordinates, volumes, boundary=None):
    """Excise all duplicate points from a triangulation.

    coordinates   list (or array) of [x, y] points, possibly containing
                  repeated points.
    volumes       list (or array) of triangles given as triples of indices
                  into coordinates.
    boundary      optional dict mapping (index0, index1) segments to labels.

    Returns (coordinates, volumes, boundary) rewritten in terms of the
    de-duplicated point list.  Where a boundary segment (or its reverse)
    appears more than once, the first label encountered is kept.
    """
    if isinstance(coordinates, num.ndarray):
        coordinates = coordinates.tolist()
    if isinstance(volumes, num.ndarray):
        volumes = volumes.tolist()

    # same_point maps every original index to its point;
    # point_dict maps each distinct point to a canonical index.
    # (The unused 'unique' duplicate-flag of the old version is gone.)
    point_dict = {}
    same_point = {}
    for i, coord in enumerate(coordinates):
        point = tuple(coord)
        same_point[i] = point
        if point not in point_dict:
            point_dict[point] = i

    # Rebuild the coordinate list with one entry per distinct point and
    # renumber point_dict to the new, compact indices.
    coordinates = []
    for i, point in enumerate(point_dict.keys()):
        coordinates.append(list(point))
        point_dict[point] = i

    # Re-point every triangle vertex at its canonical index
    # (negative indices are sentinels and left alone).
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    if boundary is not None:
        new_boundary = {}
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            # FIXME: should the boundary attributes be concatenated
            # ('exterior, pond') or replaced ('pond')? (peter row)
            # Current policy: first label wins; a segment and its reverse
            # count as the same segment.
            if (point0, point1) not in new_boundary and \
               (point1, point0) not in new_boundary:
                new_boundary[(point0, point1)] = label

        boundary = new_boundary

    return coordinates, volumes, boundary
1198
1199
1200
Note: See TracBrowser for help on using the repository browser.