source: trunk/anuga_core/source/anuga/file/sww.py @ 8068

Last change on this file since 8068 was 8068, checked in by habili, 13 years ago

Fixed bug where starttime and duration in the evolve function did not produce correct results. Starttime is now set in the constructor of Domain and is 0 by default; time is set to starttime. Also fixed a bug where duration did not correctly calculate the finaltime. Associated unit tests have been updated to reflect the change.

File size: 43.2 KB
Line 
1""" Classes to read an SWW file.
2"""
3
4from anuga.coordinate_transforms.geo_reference import Geo_reference
5from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a
6from anuga.config import netcdf_float, netcdf_float32, netcdf_int
7from anuga.config import max_float
8from anuga.utilities.numerical_tools import ensure_numeric
9import anuga.utilities.log as log
10from Scientific.IO.NetCDF import NetCDFFile
11
12from sts import Write_sts
13
14from anuga.coordinate_transforms.geo_reference import \
15        ensure_geo_reference
16
17from file_utils import create_filename
18import numpy as num
19
20##
21# @brief Generic class for storing output to e.g. visualisation or checkpointing
22class Data_format:
23    """Generic interface to data formats
24    """
25
26    ##
27    # @brief Instantiate this instance.
28    # @param domain
29    # @param extension
30    # @param mode The mode of the underlying file.
31    def __init__(self, domain, extension, mode=netcdf_mode_w):
32        assert mode[0] in ['r', 'w', 'a'], \
33               "Mode %s must be either:\n" % mode + \
34               "   'w' (write)\n" + \
35               "   'r' (read)\n" + \
36               "   'a' (append)"
37
38        #Create filename
39        self.filename = create_filename(domain.get_datadir(),
40                                        domain.get_name(), extension)
41
42        self.timestep = 0
43        self.domain = domain
44
45        # Exclude ghosts in case this is a parallel domain
46        self.number_of_nodes = domain.number_of_full_nodes
47        self.number_of_volumes = domain.number_of_full_triangles
48        #self.number_of_volumes = len(domain)
49
50        #FIXME: Should we have a general set_precision function?
51
52
53class SWW_file(Data_format):
54    """Interface to native NetCDF format (.sww) for storing model output
55
56    There are two kinds of data
57
58    1: Constant data: Vertex coordinates and field values. Stored once
59    2: Variable data: Conserved quantities. Stored once per timestep.
60
61    All data is assumed to reside at vertex locations.
62    """
63
64    ##
65    # @brief Instantiate this instance.
66    # @param domain ??
67    # @param mode Mode of the underlying data file.
68    # @param max_size ??
69    # @param recursion ??
70    # @note Prepare the underlying data file if mode starts with 'w'.
71    def __init__(self, domain, 
72                 mode=netcdf_mode_w, max_size=2000000000, recursion=False):
73        from Scientific.IO.NetCDF import NetCDFFile
74
75        self.precision = netcdf_float32 # Use single precision for quantities
76        self.recursion = recursion
77        self.mode = mode
78        if hasattr(domain, 'max_size'):
79            self.max_size = domain.max_size # File size max is 2Gig
80        else:
81            self.max_size = max_size
82        if hasattr(domain, 'minimum_storable_height'):
83            self.minimum_storable_height = domain.minimum_storable_height
84        else:
85            self.minimum_storable_height = default_minimum_storable_height
86
87        # Call parent constructor
88        Data_format.__init__(self, domain, 'sww', mode)
89
90        # Get static and dynamic quantities from domain
91        static_quantities = []
92        dynamic_quantities = []
93       
94        for q in domain.quantities_to_be_stored:
95            flag = domain.quantities_to_be_stored[q]
96       
97            msg = 'Quantity %s is requested to be stored ' % q
98            msg += 'but it does not exist in domain.quantities'
99            assert q in domain.quantities, msg
100       
101            assert flag in [1,2]
102            if flag == 1: static_quantities.append(q)
103            if flag == 2: dynamic_quantities.append(q)               
104                       
105       
106        # NetCDF file definition
107        fid = NetCDFFile(self.filename, mode)
108        if mode[0] == 'w':
109            description = 'Output from anuga.abstract_2d_finite_volumes ' \
110                          'suitable for plotting'
111                         
112            self.writer = Write_sww(static_quantities, dynamic_quantities)
113            self.writer.store_header(fid,
114                                     domain.starttime,
115                                     self.number_of_volumes,
116                                     self.domain.number_of_full_nodes,
117                                     description=description,
118                                     smoothing=domain.smooth,
119                                     order=domain.default_order,
120                                     sww_precision=self.precision)
121
122            # Extra optional information
123            if hasattr(domain, 'texture'):
124                fid.texture = domain.texture
125
126            if domain.quantities_to_be_monitored is not None:
127                fid.createDimension('singleton', 1)
128                fid.createDimension('two', 2)
129
130                poly = domain.monitor_polygon
131                if poly is not None:
132                    N = len(poly)
133                    fid.createDimension('polygon_length', N)
134                    fid.createVariable('extrema.polygon',
135                                       self.precision,
136                                       ('polygon_length', 'two'))
137                    fid.variables['extrema.polygon'][:] = poly
138
139                interval = domain.monitor_time_interval
140                if interval is not None:
141                    fid.createVariable('extrema.time_interval',
142                                       self.precision,
143                                       ('two',))
144                    fid.variables['extrema.time_interval'][:] = interval
145
146                for q in domain.quantities_to_be_monitored:
147                    fid.createVariable(q + '.extrema', self.precision,
148                                       ('numbers_in_range',))
149                    fid.createVariable(q + '.min_location', self.precision,
150                                       ('numbers_in_range',))
151                    fid.createVariable(q + '.max_location', self.precision,
152                                       ('numbers_in_range',))
153                    fid.createVariable(q + '.min_time', self.precision,
154                                       ('singleton',))
155                    fid.createVariable(q + '.max_time', self.precision,
156                                       ('singleton',))
157
158        fid.close()
159
160    ##
161    # @brief Store connectivity data into the underlying data file.
162    def store_connectivity(self):
163        """Store information about nodes, triangles and static quantities
164
165        Writes x,y coordinates of triangles and their connectivity.
166       
167        Store also any quantity that has been identified as static.
168        """
169
170        # FIXME: Change name to reflect the fact thta this function
171        # stores both connectivity (triangulation) and static quantities
172       
173        from Scientific.IO.NetCDF import NetCDFFile
174
175        domain = self.domain
176
177        # append to the NetCDF file
178        fid = NetCDFFile(self.filename, netcdf_mode_a)
179
180        # Get X, Y from one (any) of the quantities
181        Q = domain.quantities.values()[0]
182        X,Y,_,V = Q.get_vertex_values(xy=True, precision=self.precision)
183
184        # store the connectivity data
185        points = num.concatenate((X[:,num.newaxis],Y[:,num.newaxis]), axis=1)
186        self.writer.store_triangulation(fid,
187                                        points,
188                                        V.astype(num.float32),
189                                        points_georeference=\
190                                            domain.geo_reference)
191
192
193        # Get names of static quantities
194        static_quantities = {}
195        for name in self.writer.static_quantities:
196            Q = domain.quantities[name]
197            A, _ = Q.get_vertex_values(xy=False, 
198                                       precision=self.precision)
199            static_quantities[name] = A
200       
201        # Store static quantities       
202        self.writer.store_static_quantities(fid, **static_quantities)
203                                           
204        fid.close()
205
206    ##
207    # @brief Store time and time dependent quantities
208    # to the underlying data file.
209    def store_timestep(self):
210        """Store time and time dependent quantities
211        """
212
213        from Scientific.IO.NetCDF import NetCDFFile
214        import types
215        from time import sleep
216        from os import stat
217
218        # Get NetCDF
219        retries = 0
220        file_open = False
221        while not file_open and retries < 10:
222            try:
223                # Open existing file
224                fid = NetCDFFile(self.filename, netcdf_mode_a)
225            except IOError:
226                # This could happen if someone was reading the file.
227                # In that case, wait a while and try again
228                msg = 'Warning (store_timestep): File %s could not be opened' \
229                      % self.filename
230                msg += ' - trying step %s again' % self.domain.time
231                log.critical(msg)
232                retries += 1
233                sleep(1)
234            else:
235                file_open = True
236
237        if not file_open:
238            msg = 'File %s could not be opened for append' % self.filename
239            raise DataFileNotOpenError, msg
240
241        # Check to see if the file is already too big:
242        time = fid.variables['time']
243        i = len(time) + 1
244        file_size = stat(self.filename)[6]
245        file_size_increase = file_size / i
246        if file_size + file_size_increase > self.max_size * 2**self.recursion:
247            # In order to get the file name and start time correct,
248            # I change the domain.filename and domain.starttime.
249            # This is the only way to do this without changing
250            # other modules (I think).
251
252            # Write a filename addon that won't break the anuga viewers
253            # (10.sww is bad)
254            filename_ext = '_time_%s' % self.domain.time
255            filename_ext = filename_ext.replace('.', '_')
256
257            # Remember the old filename, then give domain a
258            # name with the extension
259            old_domain_filename = self.domain.get_name()
260            if not self.recursion:
261                self.domain.set_name(old_domain_filename + filename_ext)
262
263            # Temporarily change the domain starttime to the current time
264            old_domain_starttime = self.domain.starttime
265            self.domain.starttime = self.domain.get_time()
266
267            # Build a new data_structure.
268            next_data_structure = SWW_file(self.domain, mode=self.mode,
269                                           max_size=self.max_size,
270                                           recursion=self.recursion+1)
271            if not self.recursion:
272                log.critical('    file_size = %s' % file_size)
273                log.critical('    saving file to %s'
274                             % next_data_structure.filename) 
275
276            # Set up the new data_structure
277            self.domain.writer = next_data_structure
278
279            # Store connectivity and first timestep
280            next_data_structure.store_connectivity()
281            next_data_structure.store_timestep()
282            fid.sync()
283            fid.close()
284
285            # Restore the old starttime and filename
286            self.domain.starttime = old_domain_starttime
287            self.domain.set_name(old_domain_filename)
288        else:
289            self.recursion = False
290            domain = self.domain
291
292            # Get the variables
293            time = fid.variables['time']
294            i = len(time)
295             
296            if 'stage' in self.writer.dynamic_quantities:           
297                # Select only those values for stage,
298                # xmomentum and ymomentum (if stored) where
299                # depth exceeds minimum_storable_height
300                #
301                # In this branch it is assumed that elevation
302                # is also available as a quantity           
303           
304                Q = domain.quantities['stage']
305                w, _ = Q.get_vertex_values(xy=False)
306               
307                Q = domain.quantities['elevation']
308                z, _ = Q.get_vertex_values(xy=False)               
309               
310                storable_indices = (w-z >= self.minimum_storable_height)
311            else:
312                # Very unlikely branch
313                storable_indices = None # This means take all
314           
315           
316            # Now store dynamic quantities
317            dynamic_quantities = {}
318            for name in self.writer.dynamic_quantities:
319                netcdf_array = fid.variables[name]
320               
321                Q = domain.quantities[name]
322                A, _ = Q.get_vertex_values(xy=False,
323                                           precision=self.precision)
324
325                if storable_indices is not None:
326                    if name == 'stage':
327                        A = num.choose(storable_indices, (z, A))
328
329                    if name in ['xmomentum', 'ymomentum']:
330                        # Get xmomentum where depth exceeds
331                        # minimum_storable_height
332                       
333                        # Define a zero vector of same size and type as A
334                        # for use with momenta
335                        null = num.zeros(num.size(A), A.dtype.char)
336                        A = num.choose(storable_indices, (null, A))
337               
338                dynamic_quantities[name] = A
339               
340                                       
341            # Store dynamic quantities
342            self.writer.store_quantities(fid,
343                                         time=self.domain.time,
344                                         sww_precision=self.precision,
345                                         **dynamic_quantities)
346
347
348            # Update extrema if requested
349            domain = self.domain
350            if domain.quantities_to_be_monitored is not None:
351                for q, info in domain.quantities_to_be_monitored.items():
352                    if info['min'] is not None:
353                        fid.variables[q + '.extrema'][0] = info['min']
354                        fid.variables[q + '.min_location'][:] = \
355                                        info['min_location']
356                        fid.variables[q + '.min_time'][0] = info['min_time']
357
358                    if info['max'] is not None:
359                        fid.variables[q + '.extrema'][1] = info['max']
360                        fid.variables[q + '.max_location'][:] = \
361                                        info['max_location']
362                        fid.variables[q + '.max_time'][0] = info['max_time']
363
364            # Flush and close
365            fid.sync()
366            fid.close()
367
368
369##
370# @brief Class to open an sww file so that domain can be populated with quantity values
371class Read_sww:
372
373    def __init__(self, source):
374        """The source parameter is assumed to be a NetCDF sww file.
375        """
376
377        self.source = source
378
379        self.frame_number = 0
380
381        fin = NetCDFFile(self.source, 'r')
382
383        self.time = num.array(fin.variables['time'], num.float)
384        self.last_frame_number = self.time.shape[0] - 1
385
386        self.frames = num.arange(self.last_frame_number+1)
387
388        fin.close()
389       
390        self.read_mesh()
391
392        self.quantities = {}
393
394        self.read_quantities()
395
396
397    def read_mesh(self):
398        """ Read and store the mesh data contained within this sww file.
399        """
400        fin = NetCDFFile(self.source, 'r')
401
402        self.vertices = num.array(fin.variables['volumes'], num.int)
403       
404        self.x = x = num.array(fin.variables['x'], num.float)
405        self.y = y = num.array(fin.variables['y'], num.float)
406
407        assert len(self.x) == len(self.y)
408       
409        self.xmin = num.min(x)
410        self.xmax = num.max(x)
411        self.ymin = num.min(y)
412        self.ymax = num.max(y)
413
414
415        fin.close()
416       
417    def read_quantities(self, frame_number=0):
418        """
419        Read the quantities contained in this file.
420        frame_number is the time index to load.
421        """
422        assert frame_number >= 0 and frame_number <= self.last_frame_number
423
424        self.frame_number = frame_number
425
426        M = len(self.x)/3
427       
428        fin = NetCDFFile(self.source, 'r')
429       
430        for q in filter(lambda n:n != 'x' and n != 'y' and n != 'time' and n != 'volumes' and \
431                        '_range' not in n, \
432                        fin.variables.keys()):
433            if len(fin.variables[q].shape) == 1: # Not a time-varying quantity
434                self.quantities[q] = num.ravel(num.array(fin.variables[q], num.float)).reshape(M,3)
435            else: # Time-varying, get the current timestep data
436                self.quantities[q] = num.array(fin.variables[q][self.frame_number], num.float).reshape(M,3)
437        fin.close()
438        return self.quantities
439
440    def get_bounds(self):
441        """
442            Get the bounding rect around the mesh.
443        """
444        return [self.xmin, self.xmax, self.ymin, self.ymax]
445
446    def get_last_frame_number(self):
447        """
448            Return the last loaded frame index.
449        """
450        return self.last_frame_number
451
452    def get_time(self):
453        """
454            Get time at the current frame num, in secs.
455        """
456        return self.time[self.frame_number]
457
458
459class Write_sww(Write_sts):
460    """
461        A class to write an SWW file.
462       
463        It is domain agnostic, and requires all the data to be fed in
464        manually.
465    """
466
467    def __init__(self, static_quantities, dynamic_quantities):
468        """Initialise Write_sww with two list af quantity names:
469       
470        static_quantities (e.g. elevation or friction):
471            Stored once at the beginning of the simulation in a 1D array
472            of length number_of_points   
473        dynamic_quantities (e.g stage):
474            Stored every timestep in a 2D array with
475            dimensions number_of_points X number_of_timesteps       
476       
477        """
478        self.static_quantities = static_quantities   
479        self.dynamic_quantities = dynamic_quantities
480
481
482    def store_header(self,
483                     outfile,
484                     times,
485                     number_of_volumes,
486                     number_of_points,
487                     description='Generated by ANUGA',
488                     smoothing=True,
489                     order=1,
490                     sww_precision=netcdf_float32,
491                     verbose=False):
492        """Write an SWW file header.
493
494        Writes the first section of the .sww file.
495
496        outfile - the open file that will be written
497        times - A list of the time slice times OR a start time
498        Note, if a list is given the info will be made relative.
499        number_of_volumes - the number of triangles
500        number_of_points - the number of vertices in the mesh
501        """
502
503        from anuga.abstract_2d_finite_volumes.util \
504            import get_revision_number
505
506        outfile.institution = 'Geoscience Australia'
507        outfile.description = description
508
509        # For sww compatibility
510        if smoothing is True:
511            # Smoothing to be depreciated
512            outfile.smoothing = 'Yes'
513            outfile.vertices_are_stored_uniquely = 'False'
514        else:
515            # Smoothing to be depreciated
516            outfile.smoothing = 'No'
517            outfile.vertices_are_stored_uniquely = 'True'
518        outfile.order = order
519
520        try:
521            revision_number = get_revision_number()
522        except:
523            # This will be triggered if the system cannot get the SVN
524            # revision number.
525            revision_number = None
526        # Allow None to be stored as a string
527        outfile.revision_number = str(revision_number)
528
529        # This is being used to seperate one number from a list.
530        # what it is actually doing is sorting lists from numeric arrays.
531        if isinstance(times, (list, num.ndarray)):
532            number_of_times = len(times)
533            times = ensure_numeric(times)
534            if number_of_times == 0:
535                starttime = 0
536            else:
537                starttime = times[0]
538                times = times - starttime  #Store relative times
539        else:
540            number_of_times = 0
541            starttime = times
542
543
544        outfile.starttime = starttime
545
546        # dimension definitions
547        outfile.createDimension('number_of_volumes', number_of_volumes)
548        outfile.createDimension('number_of_vertices', 3)
549        outfile.createDimension('numbers_in_range', 2)
550
551        if smoothing is True:
552            outfile.createDimension('number_of_points', number_of_points)
553            # FIXME(Ole): This will cause sww files for parallel domains to
554            # have ghost nodes stored (but not used by triangles).
555            # To clean this up, we have to change get_vertex_values and
556            # friends in quantity.py (but I can't be bothered right now)
557        else:
558            outfile.createDimension('number_of_points', 3*number_of_volumes)
559
560        outfile.createDimension('number_of_timesteps', number_of_times)
561
562        # variable definitions
563        outfile.createVariable('x', sww_precision, ('number_of_points',))
564        outfile.createVariable('y', sww_precision, ('number_of_points',))
565
566        outfile.createVariable('volumes', netcdf_int, ('number_of_volumes',
567                                                       'number_of_vertices'))
568
569
570        for q in self.static_quantities:
571           
572            outfile.createVariable(q, sww_precision,
573                                   ('number_of_points',))
574           
575            outfile.createVariable(q + Write_sww.RANGE, sww_precision,
576                                   ('numbers_in_range',))
577                                   
578            # Initialise ranges with small and large sentinels.
579            # If this was in pure Python we could have used None sensibly
580            outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
581            outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
582
583       
584        self.write_dynamic_quantities(outfile, self.dynamic_quantities, times, \
585                                        precis = sww_precision)
586
587
588
589    def store_triangulation(self,
590                            outfile,
591                            points_utm,
592                            volumes,
593                            zone=None, 
594                            new_origin=None,
595                            points_georeference=None, 
596                            verbose=False):
597        """
598        Store triangulation data in the underlying file.
599       
600        Stores the points and triangle indices in the sww file
601       
602        outfile Open handle to underlying file.
603
604        new_origin georeference that the points can be set to.
605
606        points_georeference The georeference of the points_utm.
607
608        verbose True if this function is to be verbose.
609
610        new_origin - qa georeference that the points can be set to. (Maybe
611        do this before calling this function.)
612
613        points_utm - currently a list or array of the points in UTM.
614        points_georeference - the georeference of the points_utm
615
616        How about passing new_origin and current_origin.
617        If you get both, do a convertion from the old to the new.
618
619        If you only get new_origin, the points are absolute,
620        convert to relative
621
622        if you only get the current_origin the points are relative, store
623        as relative.
624
625        if you get no georefs create a new georef based on the minimums of
626        points_utm.  (Another option would be to default to absolute)
627
628        Yes, and this is done in another part of the code.
629        Probably geospatial.
630
631        If you don't supply either geo_refs, then supply a zone. If not
632        the default zone will be used.
633
634        precon:
635            header has been called.
636        """
637
638        number_of_points = len(points_utm)
639        volumes = num.array(volumes)
640        points_utm = num.array(points_utm)
641
642        # Given the two geo_refs and the points, do the stuff
643        # described in the method header
644        # if this is needed else where, pull out as a function
645        points_georeference = ensure_geo_reference(points_georeference)
646        new_origin = ensure_geo_reference(new_origin)
647        if new_origin is None and points_georeference is not None:
648            points = points_utm
649            geo_ref = points_georeference
650        else:
651            if new_origin is None:
652                new_origin = Geo_reference(zone, min(points_utm[:,0]),
653                                                 min(points_utm[:,1]))
654            points = new_origin.change_points_geo_ref(points_utm,
655                                                      points_georeference)
656            geo_ref = new_origin
657
658        # At this stage I need a georef and points
659        # the points are relative to the georef
660        geo_ref.write_NetCDF(outfile)
661
662        # This will put the geo ref in the middle
663        #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
664
665        x =  points[:,0]
666        y =  points[:,1]
667
668        if verbose:
669            log.critical('------------------------------------------------')
670            log.critical('More Statistics:')
671            log.critical('  Extent (/lon):')
672            log.critical('    x in [%f, %f], len(lat) == %d'
673                         % (min(x), max(x), len(x)))
674            log.critical('    y in [%f, %f], len(lon) == %d'
675                         % (min(y), max(y), len(y)))
676            #log.critical('    z in [%f, %f], len(z) == %d'
677            #             % (min(elevation), max(elevation), len(elevation)))
678            log.critical('geo_ref: %s' % str(geo_ref))
679            log.critical('------------------------------------------------')
680
681        outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner()
682        outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner()
683        outfile.variables['volumes'][:] = volumes.astype(num.int32) #On Opteron 64
684
685
686    def store_static_quantities(self, 
687                                outfile, 
688                                sww_precision=num.float32,
689                                verbose=False, 
690                                **quant):
691        """
692        Write the static quantity info.
693
694        **quant is extra keyword arguments passed in. These must be
695          the numpy arrays to be stored in the sww file at each timestep.
696
697        The argument sww_precision allows for storing as either
698        * single precision (default): num.float32
699        * double precision: num.float64 or num.float
700
701        Precondition:
702            store_triangulation and
703            store_header have been called.
704        """
705
706        # The dictionary quant must contain numpy arrays for each name.
707        # These will typically be quantities from Domain such as friction
708        #
709        # Arrays not listed in static_quantitiues will be ignored, silently.
710        #
711        # This method will also write the ranges for each quantity,
712        # e.g. stage_range, xmomentum_range and ymomentum_range
713        for q in self.static_quantities:
714            if not quant.has_key(q):
715                msg = 'Values for quantity %s was not specified in ' % q
716                msg += 'store_quantities so they cannot be stored.'
717                raise NewQuantity, msg
718            else:
719                q_values = ensure_numeric(quant[q])
720               
721                x = q_values.astype(sww_precision)
722                outfile.variables[q][:] = x
723       
724                # This populates the _range values
725                outfile.variables[q + Write_sww.RANGE][0] = num.min(x)
726                outfile.variables[q + Write_sww.RANGE][1] = num.max(x)
727                   
728        # FIXME: Hack for backwards compatibility with old viewer
729        #if 'elevation' in self.static_quantities:
730        #    outfile.variables['z'][:] = outfile.variables['elevation'][:]
731
732                   
733                   
734       
735       
736    def store_quantities(self, 
737                         outfile, 
738                         sww_precision=num.float32,
739                         slice_index=None,
740                         time=None,
741                         verbose=False, 
742                         **quant):
743        """
744        Write the quantity info at each timestep.
745
746        **quant is extra keyword arguments passed in. These must be
747          the numpy arrays to be stored in the sww file at each timestep.
748
749        if the time array is already been built, use the slice_index
750        to specify the index.
751
752        Otherwise, use time to increase the time dimension
753
754        Maybe make this general, but the viewer assumes these quantities,
755        so maybe we don't want it general - unless the viewer is general
756       
757        The argument sww_precision allows for storing as either
758        * single precision (default): num.float32
759        * double precision: num.float64 or num.float
760
761        Precondition:
762            store_triangulation and
763            store_header have been called.
764        """
765
766        if time is not None:
767            file_time = outfile.variables['time']
768            slice_index = len(file_time)
769            file_time[slice_index] = time
770        else:
771            slice_index = int(slice_index) # Has to be cast in case it was numpy.int   
772
773        # Write the named dynamic quantities
774        # The dictionary quant must contain numpy arrays for each name.
775        # These will typically be the conserved quantities from Domain
776        # (Typically stage,  xmomentum, ymomentum).
777        #
778        # Arrays not listed in dynamic_quantitiues will be ignored, silently.
779        #
780        # This method will also write the ranges for each quantity,
781        # e.g. stage_range, xmomentum_range and ymomentum_range
782        for q in self.dynamic_quantities:
783            if not quant.has_key(q):
784                msg = 'Values for quantity %s was not specified in ' % q
785                msg += 'store_quantities so they cannot be stored.'
786                raise NewQuantity, msg
787            else:
788                q_values = ensure_numeric(quant[q])
789               
790                q_retyped = q_values.astype(sww_precision)
791                outfile.variables[q][slice_index] = q_retyped
792                   
793                # This updates the _range values
794                q_range = outfile.variables[q + Write_sww.RANGE][:]
795                q_values_min = num.min(q_values)
796                if q_values_min < q_range[0]:
797                    outfile.variables[q + Write_sww.RANGE][0] = q_values_min
798                q_values_max = num.max(q_values)
799                if q_values_max > q_range[1]:
800                    outfile.variables[q + Write_sww.RANGE][1] = q_values_max
801
802    ##
803    # @brief Print the quantities in the underlying file.
804    # @param outfile UNUSED.
805    def verbose_quantities(self, outfile):
806        log.critical('------------------------------------------------')
807        log.critical('More Statistics:')
808        for q in self.dynamic_quantities:
809            log.critical(%s in [%f, %f]'
810                         % (q, outfile.variables[q+Write_sww.RANGE][0],
811                            outfile.variables[q+Write_sww.RANGE][1]))
812        log.critical('------------------------------------------------')
813
814
815
816
def extent_sww(file_name):
    """Read in an sww file, then get its extents

    Input:
    file_name - the sww file

    Output:
    A list: [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
    """

    from Scientific.IO.NetCDF import NetCDFFile

    # Open the NetCDF file read-only and pull out the arrays we need.
    fid = NetCDFFile(file_name, netcdf_mode_r)
    xs = fid.variables['x'][:]
    ys = fid.variables['y'][:]
    stage_values = fid.variables['stage'][:]
    fid.close()

    # Bounding box in x and y, plus the overall stage (water level) range.
    return [min(xs), max(xs), min(ys), max(ys),
            num.min(stage_values), num.max(stage_values)]
840
841
def load_sww_as_domain(filename, boundary=None, t=None,
               fail_if_NaN=True, NaN_filler=0,
               verbose=False, very_verbose=False):
    """
    Load an sww file into a domain.

    Usage: domain = load_sww_as_domain('file.sww',
                        t=time (default = last time in file))

    filename      name of the sww file to read
    boundary      optional boundary dictionary; if given it must be
                  compatible with the file's mesh (see warning below)
    t             time at which to extract the quantities, interpolated
                  between stored timesteps (default: last stored time)
    fail_if_NaN   if True, raise DataMissingValuesError when a quantity
                  contains the file's missing_value sentinel
    NaN_filler    value substituted for missing entries when
                  fail_if_NaN is False
    verbose       log progress
    very_verbose  also log per-quantity missing-value statistics

    Returns a shallow_water Domain whose starttime is the file's
    starttime plus t, so evolve() continues from the extracted instant.

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatable with the coordinates, and will
    give a different final boundary, or crash.
    """
   
    from Scientific.IO.NetCDF import NetCDFFile
    from anuga.shallow_water.shallow_water_domain import Domain

    # initialise NaN.
    # NOTE(review): this is the conventional netCDF missing_value sentinel,
    # not an IEEE NaN; it is rebound below from each variable's own
    # missing_value attribute when one is present.
    NaN = 9.969209968386869e+036

    if verbose: log.critical('Reading from %s' % filename)

    fid = NetCDFFile(filename, netcdf_mode_r)    # Open existing file for read
    time = fid.variables['time']       # Timesteps
    if t is None:
        t = time[-1]
    # (index, ratio) used to interpolate time-dependent quantities to t.
    time_interp = get_time_interp(time,t)

    # Get the variables as numeric arrays
    x = fid.variables['x'][:]                   # x-coordinates of vertices
    y = fid.variables['y'][:]                   # y-coordinates of vertices
    elevation = fid.variables['elevation']      # Elevation
    stage = fid.variables['stage']              # Water level
    xmomentum = fid.variables['xmomentum']      # Momentum in the x-direction
    ymomentum = fid.variables['ymomentum']      # Momentum in the y-direction

    starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:]       # Connectivity
    coordinates = num.transpose(num.asarray([x.tolist(), y.tolist()]))
    # FIXME (Ole): Something like this might be better:
    #                 concatenate((x, y), axis=1)
    # or              concatenate((x[:,num.newaxis], x[:,num.newaxis]), axis=1)

    conserved_quantities = []
    interpolated_quantities = {}
    other_quantities = []

    # get geo_reference
    try:                             # sww files don't have to have a geo_ref
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: # AttributeError, e:
        geo_reference = None

    if verbose: log.critical('    getting quantities')

    # Classify file variables: anything carrying a time dimension is a
    # conserved quantity (interpolated to time t); the rest are static.
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity] = \
                  interpolated_quantity(fid.variables[quantity][:], time_interp)
        else:
            other_quantities.append(quantity)

    # Coordinates, connectivity and *_range bookkeeping variables are not
    # quantities to set on the domain.
    other_quantities.remove('x')
    other_quantities.remove('y')
    #other_quantities.remove('z')
    other_quantities.remove('volumes')
    try:
        other_quantities.remove('stage_range')
        other_quantities.remove('xmomentum_range')
        other_quantities.remove('ymomentum_range')
        other_quantities.remove('elevation_range')
    except:
        # Older sww files don't store the *_range variables.
        pass

    conserved_quantities.remove('time')

    if verbose: log.critical('    building domain')

    #    From domain.Domain:
    #    domain = Domain(coordinates, volumes,\
    #                    conserved_quantities = conserved_quantities,\
    #                    other_quantities = other_quantities,zone=zone,\
    #                    xllcorner=xllcorner, yllcorner=yllcorner)

    # From shallow_water.Domain:
    coordinates = coordinates.tolist()
    volumes = volumes.tolist()
    # FIXME:should this be in mesh? (peter row)
    if fid.smoothing == 'Yes':
        unique = False
    else:
        unique = True
    if unique:
        # Unsmoothed files store one vertex per triangle corner; collapse
        # duplicate coordinates so Domain receives a weeded mesh.
        coordinates, volumes, boundary = weed(coordinates, volumes,boundary)

     
   
    try:
        # Domain starttime is the file's starttime offset by the extraction
        # time t (see changeset 8068 on starttime/duration handling).
        domain = Domain(coordinates, volumes, boundary, starttime=(float(starttime) + float(t)))
    except AssertionError, e:
        fid.close()
        msg = 'Domain could not be created: %s. ' \
              'Perhaps use "fail_if_NaN=False and NaN_filler = ..."' % e
        raise DataDomainError, msg

    if not boundary is None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    # Static quantities: read straight from the file.
    for quantity in other_quantities:
        try:
            # Prefer the variable's own missing_value sentinel if present.
            NaN = fid.variables[quantity].missing_value
        except:
            pass                       # quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            log.critical('       %s' % str(quantity))
            log.critical('        NaN = %s' % str(NaN))
            log.critical('        max(X)')
            log.critical('       %s' % str(max(X)))
            log.critical('        max(X)==NaN')
            log.critical('       %s' % str(max(X)==NaN))
            log.critical('')
        if max(X) == NaN or min(X) == NaN:
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry' % quantity
                raise DataMissingValuesError, msg
            else:
                # Replace missing entries with NaN_filler, keep the rest.
                data = (X != NaN)
                X = (X*data) + (data==0)*NaN_filler
        if unique:
            # Reshape flat per-vertex values to (n_triangles, 3).
            X = num.resize(X, (len(X)/3, 3))
        domain.set_quantity(quantity, X)
    #
    # Time-dependent quantities: use the values interpolated to time t.
    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass                       # quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            log.critical('       %s' % str(quantity))
            log.critical('        NaN = %s' % str(NaN))
            log.critical('        max(X)')
            log.critical('       %s' % str(max(X)))
            log.critical('        max(X)==NaN')
            log.critical('       %s' % str(max(X)==NaN))
            log.critical('')
        if max(X) == NaN or min(X) == NaN:
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry' % quantity
                raise DataMissingValuesError, msg
            else:
                data = (X != NaN)
                X = (X*data) + (data==0)*NaN_filler
        if unique:
            X = num.resize(X, (X.shape[0]/3, 3))
        domain.set_quantity(quantity, X)

    fid.close()

    return domain
1008
1009
1010def get_mesh_and_quantities_from_file(filename,
1011                                      quantities=None,
1012                                      verbose=False):
1013    """Get and rebuild mesh structure and associated quantities from sww file
1014
1015    Input:
1016        filename - Name os sww file
1017        quantities - Names of quantities to load
1018
1019    Output:
1020        mesh - instance of class Interpolate
1021               (including mesh and interpolation functionality)
1022        quantities - arrays with quantity values at each mesh node
1023        time - vector of stored timesteps
1024
1025    This function is used by e.g.:
1026        get_interpolated_quantities_at_polyline_midpoints
1027    """
1028
1029    # FIXME (Ole): Maybe refactor filefunction using this more fundamental code.
1030
1031    import types
1032    from Scientific.IO.NetCDF import NetCDFFile
1033    from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh
1034
1035    if verbose: log.critical('Reading from %s' % filename)
1036
1037    fid = NetCDFFile(filename, netcdf_mode_r)    # Open existing file for read
1038    time = fid.variables['time'][:]    # Time vector
1039    time += fid.starttime[0]
1040
1041    # Get the variables as numeric arrays
1042    x = fid.variables['x'][:]                   # x-coordinates of nodes
1043    y = fid.variables['y'][:]                   # y-coordinates of nodes
1044    elevation = fid.variables['elevation'][:]   # Elevation
1045    stage = fid.variables['stage'][:]           # Water level
1046    xmomentum = fid.variables['xmomentum'][:]   # Momentum in the x-direction
1047    ymomentum = fid.variables['ymomentum'][:]   # Momentum in the y-direction
1048
1049    # Mesh (nodes (Mx2), triangles (Nx3))
1050    nodes = num.concatenate((x[:,num.newaxis], y[:,num.newaxis]), axis=1)
1051    triangles = fid.variables['volumes'][:]
1052
1053    # Get geo_reference
1054    try:
1055        geo_reference = Geo_reference(NetCDFObject=fid)
1056    except: #AttributeError, e:
1057        # Sww files don't have to have a geo_ref
1058        geo_reference = None
1059
1060    if verbose: log.critical('    building mesh from sww file %s' % filename)
1061
1062    boundary = None
1063
1064    #FIXME (Peter Row): Should this be in mesh?
1065    if fid.smoothing != 'Yes':
1066        nodes = nodes.tolist()
1067        triangles = triangles.tolist()
1068        nodes, triangles, boundary = weed(nodes, triangles, boundary)
1069
1070    try:
1071        mesh = Mesh(nodes, triangles, boundary, geo_reference=geo_reference)
1072    except AssertionError, e:
1073        fid.close()
1074        msg = 'Domain could not be created: %s. "' % e
1075        raise DataDomainError, msg
1076
1077    quantities = {}
1078    quantities['elevation'] = elevation
1079    quantities['stage'] = stage
1080    quantities['xmomentum'] = xmomentum
1081    quantities['ymomentum'] = ymomentum
1082
1083    fid.close()
1084
1085    return mesh, quantities, time
1086
1087
def get_time_interp(time, t=None):
    """Finds the ratio and index for time interpolation.

    time is an array of time steps
    t is the sample time (default: the last stored time step).

    Returns a tuple (index, ratio) such that the value at time t of a
    quantity Q stored at the given time steps is
        (1 - ratio)*Q[index] + ratio*Q[index+1].

    Raises DataTimeError if t lies outside [time[0], time[-1]].
    """
    if t is None:
        # Default to the last stored timestep: exact hit, no blending.
        return (-1, 0.)

    if t < time[0] or t > time[-1]:
        # Only build the message when it is actually needed.
        msg = 'Time interval derived from file %s [%s:%s]' \
              % ('FIXMEfilename', time[0], time[-1])
        msg += ' does not match model time: %s' % t
        # NOTE: call-form raise (valid in Python 2 and 3) replaces the
        # removed `raise DataTimeError, msg` statement syntax.
        raise DataTimeError(msg)

    # Find the largest index whose stored time does not exceed t.
    index = 0
    while t > time[index]:
        index += 1
    while t < time[index]:
        index -= 1

    if t == time[index]:
        # Exact match - also covers t == time[-1] (last time).
        ratio = 0.
    else:
        # t now lies strictly between time[index] and time[index+1].
        ratio = (t - time[index]) / (time[index+1] - time[index])

    return (index, ratio)
1118
1119
1120
##
# @brief Interpolate a quantity wrt time.
# @param saved_quantity The quantity to interpolate.
# @param time_interp (index, ratio)
# @return A vector of interpolated values.
def interpolated_quantity(saved_quantity, time_interp):
    '''Given an index and ratio, interpolate quantity with respect to time.'''

    idx, frac = time_interp

    if frac > 0:
        # Blend linearly between the two bracketing stored timesteps.
        return (1 - frac) * saved_quantity[idx] + frac * saved_quantity[idx + 1]

    # A zero ratio means an exact stored timestep - return it directly.
    return saved_quantity[idx]
1140
1141
def weed(coordinates, volumes, boundary=None):
    """Excise all duplicate points from a mesh.

    coordinates  list (or numpy array) of [x, y] vertex coordinates
    volumes      list (or numpy array) of triangles as index triples
    boundary     optional dict mapping (index0, index1) segments to labels

    Returns (coordinates, volumes, boundary) where duplicate coordinates
    have been merged, triangle vertex indices renumbered accordingly and
    boundary segments re-pointed at the merged indices. When two merged
    segments collide (in either orientation) the first label seen wins,
    matching the original behaviour.

    NOTE: replaces the removed dict.has_key idiom with `in`; behaviour
    is unchanged.
    """
    if isinstance(coordinates, num.ndarray):
        coordinates = coordinates.tolist()
    if isinstance(volumes, num.ndarray):
        volumes = volumes.tolist()

    # Map each coordinate (as a hashable tuple) to the index of its first
    # occurrence; same_point records which point each original index was.
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        same_point[i] = point
        if point not in point_dict:
            point_dict[point] = i

    # Rebuild the coordinate list without duplicates and renumber the
    # surviving points consecutively (in point_dict iteration order,
    # as before).
    coordinates = []
    i = 0
    for point in point_dict.keys():
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1

    # Re-point every triangle vertex at the weeded indices
    # (negative indices are sentinels and are left untouched).
    for volume in volumes:
        for j in range(len(volume)):
            index = volume[j]
            if index > -1:
                volume[j] = point_dict[same_point[index]]

    if boundary is not None:
        new_boundary = {}
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')(peter row)

            # First label wins for a segment seen in either orientation;
            # later duplicates are dropped (the original code's
            # self-assignments reduced to exactly this).
            if (point0, point1) not in new_boundary and \
               (point1, point0) not in new_boundary:
                new_boundary[(point0, point1)] = label

        boundary = new_boundary

    return coordinates, volumes, boundary
1196
1197
1198
Note: See TracBrowser for help on using the repository browser.