source: trunk/anuga_core/source/anuga/file/sww.py @ 7858

Last change: revision 7858, checked in by hudson, 12 years ago

Refactorings to increase code quality, fixed missing log import from sww_interrogate.

File size: 43.1 KB
Line 
1""" Classes to read an SWW file.
2"""
3
from Scientific.IO.NetCDF import NetCDFFile
import numpy as num

import anuga.utilities.log as log
from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a
from anuga.config import netcdf_float, netcdf_float32, netcdf_int
from anuga.config import max_float
from anuga.config import minimum_storable_height as \
        default_minimum_storable_height
from anuga.coordinate_transforms.geo_reference import Geo_reference, \
        ensure_geo_reference
from anuga.utilities.numerical_tools import ensure_numeric

from sts import Write_sts
from file_utils import create_filename
19
##
# @brief Generic base class for storing output, e.g. visualisation or
#        checkpointing data.
class Data_format:
    """Generic interface to data formats.

    Derives the output filename from the domain and remembers basic
    bookkeeping data (domain, timestep counter, mesh sizes) used by
    concrete writers such as SWW_file.
    """

    ##
    # @brief Instantiate this instance.
    # @param domain Domain whose data is to be stored.
    # @param extension File extension for the output file (e.g. 'sww').
    # @param mode The mode of the underlying file ('r', 'w' or 'a').
    def __init__(self, domain, extension, mode=netcdf_mode_w):
        # Only the first character of mode is significant.
        msg = ("Mode %s must be either:\n" % mode
               + "   'w' (write)\n"
               + "   'r' (read)\n"
               + "   'a' (append)")
        assert mode[0] in ['r', 'w', 'a'], msg

        # Derive the output filename from the domain's data dir and name.
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        self.timestep = 0
        self.domain = domain

        # Exclude ghosts in case this is a parallel domain
        self.number_of_nodes = domain.number_of_full_nodes
        self.number_of_volumes = domain.number_of_full_triangles
        #self.number_of_volumes = len(domain)

        #FIXME: Should we have a general set_precision function?
52
class SWW_file(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """

    ##
    # @brief Instantiate this instance.
    # @param domain The domain to store; quantities_to_be_stored decides
    #        which quantities are written and whether they are static (1)
    #        or dynamic (2).
    # @param mode Mode of the underlying data file ('w', 'r' or 'a').
    # @param max_size Maximum file size in bytes before the output is
    #        split into a new file (default ~2GB).
    # @param recursion Internal flag/counter used when store_timestep
    #        rolls over to a new file; callers should leave it False.
    # @note Prepare the underlying data file (header, monitor variables)
    #       if mode starts with 'w'.
    def __init__(self, domain, 
                 mode=netcdf_mode_w, max_size=2000000000, recursion=False):
        from Scientific.IO.NetCDF import NetCDFFile

        self.precision = netcdf_float32 # Use single precision for quantities
        self.recursion = recursion
        self.mode = mode
        # Domain attributes override the defaults if present.
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size # File size max is 2Gig
        else:
            self.max_size = max_size
        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            # Module-level default (expected to come from anuga.config)
            self.minimum_storable_height = default_minimum_storable_height

        # Call parent constructor (sets self.filename and mesh sizes)
        Data_format.__init__(self, domain, 'sww', mode)

        # Get static and dynamic quantities from domain.
        # flag == 1 means stored once; flag == 2 means stored per timestep.
        static_quantities = []
        dynamic_quantities = []
       
        for q in domain.quantities_to_be_stored:
            flag = domain.quantities_to_be_stored[q]
       
            msg = 'Quantity %s is requested to be stored ' % q
            msg += 'but it does not exist in domain.quantities'
            assert q in domain.quantities, msg
       
            assert flag in [1,2]
            if flag == 1: static_quantities.append(q)
            if flag == 2: dynamic_quantities.append(q)               
                       
       
        # NetCDF file definition; only 'w' mode writes the header here.
        fid = NetCDFFile(self.filename, mode)
        if mode[0] == 'w':
            description = 'Output from anuga.abstract_2d_finite_volumes ' \
                          'suitable for plotting'
                         
            self.writer = Write_sww(static_quantities, dynamic_quantities)
            self.writer.store_header(fid,
                                     domain.starttime,
                                     self.number_of_volumes,
                                     self.domain.number_of_full_nodes,
                                     description=description,
                                     smoothing=domain.smooth,
                                     order=domain.default_order,
                                     sww_precision=self.precision)

            # Extra optional information
            if hasattr(domain, 'texture'):
                fid.texture = domain.texture

            # Create variables that hold the extrema of monitored quantities.
            if domain.quantities_to_be_monitored is not None:
                fid.createDimension('singleton', 1)
                fid.createDimension('two', 2)

                poly = domain.monitor_polygon
                if poly is not None:
                    N = len(poly)
                    fid.createDimension('polygon_length', N)
                    fid.createVariable('extrema.polygon',
                                       self.precision,
                                       ('polygon_length', 'two'))
                    fid.variables['extrema.polygon'][:] = poly

                interval = domain.monitor_time_interval
                if interval is not None:
                    fid.createVariable('extrema.time_interval',
                                       self.precision,
                                       ('two',))
                    fid.variables['extrema.time_interval'][:] = interval

                # Per-quantity min/max, their locations and times;
                # populated later by store_timestep.
                for q in domain.quantities_to_be_monitored:
                    fid.createVariable(q + '.extrema', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.min_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.max_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.min_time', self.precision,
                                       ('singleton',))
                    fid.createVariable(q + '.max_time', self.precision,
                                       ('singleton',))

        fid.close()

    ##
    # @brief Store connectivity data into the underlying data file.
    def store_connectivity(self):
        """Store information about nodes, triangles and static quantities

        Writes x,y coordinates of triangles and their connectivity.
       
        Store also any quantity that has been identified as static.
        """

        # FIXME: Change name to reflect the fact thta this function
        # stores both connectivity (triangulation) and static quantities
       
        from Scientific.IO.NetCDF import NetCDFFile

        domain = self.domain

        # append to the NetCDF file
        fid = NetCDFFile(self.filename, netcdf_mode_a)

        # Get X, Y from one (any) of the quantities
        Q = domain.quantities.values()[0]
        X,Y,_,V = Q.get_vertex_values(xy=True, precision=self.precision)

        # store the connectivity data
        points = num.concatenate((X[:,num.newaxis],Y[:,num.newaxis]), axis=1)
        self.writer.store_triangulation(fid,
                                        points,
                                        V.astype(num.float32),
                                        points_georeference=\
                                            domain.geo_reference)


        # Get names of static quantities
        static_quantities = {}
        for name in self.writer.static_quantities:
            Q = domain.quantities[name]
            A, _ = Q.get_vertex_values(xy=False, 
                                       precision=self.precision)
            static_quantities[name] = A
       
        # Store static quantities       
        self.writer.store_static_quantities(fid, **static_quantities)
                                           
        fid.close()

    ##
    # @brief Store time and time dependent quantities
    # to the underlying data file.
    # @note If the file would exceed max_size, a new SWW_file is created
    #       (with a '_time_<t>' suffix) and writing continues there.
    def store_timestep(self):
        """Store time and time dependent quantities
        """

        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        # Get NetCDF. Retry for up to ~10 seconds since another process
        # (e.g. a viewer) may temporarily hold the file open.
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                # Open existing file
                fid = NetCDFFile(self.filename, netcdf_mode_a)
            except IOError:
                # This could happen if someone was reading the file.
                # In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened' \
                      % self.filename
                msg += ' - trying step %s again' % self.domain.time
                log.critical(msg)
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' % self.filename
            # NOTE(review): DataFileNotOpenError is not imported in this
            # module -- presumably defined elsewhere in the package; confirm.
            raise DataFileNotOpenError, msg

        # Check to see if the file is already too big:
        # estimate the size increase per timestep from the current average.
        time = fid.variables['time']
        i = len(time) + 1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size / i
        if file_size + file_size_increase > self.max_size * 2**self.recursion:
            # In order to get the file name and start time correct,
            # I change the domain.filename and domain.starttime.
            # This is the only way to do this without changing
            # other modules (I think).

            # Write a filename addon that won't break the anuga viewers
            # (10.sww is bad)
            filename_ext = '_time_%s' % self.domain.time
            filename_ext = filename_ext.replace('.', '_')

            # Remember the old filename, then give domain a
            # name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename + filename_ext)

            # Temporarily change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.get_time()

            # Build a new data_structure.
            next_data_structure = SWW_file(self.domain, mode=self.mode,
                                           max_size=self.max_size,
                                           recursion=self.recursion+1)
            if not self.recursion:
                log.critical('    file_size = %s' % file_size)
                log.critical('    saving file to %s'
                             % next_data_structure.filename) 

            # Set up the new data_structure
            self.domain.writer = next_data_structure

            # Store connectivity and first timestep
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep()
            fid.sync()
            fid.close()

            # Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            # Normal path: append this timestep to the current file.
            # NOTE(review): resetting recursion to False here looks
            # intentional (stop doubling the size threshold) -- confirm.
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            i = len(time)
             
            if 'stage' in self.writer.dynamic_quantities:           
                # Select only those values for stage,
                # xmomentum and ymomentum (if stored) where
                # depth exceeds minimum_storable_height
                #
                # In this branch it is assumed that elevation
                # is also available as a quantity           
           
                Q = domain.quantities['stage']
                w, _ = Q.get_vertex_values(xy=False)
               
                Q = domain.quantities['elevation']
                z, _ = Q.get_vertex_values(xy=False)               
               
                storable_indices = (w-z >= self.minimum_storable_height)
            else:
                # Very unlikely branch
                storable_indices = None # This means take all
           
           
            # Now store dynamic quantities
            dynamic_quantities = {}
            for name in self.writer.dynamic_quantities:
                netcdf_array = fid.variables[name]
               
                Q = domain.quantities[name]
                A, _ = Q.get_vertex_values(xy=False,
                                           precision=self.precision)

                if storable_indices is not None:
                    if name == 'stage':
                        # Where depth is below threshold, store the bed
                        # elevation instead of the stage (dry cell).
                        A = num.choose(storable_indices, (z, A))

                    if name in ['xmomentum', 'ymomentum']:
                        # Get xmomentum where depth exceeds
                        # minimum_storable_height
                       
                        # Define a zero vector of same size and type as A
                        # for use with momenta
                        null = num.zeros(num.size(A), A.dtype.char)
                        A = num.choose(storable_indices, (null, A))
               
                dynamic_quantities[name] = A
               
                                       
            # Store dynamic quantities
            self.writer.store_quantities(fid,
                                         time=self.domain.time,
                                         sww_precision=self.precision,
                                         **dynamic_quantities)


            # Update extrema if requested
            domain = self.domain
            if domain.quantities_to_be_monitored is not None:
                for q, info in domain.quantities_to_be_monitored.items():
                    if info['min'] is not None:
                        fid.variables[q + '.extrema'][0] = info['min']
                        fid.variables[q + '.min_location'][:] = \
                                        info['min_location']
                        fid.variables[q + '.min_time'][0] = info['min_time']

                    if info['max'] is not None:
                        fid.variables[q + '.extrema'][1] = info['max']
                        fid.variables[q + '.max_location'][:] = \
                                        info['max_location']
                        fid.variables[q + '.max_time'][0] = info['max_time']

            # Flush and close
            fid.sync()
            fid.close()
367
368
##
# @brief Class to open an sww file so that domain can be populated
#        with quantity values.
class Read_sww:

    def __init__(self, source):
        """The source parameter is assumed to be a NetCDF sww file.
        """
        self.source = source
        self.frame_number = 0

        # Read the time vector first so we know how many frames exist.
        fin = NetCDFFile(self.source, 'r')
        self.time = num.array(fin.variables['time'], num.float)
        fin.close()

        self.last_frame_number = self.time.shape[0] - 1
        self.frames = num.arange(self.last_frame_number + 1)

        self.read_mesh()

        self.quantities = {}
        self.read_quantities()

    def read_mesh(self):
        """Read and store the mesh data contained within this sww file.
        """
        fin = NetCDFFile(self.source, 'r')

        self.vertices = num.array(fin.variables['volumes'], num.int)
        self.x = num.array(fin.variables['x'], num.float)
        self.y = num.array(fin.variables['y'], num.float)

        fin.close()

        assert len(self.x) == len(self.y)

        # Cache the bounding rectangle of the mesh.
        self.xmin = num.min(self.x)
        self.xmax = num.max(self.x)
        self.ymin = num.min(self.y)
        self.ymax = num.max(self.y)

    def read_quantities(self, frame_number=0):
        """
        Read the quantities contained in this file.
        frame_number is the time index to load.
        """
        assert frame_number >= 0 and frame_number <= self.last_frame_number

        self.frame_number = frame_number

        # Three vertices per triangle.
        num_triangles = len(self.x)/3

        fin = NetCDFFile(self.source, 'r')

        for name in fin.variables.keys():
            # Skip mesh/time variables and the per-quantity range arrays.
            if name in ('x', 'y', 'time', 'volumes') or '_range' in name:
                continue

            variable = fin.variables[name]
            if len(variable.shape) == 1:
                # Not a time-varying quantity
                values = num.ravel(num.array(variable, num.float))
            else:
                # Time-varying, get the current timestep data
                values = num.array(variable[self.frame_number], num.float)
            self.quantities[name] = values.reshape(num_triangles, 3)

        fin.close()
        return self.quantities

    def get_bounds(self):
        """
            Get the bounding rect around the mesh.
        """
        return [self.xmin, self.xmax, self.ymin, self.ymax]

    def get_last_frame_number(self):
        """
            Return the last loaded frame index.
        """
        return self.last_frame_number

    def get_time(self):
        """
            Get time at the current frame num, in secs.
        """
        return self.time[self.frame_number]
457
458
459class Write_sww(Write_sts):
460    """
461        A class to write an SWW file.
462       
463        It is domain agnostic, and requires all the data to be fed in
464        manually.
465    """
466
467    def __init__(self, static_quantities, dynamic_quantities):
468        """Initialise Write_sww with two list af quantity names:
469       
470        static_quantities (e.g. elevation or friction):
471            Stored once at the beginning of the simulation in a 1D array
472            of length number_of_points   
473        dynamic_quantities (e.g stage):
474            Stored every timestep in a 2D array with
475            dimensions number_of_points X number_of_timesteps       
476       
477        """
478        self.static_quantities = static_quantities   
479        self.dynamic_quantities = dynamic_quantities
480
481
482    def store_header(self,
483                     outfile,
484                     times,
485                     number_of_volumes,
486                     number_of_points,
487                     description='Generated by ANUGA',
488                     smoothing=True,
489                     order=1,
490                     sww_precision=netcdf_float32,
491                     verbose=False):
492        """Write an SWW file header.
493
494        Writes the first section of the .sww file.
495
496        outfile - the open file that will be written
497        times - A list of the time slice times OR a start time
498        Note, if a list is given the info will be made relative.
499        number_of_volumes - the number of triangles
500        number_of_points - the number of vertices in the mesh
501        """
502
503        from anuga.abstract_2d_finite_volumes.util \
504            import get_revision_number
505
506        outfile.institution = 'Geoscience Australia'
507        outfile.description = description
508
509        # For sww compatibility
510        if smoothing is True:
511            # Smoothing to be depreciated
512            outfile.smoothing = 'Yes'
513            outfile.vertices_are_stored_uniquely = 'False'
514        else:
515            # Smoothing to be depreciated
516            outfile.smoothing = 'No'
517            outfile.vertices_are_stored_uniquely = 'True'
518        outfile.order = order
519
520        try:
521            revision_number = get_revision_number()
522        except:
523            # This will be triggered if the system cannot get the SVN
524            # revision number.
525            revision_number = None
526        # Allow None to be stored as a string
527        outfile.revision_number = str(revision_number)
528
529        # This is being used to seperate one number from a list.
530        # what it is actually doing is sorting lists from numeric arrays.
531        if isinstance(times, (list, num.ndarray)):
532            number_of_times = len(times)
533            times = ensure_numeric(times)
534            if number_of_times == 0:
535                starttime = 0
536            else:
537                starttime = times[0]
538                times = times - starttime  #Store relative times
539        else:
540            number_of_times = 0
541            starttime = times
542
543
544        outfile.starttime = starttime
545
546        # dimension definitions
547        outfile.createDimension('number_of_volumes', number_of_volumes)
548        outfile.createDimension('number_of_vertices', 3)
549        outfile.createDimension('numbers_in_range', 2)
550
551        if smoothing is True:
552            outfile.createDimension('number_of_points', number_of_points)
553            # FIXME(Ole): This will cause sww files for parallel domains to
554            # have ghost nodes stored (but not used by triangles).
555            # To clean this up, we have to change get_vertex_values and
556            # friends in quantity.py (but I can't be bothered right now)
557        else:
558            outfile.createDimension('number_of_points', 3*number_of_volumes)
559
560        outfile.createDimension('number_of_timesteps', number_of_times)
561
562        # variable definitions
563        outfile.createVariable('x', sww_precision, ('number_of_points',))
564        outfile.createVariable('y', sww_precision, ('number_of_points',))
565
566        outfile.createVariable('volumes', netcdf_int, ('number_of_volumes',
567                                                       'number_of_vertices'))
568
569
570        for q in self.static_quantities:
571           
572            outfile.createVariable(q, sww_precision,
573                                   ('number_of_points',))
574           
575            outfile.createVariable(q + Write_sww.RANGE, sww_precision,
576                                   ('numbers_in_range',))
577                                   
578            # Initialise ranges with small and large sentinels.
579            # If this was in pure Python we could have used None sensibly
580            outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
581            outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
582
583       
584        self.write_dynamic_quantities(outfile, self.dynamic_quantities, times)
585
586
587
588    def store_triangulation(self,
589                            outfile,
590                            points_utm,
591                            volumes,
592                            zone=None, 
593                            new_origin=None,
594                            points_georeference=None, 
595                            verbose=False):
596        """
597        Store triangulation data in the underlying file.
598       
599        Stores the points and triangle indices in the sww file
600       
601        outfile Open handle to underlying file.
602
603        new_origin georeference that the points can be set to.
604
605        points_georeference The georeference of the points_utm.
606
607        verbose True if this function is to be verbose.
608
609        new_origin - qa georeference that the points can be set to. (Maybe
610        do this before calling this function.)
611
612        points_utm - currently a list or array of the points in UTM.
613        points_georeference - the georeference of the points_utm
614
615        How about passing new_origin and current_origin.
616        If you get both, do a convertion from the old to the new.
617
618        If you only get new_origin, the points are absolute,
619        convert to relative
620
621        if you only get the current_origin the points are relative, store
622        as relative.
623
624        if you get no georefs create a new georef based on the minimums of
625        points_utm.  (Another option would be to default to absolute)
626
627        Yes, and this is done in another part of the code.
628        Probably geospatial.
629
630        If you don't supply either geo_refs, then supply a zone. If not
631        the default zone will be used.
632
633        precon:
634            header has been called.
635        """
636
637        number_of_points = len(points_utm)
638        volumes = num.array(volumes)
639        points_utm = num.array(points_utm)
640
641        # Given the two geo_refs and the points, do the stuff
642        # described in the method header
643        # if this is needed else where, pull out as a function
644        points_georeference = ensure_geo_reference(points_georeference)
645        new_origin = ensure_geo_reference(new_origin)
646        if new_origin is None and points_georeference is not None:
647            points = points_utm
648            geo_ref = points_georeference
649        else:
650            if new_origin is None:
651                new_origin = Geo_reference(zone, min(points_utm[:,0]),
652                                                 min(points_utm[:,1]))
653            points = new_origin.change_points_geo_ref(points_utm,
654                                                      points_georeference)
655            geo_ref = new_origin
656
657        # At this stage I need a georef and points
658        # the points are relative to the georef
659        geo_ref.write_NetCDF(outfile)
660
661        # This will put the geo ref in the middle
662        #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
663
664        x =  points[:,0]
665        y =  points[:,1]
666
667        if verbose:
668            log.critical('------------------------------------------------')
669            log.critical('More Statistics:')
670            log.critical('  Extent (/lon):')
671            log.critical('    x in [%f, %f], len(lat) == %d'
672                         % (min(x), max(x), len(x)))
673            log.critical('    y in [%f, %f], len(lon) == %d'
674                         % (min(y), max(y), len(y)))
675            #log.critical('    z in [%f, %f], len(z) == %d'
676            #             % (min(elevation), max(elevation), len(elevation)))
677            log.critical('geo_ref: %s' % str(geo_ref))
678            log.critical('------------------------------------------------')
679
680        outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner()
681        outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner()
682        outfile.variables['volumes'][:] = volumes.astype(num.int32) #On Opteron 64
683
684
685    def store_static_quantities(self, 
686                                outfile, 
687                                sww_precision=num.float32,
688                                verbose=False, 
689                                **quant):
690        """
691        Write the static quantity info.
692
693        **quant is extra keyword arguments passed in. These must be
694          the numpy arrays to be stored in the sww file at each timestep.
695
696        The argument sww_precision allows for storing as either
697        * single precision (default): num.float32
698        * double precision: num.float64 or num.float
699
700        Precondition:
701            store_triangulation and
702            store_header have been called.
703        """
704
705        # The dictionary quant must contain numpy arrays for each name.
706        # These will typically be quantities from Domain such as friction
707        #
708        # Arrays not listed in static_quantitiues will be ignored, silently.
709        #
710        # This method will also write the ranges for each quantity,
711        # e.g. stage_range, xmomentum_range and ymomentum_range
712        for q in self.static_quantities:
713            if not quant.has_key(q):
714                msg = 'Values for quantity %s was not specified in ' % q
715                msg += 'store_quantities so they cannot be stored.'
716                raise NewQuantity, msg
717            else:
718                q_values = ensure_numeric(quant[q])
719               
720                x = q_values.astype(sww_precision)
721                outfile.variables[q][:] = x
722       
723                # This populates the _range values
724                outfile.variables[q + Write_sww.RANGE][0] = num.min(x)
725                outfile.variables[q + Write_sww.RANGE][1] = num.max(x)
726                   
727        # FIXME: Hack for backwards compatibility with old viewer
728        #if 'elevation' in self.static_quantities:
729        #    outfile.variables['z'][:] = outfile.variables['elevation'][:]
730
731                   
732                   
733       
734       
735    def store_quantities(self, 
736                         outfile, 
737                         sww_precision=num.float32,
738                         slice_index=None,
739                         time=None,
740                         verbose=False, 
741                         **quant):
742        """
743        Write the quantity info at each timestep.
744
745        **quant is extra keyword arguments passed in. These must be
746          the numpy arrays to be stored in the sww file at each timestep.
747
748        if the time array is already been built, use the slice_index
749        to specify the index.
750
751        Otherwise, use time to increase the time dimension
752
753        Maybe make this general, but the viewer assumes these quantities,
754        so maybe we don't want it general - unless the viewer is general
755       
756        The argument sww_precision allows for storing as either
757        * single precision (default): num.float32
758        * double precision: num.float64 or num.float
759
760        Precondition:
761            store_triangulation and
762            store_header have been called.
763        """
764
765        if time is not None:
766            file_time = outfile.variables['time']
767            slice_index = len(file_time)
768            file_time[slice_index] = time
769        else:
770            slice_index = int(slice_index) # Has to be cast in case it was numpy.int   
771
772        # Write the named dynamic quantities
773        # The dictionary quant must contain numpy arrays for each name.
774        # These will typically be the conserved quantities from Domain
775        # (Typically stage,  xmomentum, ymomentum).
776        #
777        # Arrays not listed in dynamic_quantitiues will be ignored, silently.
778        #
779        # This method will also write the ranges for each quantity,
780        # e.g. stage_range, xmomentum_range and ymomentum_range
781        for q in self.dynamic_quantities:
782            if not quant.has_key(q):
783                msg = 'Values for quantity %s was not specified in ' % q
784                msg += 'store_quantities so they cannot be stored.'
785                raise NewQuantity, msg
786            else:
787                q_values = ensure_numeric(quant[q])
788               
789                x = q_values.astype(sww_precision)
790                outfile.variables[q][slice_index] = x
791                   
792       
793                # This updates the _range values
794                q_range = outfile.variables[q + Write_sww.RANGE][:]
795                q_values_min = num.min(q_values)
796                if q_values_min < q_range[0]:
797                    outfile.variables[q + Write_sww.RANGE][0] = q_values_min
798                q_values_max = num.max(q_values)
799                if q_values_max > q_range[1]:
800                    outfile.variables[q + Write_sww.RANGE][1] = q_values_max
801
802    ##
803    # @brief Print the quantities in the underlying file.
804    # @param outfile UNUSED.
805    def verbose_quantities(self, outfile):
806        log.critical('------------------------------------------------')
807        log.critical('More Statistics:')
808        for q in self.dynamic_quantities:
809            log.critical(%s in [%f, %f]'
810                         % (q, outfile.variables[q+Write_sww.RANGE][0],
811                            outfile.variables[q+Write_sww.RANGE][1]))
812        log.critical('------------------------------------------------')
813
814
815
816
def extent_sww(file_name):
    """Return the spatial and stage extents of an sww file.

    file_name: path of the sww file to interrogate.

    Returns the list
        [min(x), max(x), min(y), max(y), min(stage), max(stage)]
    """

    from Scientific.IO.NetCDF import NetCDFFile

    # Open the NetCDF file read-only and pull out the arrays we need.
    fid = NetCDFFile(file_name, netcdf_mode_r)
    xs = fid.variables['x'][:]
    ys = fid.variables['y'][:]
    stages = fid.variables['stage'][:]
    fid.close()

    return [min(xs), max(xs),
            min(ys), max(ys),
            num.min(stages), num.max(stages)]
840
841
def load_sww_as_domain(filename, boundary=None, t=None,
               fail_if_NaN=True, NaN_filler=0,
               verbose=False, very_verbose=False):
    """
    Load an sww file into a domain.

    Usage: domain = load_sww_as_domain('file.sww',
                        t=time (default = last time in file))

    filename     name of the sww file to load
    boundary     optional boundary dictionary {(vol_id, edge_id): tag}
    t            model time to extract; quantity values are linearly
                 interpolated between the two bracketing stored timesteps
                 (default: last stored time)
    fail_if_NaN  if True, raise DataMissingValuesError when a quantity
                 contains the file's missing_value marker; if False,
                 replace those entries with NaN_filler
    NaN_filler   replacement value used when fail_if_NaN is False
    verbose      log progress messages
    very_verbose log per-quantity diagnostic detail

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
    give a different final boundary, or crash.
    """
   
    from Scientific.IO.NetCDF import NetCDFFile
    from anuga.shallow_water.shallow_water_domain import Domain

    # initialise NaN.
    # Default 'missing data' marker; overridden below per-variable by the
    # variable's missing_value attribute when it has one.
    NaN = 9.969209968386869e+036

    if verbose: log.critical('Reading from %s' % filename)

    fid = NetCDFFile(filename, netcdf_mode_r)    # Open existing file for read
    time = fid.variables['time']       # Timesteps
    if t is None:
        t = time[-1]
    # (index, ratio) locating t between two stored timesteps
    time_interp = get_time_interp(time,t)

    # Get the variables as numeric arrays
    x = fid.variables['x'][:]                   # x-coordinates of vertices
    y = fid.variables['y'][:]                   # y-coordinates of vertices
    elevation = fid.variables['elevation']      # Elevation
    stage = fid.variables['stage']              # Water level
    xmomentum = fid.variables['xmomentum']      # Momentum in the x-direction
    ymomentum = fid.variables['ymomentum']      # Momentum in the y-direction

    starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:]       # Connectivity
    coordinates = num.transpose(num.asarray([x.tolist(), y.tolist()]))
    # FIXME (Ole): Something like this might be better:
    #                 concatenate((x, y), axis=1)
    # or              concatenate((x[:,num.newaxis], x[:,num.newaxis]), axis=1)

    conserved_quantities = []
    interpolated_quantities = {}
    other_quantities = []

    # get geo_reference
    try:                             # sww files don't have to have a geo_ref
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: # AttributeError, e:
        geo_reference = None

    if verbose: log.critical('    getting quantities')

    # Classify every file variable: anything with a time dimension is
    # treated as a conserved quantity and interpolated to time t; the
    # rest are static ('other') quantities.
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity] = \
                  interpolated_quantity(fid.variables[quantity][:], time_interp)
        else:
            other_quantities.append(quantity)

    # Remove bookkeeping variables that are not domain quantities.
    other_quantities.remove('x')
    other_quantities.remove('y')
    #other_quantities.remove('z')
    other_quantities.remove('volumes')
    # Older sww files may lack the *_range variables; ignore if absent.
    try:
        other_quantities.remove('stage_range')
        other_quantities.remove('xmomentum_range')
        other_quantities.remove('ymomentum_range')
        other_quantities.remove('elevation_range')
    except:
        pass

    conserved_quantities.remove('time')

    if verbose: log.critical('    building domain')

    #    From domain.Domain:
    #    domain = Domain(coordinates, volumes,\
    #                    conserved_quantities = conserved_quantities,\
    #                    other_quantities = other_quantities,zone=zone,\
    #                    xllcorner=xllcorner, yllcorner=yllcorner)

    # From shallow_water.Domain:
    coordinates = coordinates.tolist()
    volumes = volumes.tolist()
    # FIXME:should this be in mesh? (peter row)
    # Unsmoothed files store each vertex per triangle, so duplicate
    # points must be weeded out before building the domain.
    if fid.smoothing == 'Yes':
        unique = False
    else:
        unique = True
    if unique:
        coordinates, volumes, boundary = weed(coordinates, volumes,boundary)

     
   
    try:
        domain = Domain(coordinates, volumes, boundary)
    except AssertionError, e:
        fid.close()
        msg = 'Domain could not be created: %s. ' \
              'Perhaps use "fail_if_NaN=False and NaN_filler = ..."' % e
        raise DataDomainError, msg

    if not boundary is None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    # Shift the domain's start to the extracted time; domain clock restarts.
    domain.starttime = float(starttime) + float(t)
    domain.time = 0.0

    for quantity in other_quantities:
        try:
            # Use the variable's own missing_value marker when available.
            NaN = fid.variables[quantity].missing_value
        except:
            pass                       # quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            log.critical('       %s' % str(quantity))
            log.critical('        NaN = %s' % str(NaN))
            log.critical('        max(X)')
            log.critical('       %s' % str(max(X)))
            log.critical('        max(X)==NaN')
            log.critical('       %s' % str(max(X)==NaN))
            log.critical('')
        if max(X) == NaN or min(X) == NaN:
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry' % quantity
                raise DataMissingValuesError, msg
            else:
                # Zero out missing entries, then fill them with NaN_filler.
                data = (X != NaN)
                X = (X*data) + (data==0)*NaN_filler
        if unique:
            # Regroup the flat per-vertex values into (n_triangles, 3).
            X = num.resize(X, (len(X)/3, 3))
        domain.set_quantity(quantity, X)
    #
    # Same treatment for the time-interpolated (conserved) quantities.
    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass                       # quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            log.critical('       %s' % str(quantity))
            log.critical('        NaN = %s' % str(NaN))
            log.critical('        max(X)')
            log.critical('       %s' % str(max(X)))
            log.critical('        max(X)==NaN')
            log.critical('       %s' % str(max(X)==NaN))
            log.critical('')
        if max(X) == NaN or min(X) == NaN:
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry' % quantity
                raise DataMissingValuesError, msg
            else:
                data = (X != NaN)
                X = (X*data) + (data==0)*NaN_filler
        if unique:
            X = num.resize(X, (X.shape[0]/3, 3))
        domain.set_quantity(quantity, X)

    fid.close()

    return domain
1011
1012
1013def get_mesh_and_quantities_from_file(filename,
1014                                      quantities=None,
1015                                      verbose=False):
1016    """Get and rebuild mesh structure and associated quantities from sww file
1017
1018    Input:
1019        filename - Name os sww file
1020        quantities - Names of quantities to load
1021
1022    Output:
1023        mesh - instance of class Interpolate
1024               (including mesh and interpolation functionality)
1025        quantities - arrays with quantity values at each mesh node
1026        time - vector of stored timesteps
1027
1028    This function is used by e.g.:
1029        get_interpolated_quantities_at_polyline_midpoints
1030    """
1031
1032    # FIXME (Ole): Maybe refactor filefunction using this more fundamental code.
1033
1034    import types
1035    from Scientific.IO.NetCDF import NetCDFFile
1036    from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh
1037
1038    if verbose: log.critical('Reading from %s' % filename)
1039
1040    fid = NetCDFFile(filename, netcdf_mode_r)    # Open existing file for read
1041    time = fid.variables['time'][:]    # Time vector
1042    time += fid.starttime[0]
1043
1044    # Get the variables as numeric arrays
1045    x = fid.variables['x'][:]                   # x-coordinates of nodes
1046    y = fid.variables['y'][:]                   # y-coordinates of nodes
1047    elevation = fid.variables['elevation'][:]   # Elevation
1048    stage = fid.variables['stage'][:]           # Water level
1049    xmomentum = fid.variables['xmomentum'][:]   # Momentum in the x-direction
1050    ymomentum = fid.variables['ymomentum'][:]   # Momentum in the y-direction
1051
1052    # Mesh (nodes (Mx2), triangles (Nx3))
1053    nodes = num.concatenate((x[:,num.newaxis], y[:,num.newaxis]), axis=1)
1054    triangles = fid.variables['volumes'][:]
1055
1056    # Get geo_reference
1057    try:
1058        geo_reference = Geo_reference(NetCDFObject=fid)
1059    except: #AttributeError, e:
1060        # Sww files don't have to have a geo_ref
1061        geo_reference = None
1062
1063    if verbose: log.critical('    building mesh from sww file %s' % filename)
1064
1065    boundary = None
1066
1067    #FIXME (Peter Row): Should this be in mesh?
1068    if fid.smoothing != 'Yes':
1069        nodes = nodes.tolist()
1070        triangles = triangles.tolist()
1071        nodes, triangles, boundary = weed(nodes, triangles, boundary)
1072
1073    try:
1074        mesh = Mesh(nodes, triangles, boundary, geo_reference=geo_reference)
1075    except AssertionError, e:
1076        fid.close()
1077        msg = 'Domain could not be created: %s. "' % e
1078        raise DataDomainError, msg
1079
1080    quantities = {}
1081    quantities['elevation'] = elevation
1082    quantities['stage'] = stage
1083    quantities['xmomentum'] = xmomentum
1084    quantities['ymomentum'] = ymomentum
1085
1086    fid.close()
1087
1088    return mesh, quantities, time
1089
1090
def get_time_interp(time, t=None):
    """Find the index and ratio for interpolating at time t.

    time  array (or list) of stored timesteps, assumed ascending
    t     the sample time; defaults to the last stored timestep

    Returns a tuple (index, ratio) such that the sampled value is
        (1 - ratio)*Q[index] + ratio*Q[index + 1]
    with ratio == 0 for an exact hit on a stored timestep.

    Raises DataTimeError if t lies outside [time[0], time[-1]].
    """
    if t is None:
        # Default to the last stored timestep: exact hit, no interpolation.
        return (-1, 0.)

    msg = 'Time interval derived from file %s [%s:%s]' \
          % ('FIXMEfilename', time[0], time[-1])
    msg += ' does not match model time: %s' % t
    if t < time[0] or t > time[-1]:
        raise DataTimeError(msg)

    # Scan for the bracketing interval: forward past smaller timesteps,
    # then back in case we overshot onto an exact match.
    index = 0
    while t > time[index]: index += 1
    while t < time[index]: index -= 1

    if t == time[index]:
        # Protect against case where t == time[-1] (last time)
        # - also works in general when t == time[i]
        ratio = 0
    else:
        # t is now between index and index+1
        ratio = (t - time[index])/(time[index+1] - time[index])

    return (index, ratio)
1121
1122
1123
##
# @brief Interpolate a stored quantity in time.
# @param saved_quantity Quantity values with time along the first axis.
# @param time_interp Tuple (index, ratio) as returned by get_time_interp.
# @return The quantity values linearly interpolated at the sample time.
def interpolated_quantity(saved_quantity, time_interp):
    '''Given an index and ratio, interpolate quantity with respect to time.'''

    idx, frac = time_interp

    if frac <= 0:
        # Exact hit on a stored timestep - no blending needed.
        return saved_quantity[idx]

    # Linear blend of the two bracketing timesteps.
    return (1 - frac)*saved_quantity[idx] + frac*saved_quantity[idx + 1]
1143
1144
def weed(coordinates, volumes, boundary=None):
    """Excise all duplicate points from a point list.

    coordinates  list (or array) of [x, y] points, possibly with duplicates
    volumes      list (or array) of triangles as triples of point indices
    boundary     optional dict {(index0, index1): tag}; segment endpoints
                 are renumbered to the weeded point indices

    Returns (coordinates, volumes, boundary) where coordinates contains
    each point once, volumes is renumbered accordingly (negative indices
    are left untouched) and boundary, if given, is renumbered with the
    first label kept when two segments collapse onto the same pair.
    """
    if isinstance(coordinates, num.ndarray):
        coordinates = coordinates.tolist()
    if isinstance(volumes, num.ndarray):
        volumes = volumes.tolist()

    # Map each distinct point to the index of its first occurrence, and
    # each original index to its point (for renumbering below).
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        same_point[i] = point
        if point not in point_dict:
            point_dict[point] = i

    # Rebuild the coordinate list with one entry per distinct point and
    # renumber point_dict to the new, compact indices.
    coordinates = []
    i = 0
    for point in point_dict.keys():
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1

    # Renumber triangle vertices; negative indices are sentinels and kept.
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    if boundary is not None:
        new_boundary = {}
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')(peter row)

            # If the weeded segment already exists (in either orientation)
            # the existing label is kept and the new one is dropped.
            if (point0, point1) not in new_boundary and \
               (point1, point0) not in new_boundary:
                new_boundary[(point0, point1)] = label

        boundary = new_boundary

    return coordinates, volumes, boundary
1199
1200
1201
Note: See TracBrowser for help on using the repository browser.