source: trunk/anuga_core/source/anuga/file/sww.py @ 8278

Last change on this file since 8278 was 8278, checked in by steve, 12 years ago

sww_merge now saves as float32

File size: 42.7 KB
Line 
1""" Classes to read an SWW file.
2"""
3
4import exceptions
# Exceptions raised by the sww reading/writing code.
# NOTE: exceptions.Exception is simply the builtin Exception in Python 2,
# so we subclass Exception directly (also forward-compatible).
class DataFileNotOpenError(Exception): pass
class DataMissingValuesError(Exception): pass
class NewQuantity(Exception): pass
class DataDomainError(Exception): pass
# Bug fix: DataMissingValuesError was defined twice in the original;
# the duplicate definition has been removed.
class DataTimeError(Exception): pass
11
12
13from anuga.coordinate_transforms.geo_reference import Geo_reference
14from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a
15from anuga.config import netcdf_float, netcdf_float32, netcdf_int
16from anuga.config import max_float
17from anuga.utilities.numerical_tools import ensure_numeric
18import anuga.utilities.log as log
19from Scientific.IO.NetCDF import NetCDFFile
20
21from sts import Write_sts
22
23from anuga.coordinate_transforms.geo_reference import \
24        ensure_geo_reference
25
26from file_utils import create_filename
27import numpy as num
28
class Data_format:
    """Generic interface to data formats
    """

    def __init__(self, domain, extension, mode=netcdf_mode_w):
        """Bind this writer to <datadir>/<domain name>.<extension>.

        domain     the domain whose data will be stored
        extension  file extension (e.g. 'sww')
        mode       netcdf mode string; first character must be r, w or a
        """
        valid = ('r', 'w', 'a')
        assert mode[0] in valid, \
               "Mode %s must be either:\n" % mode + \
               "   'w' (write)\n" + \
               "   'r' (read)\n" + \
               "   'a' (append)"

        # Derive the output filename from the domain's data directory
        # and name.
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        self.timestep = 0
        self.domain = domain

        # Cache mesh sizes.
        # NOTE(review): ghost nodes of a parallel domain are currently
        # included in these counts -- possibly they should be excluded.
        self.number_of_nodes = domain.number_of_nodes
        self.number_of_volumes = domain.number_of_triangles
55
class SWW_file(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """

    # Fallback used when the domain does not define its own
    # minimum_storable_height.  Bug fix: the original referenced the
    # undefined module-level name 'default_minimum_storable_height',
    # which would raise NameError if this branch was ever reached.
    # NOTE(review): 1.0e-5 is believed to match the anuga.config
    # default -- confirm against anuga.config.
    DEFAULT_MINIMUM_STORABLE_HEIGHT = 1.0e-5

    def __init__(self, domain,
                 mode=netcdf_mode_w, max_size=2000000000, recursion=False):
        """Create the sww file and write its header section.

        domain     the domain whose quantities will be stored
        mode       netcdf mode ('w' creates the file and writes the header)
        max_size   approximate maximum file size in bytes before output is
                   split into a new file (see store_timestep)
        recursion  internal split counter; False for the first file
        """
        from Scientific.IO.NetCDF import NetCDFFile

        self.precision = netcdf_float32  # Use single precision for quantities
        self.recursion = recursion
        self.mode = mode

        # Allow the domain to override the maximum file size (2 GB default).
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size
        else:
            self.max_size = max_size

        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            self.minimum_storable_height = self.DEFAULT_MINIMUM_STORABLE_HEIGHT

        # Call parent constructor (derives self.filename and mesh sizes)
        Data_format.__init__(self, domain, 'sww', mode)

        # Split requested quantities into static (flag 1, stored once)
        # and dynamic (flag 2, stored every timestep).
        static_quantities = []
        dynamic_quantities = []

        for q in domain.quantities_to_be_stored:
            flag = domain.quantities_to_be_stored[q]

            msg = 'Quantity %s is requested to be stored ' % q
            msg += 'but it does not exist in domain.quantities'
            assert q in domain.quantities, msg

            assert flag in [1, 2]
            if flag == 1: static_quantities.append(q)
            if flag == 2: dynamic_quantities.append(q)

        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)
        if mode[0] == 'w':
            description = 'Output from anuga.abstract_2d_finite_volumes ' \
                          'suitable for plotting'

            self.writer = Write_sww(static_quantities, dynamic_quantities)
            self.writer.store_header(fid,
                                     domain.starttime,
                                     self.number_of_volumes,
                                     self.domain.number_of_nodes,
                                     description=description,
                                     smoothing=domain.smooth,
                                     order=domain.default_order,
                                     sww_precision=self.precision)

            # Extra optional information
            if hasattr(domain, 'texture'):
                fid.texture = domain.texture

            if domain.quantities_to_be_monitored is not None:
                fid.createDimension('singleton', 1)
                fid.createDimension('two', 2)

                poly = domain.monitor_polygon
                if poly is not None:
                    N = len(poly)
                    fid.createDimension('polygon_length', N)
                    fid.createVariable('extrema.polygon',
                                       self.precision,
                                       ('polygon_length', 'two'))
                    fid.variables['extrema.polygon'][:] = poly

                interval = domain.monitor_time_interval
                if interval is not None:
                    fid.createVariable('extrema.time_interval',
                                       self.precision,
                                       ('two',))
                    fid.variables['extrema.time_interval'][:] = interval

                # Per-quantity extrema bookkeeping variables.
                for q in domain.quantities_to_be_monitored:
                    fid.createVariable(q + '.extrema', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.min_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.max_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.min_time', self.precision,
                                       ('singleton',))
                    fid.createVariable(q + '.max_time', self.precision,
                                       ('singleton',))

        fid.close()

    def store_connectivity(self):
        """Store information about nodes, triangles and static quantities

        Writes x,y coordinates of triangles and their connectivity.

        Store also any quantity that has been identified as static.
        """

        # FIXME: Change name to reflect the fact that this function
        # stores both connectivity (triangulation) and static quantities

        from Scientific.IO.NetCDF import NetCDFFile

        domain = self.domain

        # Append to the NetCDF file created in __init__
        fid = NetCDFFile(self.filename, netcdf_mode_a)

        # Get X, Y from one (any) of the quantities
        Q = domain.quantities.values()[0]
        X, Y, _, V = Q.get_vertex_values(xy=True, precision=self.precision)

        # Store the connectivity data
        points = num.concatenate((X[:, num.newaxis], Y[:, num.newaxis]),
                                 axis=1)
        self.writer.store_triangulation(fid,
                                        points,
                                        V.astype(num.float32),
                                        points_georeference=\
                                            domain.geo_reference)

        # Collect vertex values of all static quantities
        static_quantities = {}
        for name in self.writer.static_quantities:
            Q = domain.quantities[name]
            A, _ = Q.get_vertex_values(xy=False,
                                       precision=self.precision)
            static_quantities[name] = A

        # Store static quantities
        self.writer.store_static_quantities(fid, **static_quantities)

        fid.close()

    def store_timestep(self):
        """Store time and time dependent quantities

        If appending the new timestep would push the file past max_size,
        a new sww file (suffixed with the current time) is started and
        subsequent output is redirected to it.
        """

        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep
        from os import stat

        # Open the NetCDF file for append, retrying for up to ~10 s in
        # case another process (e.g. a viewer) has it open.
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, netcdf_mode_a)
            except IOError:
                # This could happen if someone was reading the file.
                # In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened' \
                      % self.filename
                msg += ' - trying step %s again' % self.domain.time
                log.critical(msg)
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' % self.filename
            # Bug fix: use call syntax instead of the legacy
            # 'raise Class, msg' form (valid in Python 2 and 3).
            raise DataFileNotOpenError(msg)

        # Check to see if the file is already too big: estimate the size
        # of one more timestep as the average size per stored step so far.
        time = fid.variables['time']
        i = len(time) + 1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size // i  # integer average, as before
        if file_size + file_size_increase > self.max_size * 2**self.recursion:
            # In order to get the file name and start time correct,
            # I change the domain.filename and domain.starttime.
            # This is the only way to do this without changing
            # other modules (I think).

            # Write a filename addon that won't break the anuga viewers
            # (10.sww is bad)
            filename_ext = '_time_%s' % self.domain.time
            filename_ext = filename_ext.replace('.', '_')

            # Remember the old filename, then give domain a
            # name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename + filename_ext)

            # Temporarily change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.get_time()

            # Build a new data_structure.
            next_data_structure = SWW_file(self.domain, mode=self.mode,
                                           max_size=self.max_size,
                                           recursion=self.recursion + 1)
            if not self.recursion:
                log.critical('    file_size = %s' % file_size)
                log.critical('    saving file to %s'
                             % next_data_structure.filename)

            # Set up the new data_structure
            self.domain.writer = next_data_structure

            # Store connectivity and first timestep
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep()
            fid.sync()
            fid.close()

            # Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            i = len(time)

            if 'stage' in self.writer.dynamic_quantities:
                # Select only those values for stage,
                # xmomentum and ymomentum (if stored) where
                # depth exceeds minimum_storable_height
                #
                # In this branch it is assumed that elevation
                # is also available as a quantity

                Q = domain.quantities['stage']
                w, _ = Q.get_vertex_values(xy=False)

                Q = domain.quantities['elevation']
                z, _ = Q.get_vertex_values(xy=False)

                storable_indices = (w - z >= self.minimum_storable_height)
            else:
                # Very unlikely branch
                storable_indices = None  # This means take all

            # Now store dynamic quantities
            dynamic_quantities = {}
            for name in self.writer.dynamic_quantities:
                netcdf_array = fid.variables[name]

                Q = domain.quantities[name]
                A, _ = Q.get_vertex_values(xy=False,
                                           precision=self.precision)

                if storable_indices is not None:
                    if name == 'stage':
                        # Shallow vertices store elevation instead of stage
                        A = num.choose(storable_indices, (z, A))

                    if name in ['xmomentum', 'ymomentum']:
                        # Zero out momenta where depth is below
                        # minimum_storable_height
                        null = num.zeros(num.size(A), A.dtype.char)
                        A = num.choose(storable_indices, (null, A))

                dynamic_quantities[name] = A

            # Store dynamic quantities
            self.writer.store_quantities(fid,
                                         time=self.domain.time,
                                         sww_precision=self.precision,
                                         **dynamic_quantities)

            # Update extrema if requested
            domain = self.domain
            if domain.quantities_to_be_monitored is not None:
                for q, info in domain.quantities_to_be_monitored.items():
                    if info['min'] is not None:
                        fid.variables[q + '.extrema'][0] = info['min']
                        fid.variables[q + '.min_location'][:] = \
                                        info['min_location']
                        fid.variables[q + '.min_time'][0] = info['min_time']

                    if info['max'] is not None:
                        fid.variables[q + '.extrema'][1] = info['max']
                        fid.variables[q + '.max_location'][:] = \
                                        info['max_location']
                        fid.variables[q + '.max_time'][0] = info['max_time']

            # Flush and close
            fid.sync()
            fid.close()
358
359
class Read_sww:
    """Reader for mesh, time and quantity data in a NetCDF sww file."""

    def __init__(self, source):
        """The source parameter is assumed to be a NetCDF sww file.
        """

        self.source = source

        self.frame_number = 0

        fin = NetCDFFile(self.source, 'r')

        # num.float / num.int were simply aliases for the builtins; use
        # the builtins directly (also works with numpy >= 1.20 where the
        # aliases were removed).
        self.time = num.array(fin.variables['time'], float)
        self.last_frame_number = self.time.shape[0] - 1

        self.frames = num.arange(self.last_frame_number + 1)

        fin.close()

        self.read_mesh()

        self.quantities = {}

        self.read_quantities()

    def read_mesh(self):
        """ Read and store the mesh data contained within this sww file.
        """
        fin = NetCDFFile(self.source, 'r')

        self.vertices = num.array(fin.variables['volumes'], int)

        self.x = x = num.array(fin.variables['x'], float)
        self.y = y = num.array(fin.variables['y'], float)

        assert len(self.x) == len(self.y)

        # Cache the bounding rectangle of the mesh
        self.xmin = num.min(x)
        self.xmax = num.max(x)
        self.ymin = num.min(y)
        self.ymax = num.max(y)

        fin.close()

    def read_quantities(self, frame_number=0):
        """
        Read the quantities contained in this file.
        frame_number is the time index to load.
        """
        assert frame_number >= 0 and frame_number <= self.last_frame_number

        self.frame_number = frame_number

        # Three vertices per triangle.  Bug fix: use integer division so
        # the reshape below also works under true division (Python 3 or
        # 'from __future__ import division').
        M = len(self.x) // 3

        fin = NetCDFFile(self.source, 'r')

        # Every variable except coordinates, connectivity, time and the
        # *_range bookkeeping arrays is treated as a quantity.
        excluded = ('x', 'y', 'time', 'volumes')
        for q in [n for n in fin.variables.keys()
                  if n not in excluded and '_range' not in n]:
            if len(fin.variables[q].shape) == 1:  # Not a time-varying quantity
                self.quantities[q] = num.ravel(
                    num.array(fin.variables[q], float)).reshape(M, 3)
            else:  # Time-varying, get the current timestep data
                self.quantities[q] = num.array(
                    fin.variables[q][self.frame_number], float).reshape(M, 3)
        fin.close()
        return self.quantities

    def get_bounds(self):
        """Return the bounding rect around the mesh:
        [xmin, xmax, ymin, ymax].
        """
        return [self.xmin, self.xmax, self.ymin, self.ymax]

    def get_last_frame_number(self):
        """Return the last loaded frame index.
        """
        return self.last_frame_number

    def get_time(self):
        """Return the time at the current frame number, in seconds.
        """
        return self.time[self.frame_number]
446
447
class Write_sww(Write_sts):
    """
        A class to write an SWW file.

        It is domain agnostic, and requires all the data to be fed in
        manually.
    """

    def __init__(self, static_quantities, dynamic_quantities):
        """Initialise Write_sww with two lists of quantity names:

        static_quantities (e.g. elevation or friction):
            Stored once at the beginning of the simulation in a 1D array
            of length number_of_points
        dynamic_quantities (e.g. stage):
            Stored every timestep in a 2D array with
            dimensions number_of_points X number_of_timesteps
        """
        self.static_quantities = static_quantities
        self.dynamic_quantities = dynamic_quantities

    def store_header(self,
                     outfile,
                     times,
                     number_of_volumes,
                     number_of_points,
                     description='Generated by ANUGA',
                     smoothing=True,
                     order=1,
                     sww_precision=netcdf_float32,
                     verbose=False):
        """Write an SWW file header.

        Writes the first section of the .sww file.

        outfile - the open file that will be written
        times - A list of the time slice times OR a start time
        Note, if a list is given the info will be made relative.
        number_of_volumes - the number of triangles
        number_of_points - the number of vertices in the mesh
        """

        from anuga.abstract_2d_finite_volumes.util \
            import get_revision_number

        outfile.institution = 'Geoscience Australia'
        outfile.description = description

        # For sww compatibility
        if smoothing is True:
            # Smoothing to be deprecated
            outfile.smoothing = 'Yes'
            outfile.vertices_are_stored_uniquely = 'False'
        else:
            # Smoothing to be deprecated
            outfile.smoothing = 'No'
            outfile.vertices_are_stored_uniquely = 'True'
        outfile.order = order

        try:
            revision_number = get_revision_number()
        except Exception:
            # Narrowed from a bare except: this is triggered if the
            # system cannot get the SVN revision number.
            revision_number = None
        # Allow None to be stored as a string
        outfile.revision_number = str(revision_number)

        # Distinguish a list/array of times from a single start time.
        if isinstance(times, (list, num.ndarray)):
            number_of_times = len(times)
            times = ensure_numeric(times)
            if number_of_times == 0:
                starttime = 0
            else:
                starttime = times[0]
                times = times - starttime  # Store relative times
        else:
            number_of_times = 0
            starttime = times

        outfile.starttime = starttime

        # dimension definitions
        outfile.createDimension('number_of_volumes', number_of_volumes)
        outfile.createDimension('number_of_vertices', 3)
        outfile.createDimension('numbers_in_range', 2)

        if smoothing is True:
            outfile.createDimension('number_of_points', number_of_points)
            # FIXME(Ole): This will cause sww files for parallel domains to
            # have ghost nodes stored (but not used by triangles).
            # To clean this up, we have to change get_vertex_values and
            # friends in quantity.py (but I can't be bothered right now)
        else:
            outfile.createDimension('number_of_points', 3*number_of_volumes)

        outfile.createDimension('number_of_timesteps', number_of_times)

        # variable definitions
        outfile.createVariable('x', sww_precision, ('number_of_points',))
        outfile.createVariable('y', sww_precision, ('number_of_points',))

        outfile.createVariable('volumes', netcdf_int, ('number_of_volumes',
                                                       'number_of_vertices'))

        for q in self.static_quantities:

            outfile.createVariable(q, sww_precision,
                                   ('number_of_points',))

            outfile.createVariable(q + Write_sww.RANGE, sww_precision,
                                   ('numbers_in_range',))

            # Initialise ranges with small and large sentinels.
            # If this was in pure Python we could have used None sensibly
            outfile.variables[q + Write_sww.RANGE][0] = max_float   # Min
            outfile.variables[q + Write_sww.RANGE][1] = -max_float  # Max

        self.write_dynamic_quantities(outfile, self.dynamic_quantities, times, \
                                        precis = sww_precision)

    def store_triangulation(self,
                            outfile,
                            points_utm,
                            volumes,
                            zone=None,
                            new_origin=None,
                            points_georeference=None,
                            verbose=False):
        """
        Store triangulation data in the underlying file.

        Stores the points and triangle indices in the sww file

        outfile Open handle to underlying file.

        points_utm - currently a list or array of the points in UTM.
        points_georeference - the georeference of the points_utm.
        new_origin - a georeference that the points can be set to.
        verbose True if this function is to be verbose.

        How the georeferences combine:

        If you get both new_origin and points_georeference, do a
        conversion from the old to the new.

        If you only get new_origin, the points are absolute,
        convert to relative.

        If you only get points_georeference the points are relative,
        store as relative.

        If you get no georefs create a new georef based on the minimums of
        points_utm.  (Another option would be to default to absolute.)

        If you don't supply either geo_ref, then supply a zone. If not
        the default zone will be used.

        precon:
            store_header has been called.
        """

        number_of_points = len(points_utm)
        volumes = num.array(volumes)
        points_utm = num.array(points_utm)

        # Given the two geo_refs and the points, do the stuff
        # described in the method docstring
        # if this is needed elsewhere, pull out as a function
        points_georeference = ensure_geo_reference(points_georeference)
        new_origin = ensure_geo_reference(new_origin)
        if new_origin is None and points_georeference is not None:
            points = points_utm
            geo_ref = points_georeference
        else:
            if new_origin is None:
                new_origin = Geo_reference(zone, min(points_utm[:, 0]),
                                                 min(points_utm[:, 1]))
            points = new_origin.change_points_geo_ref(points_utm,
                                                      points_georeference)
            geo_ref = new_origin

        # At this stage I need a georef and points
        # the points are relative to the georef
        geo_ref.write_NetCDF(outfile)

        x = points[:, 0]
        y = points[:, 1]

        # Coordinates are stored as single precision
        x = x.astype(netcdf_float32)
        y = y.astype(netcdf_float32)

        if verbose:
            log.critical('------------------------------------------------')
            log.critical('More Statistics:')
            log.critical('  Extent (/lon):')
            log.critical('    x in [%f, %f], len(lat) == %d'
                         % (min(x), max(x), len(x)))
            log.critical('    y in [%f, %f], len(lon) == %d'
                         % (min(y), max(y), len(y)))
            log.critical('geo_ref: %s' % str(geo_ref))
            log.critical('------------------------------------------------')

        outfile.variables['x'][:] = x  # - geo_ref.get_xllcorner()
        outfile.variables['y'][:] = y  # - geo_ref.get_yllcorner()
        outfile.variables['volumes'][:] = volumes.astype(num.int32)  # Opteron 64

    def store_static_quantities(self,
                                outfile,
                                sww_precision=num.float32,
                                verbose=False,
                                **quant):
        """
        Write the static quantity info.

        **quant is extra keyword arguments passed in. These must be
          the numpy arrays to be stored in the sww file at each timestep.

        The argument sww_precision allows for storing as either
        * single precision (default): num.float32
        * double precision: num.float64 or num.float

        Precondition:
            store_triangulation and
            store_header have been called.
        """

        # The dictionary quant must contain numpy arrays for each name.
        # These will typically be quantities from Domain such as friction
        #
        # Arrays not listed in static_quantities will be ignored, silently.
        #
        # This method will also write the ranges for each quantity,
        # e.g. stage_range, xmomentum_range and ymomentum_range
        for q in self.static_quantities:
            if q not in quant:
                msg = 'Values for quantity %s was not specified in ' % q
                msg += 'store_quantities so they cannot be stored.'
                # Bug fix: call syntax instead of 'raise Class, msg'
                raise NewQuantity(msg)
            else:
                q_values = ensure_numeric(quant[q])

                x = q_values.astype(sww_precision)
                outfile.variables[q][:] = x

                # This populates the _range values
                outfile.variables[q + Write_sww.RANGE][0] = num.min(x)
                outfile.variables[q + Write_sww.RANGE][1] = num.max(x)

        # FIXME: Hack for backwards compatibility with old viewer
        #if 'elevation' in self.static_quantities:
        #    outfile.variables['z'][:] = outfile.variables['elevation'][:]

    def store_quantities(self,
                         outfile,
                         sww_precision=num.float32,
                         slice_index=None,
                         time=None,
                         verbose=False,
                         **quant):
        """
        Write the quantity info at each timestep.

        **quant is extra keyword arguments passed in. These must be
          the numpy arrays to be stored in the sww file at each timestep.

        If the time array has already been built, use the slice_index
        to specify the index.

        Otherwise, use time to increase the time dimension.

        Maybe make this general, but the viewer assumes these quantities,
        so maybe we don't want it general - unless the viewer is general

        The argument sww_precision allows for storing as either
        * single precision (default): num.float32
        * double precision: num.float64 or num.float

        Precondition:
            store_triangulation and
            store_header have been called.
        """

        if time is not None:
            file_time = outfile.variables['time']
            slice_index = len(file_time)
            file_time[slice_index] = time
        else:
            slice_index = int(slice_index)  # Cast in case it was numpy.int

        # Write the named dynamic quantities
        # The dictionary quant must contain numpy arrays for each name.
        # These will typically be the conserved quantities from Domain
        # (Typically stage, xmomentum, ymomentum).
        #
        # Arrays not listed in dynamic_quantities will be ignored, silently.
        #
        # This method will also write the ranges for each quantity,
        # e.g. stage_range, xmomentum_range and ymomentum_range
        for q in self.dynamic_quantities:
            if q not in quant:
                msg = 'Values for quantity %s was not specified in ' % q
                msg += 'store_quantities so they cannot be stored.'
                # Bug fix: call syntax instead of 'raise Class, msg'
                raise NewQuantity(msg)
            else:
                q_values = ensure_numeric(quant[q])

                q_retyped = q_values.astype(sww_precision)
                outfile.variables[q][slice_index] = q_retyped

                # This updates the _range values
                q_range = outfile.variables[q + Write_sww.RANGE][:]
                q_values_min = num.min(q_values)
                if q_values_min < q_range[0]:
                    outfile.variables[q + Write_sww.RANGE][0] = q_values_min
                q_values_max = num.max(q_values)
                if q_values_max > q_range[1]:
                    outfile.variables[q + Write_sww.RANGE][1] = q_values_max

    def verbose_quantities(self, outfile):
        """Log the stored min/max range of each dynamic quantity."""
        log.critical('------------------------------------------------')
        log.critical('More Statistics:')
        for q in self.dynamic_quantities:
            # Bug fix: the original was missing the opening quote of the
            # format string (a syntax error).
            log.critical('%s in [%f, %f]'
                         % (q, outfile.variables[q + Write_sww.RANGE][0],
                            outfile.variables[q + Write_sww.RANGE][1]))
        log.critical('------------------------------------------------')
804
805
806
807
def extent_sww(file_name):
    """Determine the spatial and stage extents of an sww file.

    Input:
    file_name - path of the sww (NetCDF) file to inspect

    Output:
    A list: [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
    """

    from Scientific.IO.NetCDF import NetCDFFile

    # Open the NetCDF file read-only
    fid = NetCDFFile(file_name, netcdf_mode_r)

    # Pull out the coordinate and stage arrays
    xs = fid.variables['x'][:]
    ys = fid.variables['y'][:]
    stages = fid.variables['stage'][:]

    fid.close()

    extents = [min(xs), max(xs), min(ys), max(ys),
               num.min(stages), num.max(stages)]
    return extents
831
832
def load_sww_as_domain(filename, boundary=None, t=None,
               fail_if_NaN=True, NaN_filler=0,
               verbose=False, very_verbose=False):
    """
    Load an sww file into a domain.

    Usage: domain = load_sww_as_domain('file.sww',
                        t=time (default = last time in file))

    filename       name of the sww (NetCDF) file to read
    boundary       optional boundary dictionary; replaced by the weeded
                   boundary when vertices are deduplicated below
    t              model time to load; quantities stored over time are
                   interpolated to this instant (defaults to the last
                   stored timestep)
    fail_if_NaN    if True, raise DataMissingValuesError when a quantity
                   contains missing-value entries; if False, replace
                   them with NaN_filler
    NaN_filler     replacement value for missing data entries
    verbose        log progress messages
    very_verbose   additionally log per-quantity NaN statistics

    Returns a shallow_water Domain with its quantities set from the file.

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatable with the coordinates, and will
    give a different final boundary, or crash.
    """

    from Scientific.IO.NetCDF import NetCDFFile
    from anuga.shallow_water.shallow_water_domain import Domain

    # initialise NaN to the netCDF default fill value; may be overridden
    # below by each variable's own missing_value attribute.
    NaN = 9.969209968386869e+036

    if verbose: log.critical('Reading from %s' % filename)

    fid = NetCDFFile(filename, netcdf_mode_r)    # Open existing file for read
    time = fid.variables['time']       # Timesteps
    if t is None:
        t = time[-1]
    # (index, ratio) pair used to interpolate time-dependent quantities
    time_interp = get_time_interp(time,t)

    # Get the variables as numeric arrays
    x = fid.variables['x'][:]                   # x-coordinates of vertices
    y = fid.variables['y'][:]                   # y-coordinates of vertices
    elevation = fid.variables['elevation']      # Elevation
    stage = fid.variables['stage']              # Water level
    xmomentum = fid.variables['xmomentum']      # Momentum in the x-direction
    ymomentum = fid.variables['ymomentum']      # Momentum in the y-direction

    starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:]       # Connectivity
    coordinates = num.transpose(num.asarray([x.tolist(), y.tolist()]))
    # FIXME (Ole): Something like this might be better:
    #                 concatenate((x, y), axis=1)
    # or              concatenate((x[:,num.newaxis], x[:,num.newaxis]), axis=1)

    conserved_quantities = []
    interpolated_quantities = {}
    other_quantities = []

    # get geo_reference
    try:                             # sww files don't have to have a geo_ref
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: # AttributeError, e:
        geo_reference = None

    if verbose: log.critical('    getting quantities')

    # Split file variables into time-dependent ("conserved") quantities,
    # which are interpolated to time t now, and static quantities.
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity] = \
                  interpolated_quantity(fid.variables[quantity][:], time_interp)
        else:
            other_quantities.append(quantity)

    # Coordinates and connectivity are mesh structure, not quantities.
    other_quantities.remove('x')
    other_quantities.remove('y')
    #other_quantities.remove('z')
    other_quantities.remove('volumes')
    # The *_range bookkeeping variables are optional in older files.
    try:
        other_quantities.remove('stage_range')
        other_quantities.remove('xmomentum_range')
        other_quantities.remove('ymomentum_range')
        other_quantities.remove('elevation_range')
    except:
        pass

    conserved_quantities.remove('time')

    if verbose: log.critical('    building domain')

    #    From domain.Domain:
    #    domain = Domain(coordinates, volumes,\
    #                    conserved_quantities = conserved_quantities,\
    #                    other_quantities = other_quantities,zone=zone,\
    #                    xllcorner=xllcorner, yllcorner=yllcorner)

    # From shallow_water.Domain:
    coordinates = coordinates.tolist()
    volumes = volumes.tolist()
    # FIXME:should this be in mesh? (peter row)
    # Non-smoothed files store vertices per triangle; deduplicate them
    # (weed) so the Domain is built on unique coordinates.
    if fid.smoothing == 'Yes':
        unique = False
    else:
        unique = True
    if unique:
        coordinates, volumes, boundary = weed(coordinates, volumes,boundary)

     
   
    try:
        domain = Domain(coordinates, volumes, boundary, starttime=(float(starttime) + float(t)))
    except AssertionError, e:
        fid.close()
        msg = 'Domain could not be created: %s. ' \
              'Perhaps use "fail_if_NaN=False and NaN_filler = ..."' % e
        raise DataDomainError, msg

    if not boundary is None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    # Set the static quantities, masking out missing-value entries.
    for quantity in other_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass                       # quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            log.critical('       %s' % str(quantity))
            log.critical('        NaN = %s' % str(NaN))
            log.critical('        max(X)')
            log.critical('       %s' % str(max(X)))
            log.critical('        max(X)==NaN')
            log.critical('       %s' % str(max(X)==NaN))
            log.critical('')
        if max(X) == NaN or min(X) == NaN:
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry' % quantity
                raise DataMissingValuesError, msg
            else:
                # Zero out missing entries and substitute NaN_filler.
                data = (X != NaN)
                X = (X*data) + (data==0)*NaN_filler
        if unique:
            # Reshape flat per-vertex values to one row per triangle.
            X = num.resize(X, (len(X)/3, 3))
        domain.set_quantity(quantity, X)
    #
    # Set the time-interpolated quantities, same missing-value handling.
    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass                       # quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            log.critical('       %s' % str(quantity))
            log.critical('        NaN = %s' % str(NaN))
            log.critical('        max(X)')
            log.critical('       %s' % str(max(X)))
            log.critical('        max(X)==NaN')
            log.critical('       %s' % str(max(X)==NaN))
            log.critical('')
        if max(X) == NaN or min(X) == NaN:
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry' % quantity
                raise DataMissingValuesError, msg
            else:
                data = (X != NaN)
                X = (X*data) + (data==0)*NaN_filler
        if unique:
            X = num.resize(X, (X.shape[0]/3, 3))
        domain.set_quantity(quantity, X)

    fid.close()

    return domain
999
1000
def get_mesh_and_quantities_from_file(filename,
                                      quantities=None,
                                      verbose=False):
    """Get and rebuild mesh structure and associated quantities from sww file

    Input:
        filename - Name of sww file
        quantities - Names of quantities to load
          (NOTE(review): this argument is currently ignored; the returned
          dict always holds elevation, stage, xmomentum and ymomentum)

    Output:
        mesh - instance of class Interpolate
               (including mesh and interpolation functionality)
        quantities - arrays with quantity values at each mesh node
        time - vector of stored timesteps, shifted by the file starttime

    This function is used by e.g.:
        get_interpolated_quantities_at_polyline_midpoints
    """

    # FIXME (Ole): Maybe refactor filefunction using this more fundamental code.

    import types
    from Scientific.IO.NetCDF import NetCDFFile
    from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh

    if verbose: log.critical('Reading from %s' % filename)

    fid = NetCDFFile(filename, netcdf_mode_r)    # Open existing file for read
    time = fid.variables['time'][:]    # Time vector
    # Stored times are relative; shift by the simulation start time.
    time += fid.starttime[0]

    # Get the variables as numeric arrays
    x = fid.variables['x'][:]                   # x-coordinates of nodes
    y = fid.variables['y'][:]                   # y-coordinates of nodes
    elevation = fid.variables['elevation'][:]   # Elevation
    stage = fid.variables['stage'][:]           # Water level
    xmomentum = fid.variables['xmomentum'][:]   # Momentum in the x-direction
    ymomentum = fid.variables['ymomentum'][:]   # Momentum in the y-direction

    # Mesh (nodes (Mx2), triangles (Nx3))
    nodes = num.concatenate((x[:,num.newaxis], y[:,num.newaxis]), axis=1)
    triangles = fid.variables['volumes'][:]

    # Get geo_reference
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: #AttributeError, e:
        # Sww files don't have to have a geo_ref
        geo_reference = None

    if verbose: log.critical('    building mesh from sww file %s' % filename)

    boundary = None

    #FIXME (Peter Row): Should this be in mesh?
    # Non-smoothed files store vertices per triangle; collapse duplicate
    # points (weed) so the mesh is built on unique nodes.
    if fid.smoothing != 'Yes':
        nodes = nodes.tolist()
        triangles = triangles.tolist()
        nodes, triangles, boundary = weed(nodes, triangles, boundary)

    try:
        mesh = Mesh(nodes, triangles, boundary, geo_reference=geo_reference)
    except AssertionError, e:
        # Close the file before propagating the failure.
        fid.close()
        msg = 'Domain could not be created: %s. "' % e
        raise DataDomainError, msg

    quantities = {}
    quantities['elevation'] = elevation
    quantities['stage'] = stage
    quantities['xmomentum'] = xmomentum
    quantities['ymomentum'] = ymomentum

    fid.close()

    return mesh, quantities, time
1077
1078
def get_time_interp(time, t=None):
    """Find the index and ratio for time interpolation.

    time   array (or NetCDF variable) of stored timesteps, assumed
           monotonically increasing
    t      the sample time; defaults to the last stored timestep

    Returns (index, ratio) such that a quantity Q stored at the given
    timesteps evaluates to (1-ratio)*Q[index] + ratio*Q[index+1] at
    time t.  ratio is 0 when t coincides with a stored timestep
    (including the default t is None case, where index is -1).

    Raises DataTimeError if t lies outside [time[0], time[-1]].
    """
    if t is None:
        # Default: the final stored timestep, no interpolation needed.
        return (-1, 0.)

    tau = t
    if tau < time[0] or tau > time[-1]:
        msg = 'Time interval derived from file %s [%s:%s]' \
              % ('FIXMEfilename', time[0], time[-1])
        msg += ' does not match model time: %s' % tau
        # Callable raise form works under both Python 2 and 3
        # (the old 'raise E, msg' form is Python-2 only).
        raise DataTimeError(msg)

    # Locate the interval [time[index], time[index+1]] containing tau.
    index = 0
    while tau > time[index]: index += 1
    while tau < time[index]: index -= 1

    if tau == time[index]:
        # Protect against case where tau == time[-1] (last time)
        # - also works in general when tau == time[i]
        ratio = 0
    else:
        # t is now between index and index+1.
        # float() guards against integer truncation if the stored
        # time values happen to be integers (Python 2 '/' semantics).
        ratio = float(tau - time[index])/(time[index+1] - time[index])

    return (index, ratio)
1109
1110
1111
def interpolated_quantity(saved_quantity, time_interp):
    """Evaluate a stored quantity at an interpolated point in time.

    saved_quantity  the quantity values, indexed by timestep
    time_interp     (index, ratio) pair as produced by get_time_interp

    Returns the value at the requested time: a weighted blend of the
    values at timesteps index and index+1 when ratio > 0, otherwise
    simply the value stored at timestep index.
    """

    index, ratio = time_interp

    if ratio <= 0:
        # Exact timestep match - no blending required.
        return saved_quantity[index]

    # Linear blend between the two bracketing timesteps.
    return (1 - ratio)*saved_quantity[index] + ratio*saved_quantity[index + 1]
1132
1133
def weed(coordinates, volumes, boundary=None):
    """Excise all duplicate points, remapping volumes and boundary.

    coordinates  list (or numpy array) of [x, y] vertex coordinates
    volumes      list (or numpy array) of vertex-index triples
    boundary     optional dict mapping (point_index0, point_index1)
                 segments to labels

    Returns (coordinates, volumes, boundary) where duplicate coordinates
    have been removed and all indices remapped to the deduplicated
    numbering.  When two boundary segments collapse onto the same point
    pair (in either orientation), the first label encountered is kept -
    this mirrors the original behaviour.
    #FIXME should the boundary attributes be concatenated
    #('exterior, pond') or replaced ('pond')(peter row)
    """
    if isinstance(coordinates, num.ndarray):
        coordinates = coordinates.tolist()
    if isinstance(volumes, num.ndarray):
        volumes = volumes.tolist()

    # same_point maps every original index to its coordinate tuple;
    # point_dict maps each unique point to its first occurrence.
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        same_point[i] = point
        if point not in point_dict:
            point_dict[point] = i

    # Rebuild the coordinate list with one entry per unique point and
    # renumber point_dict to the new (dense) indices.
    coordinates = []
    i = 0
    for point in point_dict.keys():
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1

    # Remap volume vertex indices to the deduplicated numbering
    # (negative indices mark absent neighbours and are left alone).
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    if boundary is not None:
        new_boundary = {}
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            # Keep only the first label seen for a given point pair,
            # in either orientation; later duplicates are dropped.
            if (point0, point1) not in new_boundary and \
               (point1, point0) not in new_boundary:
                new_boundary[(point0, point1)] = boundary[segment]
        boundary = new_boundary

    return coordinates, volumes, boundary
1188
1189
1190
# Note: See TracBrowser for help on using the repository browser.