source: trunk/anuga_core/source/anuga/file/sww.py @ 8813

Last change on this file since 8813 was 8810, checked in by steve, 12 years ago

Some changes to netcdf config

File size: 44.8 KB
Line 
1""" Classes to read an SWW file.
2"""
3
import exceptions

# Exception hierarchy used by the sww reading/writing code.
# NOTE: exceptions.Exception is the builtin Exception in Python 2.
# Fix: DataMissingValuesError was previously defined twice; the duplicate
# definition has been removed.
class DataFileNotOpenError(exceptions.Exception): pass
class DataMissingValuesError(exceptions.Exception): pass
class NewQuantity(exceptions.Exception): pass
class DataDomainError(exceptions.Exception): pass
class DataTimeError(exceptions.Exception): pass
11
12
13from anuga.coordinate_transforms.geo_reference import Geo_reference
14from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a
15from anuga.config import netcdf_float, netcdf_float32, netcdf_int
16from anuga.config import max_float
17from anuga.utilities.numerical_tools import ensure_numeric
18import anuga.utilities.log as log
19from anuga.file.netcdf import NetCDFFile
20
21from sts import Write_sts
22
23from anuga.coordinate_transforms.geo_reference import \
24        ensure_geo_reference
25
26from file_utils import create_filename
27import numpy as num
28
class Data_format:
    """Generic interface to data formats
    """

    def __init__(self, domain, extension, mode=netcdf_mode_w):
        """Validate the file mode, remember the domain and build the
        output filename.

        domain     the ANUGA domain being stored
        extension  filename extension, e.g. 'sww'
        mode       netcdf mode string; first character must be
                   'r' (read), 'w' (write) or 'a' (append)
        """
        assert mode[0] in ['r', 'w', 'a'], \
               "Mode %s must be either:\n" % mode + \
               "   'w' (write)\n" + \
               "   'r' (read)\n" + \
               "   'a' (append)"

        self.domain = domain
        self.timestep = 0

        # Full output path derived from the domain's data directory,
        # simulation name and the requested extension.
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        # Mesh dimensions.  Ghost cells of a parallel domain are included
        # here (probably they should be excluded - see original FIXME).
        self.number_of_nodes = domain.number_of_nodes
        self.number_of_volumes = domain.number_of_triangles

        # FIXME: Should we have a general set_precision function?
55
class SWW_file(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """

    def __init__(self, domain, 
                 mode=netcdf_mode_w, max_size=2000000000, recursion=False):
        """Set up an sww writer for the given domain.

        domain     the ANUGA domain whose quantities are to be stored
        mode       netcdf mode: 'w' (write), 'r' (read) or 'a' (append)
        max_size   maximum file size in bytes before output is split into
                   a chain of follow-on files (see store_timestep)
        recursion  used internally when a follow-on file is created after
                   max_size was exceeded; False for the first file
        """

        self.precision = netcdf_float32 # Use single precision for quantities
        self.recursion = recursion
        self.mode = mode
        # Domain attributes, when present, override the defaults.
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size # File size max is 2Gig
        else:
            self.max_size = max_size
        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            # NOTE(review): default_minimum_storable_height is not defined
            # or imported in the visible part of this file - confirm it
            # exists at module level, otherwise this branch raises NameError.
            self.minimum_storable_height = default_minimum_storable_height

        # Call parent constructor
        Data_format.__init__(self, domain, 'sww', mode)

        # Get static and dynamic quantities from domain
        static_quantities = []
        dynamic_quantities = []
       
        for q in domain.quantities_to_be_stored:
            flag = domain.quantities_to_be_stored[q]
       
            msg = 'Quantity %s is requested to be stored ' % q
            msg += 'but it does not exist in domain.quantities'
            assert q in domain.quantities, msg
       
            # Flag semantics: 1 == store once (static),
            #                 2 == store every timestep (dynamic).
            assert flag in [1,2]
            if flag == 1: static_quantities.append(q)
            if flag == 2: dynamic_quantities.append(q)               
                       
       
        # NetCDF file definition.  Header and optional monitoring
        # variables are only created when opening for write.
        fid = NetCDFFile(self.filename, mode)
        if mode[0] == 'w':
            description = 'Output from anuga.file.sww ' \
                          'suitable for plotting'
                         
            self.writer = Write_sww(static_quantities, dynamic_quantities)
            self.writer.store_header(fid,
                                     domain.starttime,
                                     self.number_of_volumes,
                                     self.domain.number_of_nodes,
                                     description=description,
                                     smoothing=domain.smooth,
                                     order=domain.default_order,
                                     sww_precision=self.precision)

            # Extra optional information
            if hasattr(domain, 'texture'):
                fid.texture = domain.texture

            if domain.quantities_to_be_monitored is not None:
                fid.createDimension('singleton', 1)
                fid.createDimension('two', 2)

                # Optional polygon restricting where extrema are monitored.
                poly = domain.monitor_polygon
                if poly is not None:
                    N = len(poly)
                    fid.createDimension('polygon_length', N)
                    fid.createVariable('extrema.polygon',
                                       self.precision,
                                       ('polygon_length', 'two'))
                    fid.variables['extrema.polygon'][:] = poly

                # Optional time interval restricting when extrema are
                # monitored.
                interval = domain.monitor_time_interval
                if interval is not None:
                    fid.createVariable('extrema.time_interval',
                                       self.precision,
                                       ('two',))
                    fid.variables['extrema.time_interval'][:] = interval

                # Per-quantity extrema variables: min/max values plus the
                # locations and times at which they occurred.
                for q in domain.quantities_to_be_monitored:
                    fid.createVariable(q + '.extrema', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.min_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.max_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.min_time', self.precision,
                                       ('singleton',))
                    fid.createVariable(q + '.max_time', self.precision,
                                       ('singleton',))

        fid.close()

    def store_connectivity(self):
        """Store information about nodes, triangles and static quantities

        Writes x,y coordinates of triangles and their connectivity.
       
        Store also any quantity that has been identified as static.
        """

        # FIXME: Change name to reflect the fact thta this function
        # stores both connectivity (triangulation) and static quantities
       

        domain = self.domain

        # append to the NetCDF file
        fid = NetCDFFile(self.filename, netcdf_mode_a)

        # Get X, Y from one (any) of the quantities
        Q = domain.quantities.values()[0]
        X,Y,_,V = Q.get_vertex_values(xy=True, precision=self.precision)

        # store the connectivity data.  points is an Nx2 array of vertex
        # coordinates; V holds the triangle vertex indices.
        points = num.concatenate((X[:,num.newaxis],Y[:,num.newaxis]), axis=1)
        self.writer.store_triangulation(fid,
                                        points,
                                        V.astype(num.float32),
                                        points_georeference=\
                                            domain.geo_reference)


        # For parallel domains also record the local-to-global mappings
        # so the distributed pieces can be stitched back together.
        if domain.parallel:
            self.writer.store_parallel_data(fid,
                                        domain.number_of_global_triangles,
                                        domain.number_of_global_nodes,
                                        domain.tri_full_flag,
                                        domain.tri_l2g,
                                        domain.node_l2g)


        # Get names of static quantities
        static_quantities = {}
        for name in self.writer.static_quantities:
            Q = domain.quantities[name]
            A, _ = Q.get_vertex_values(xy=False, 
                                       precision=self.precision)
            static_quantities[name] = A
       
        # Store static quantities       
        self.writer.store_static_quantities(fid, **static_quantities)
                                           
        fid.close()


    def store_timestep(self):
        """Store time and time dependent quantities

        Opens the file in append mode (retrying while another process may
        be reading it).  If the file would exceed max_size, a new SWW_file
        is chained in with a '_time_<t>' suffix; otherwise the dynamic
        quantities for the current domain time are appended.
        """

        #import types
        from time import sleep
        from os import stat


        # Get NetCDF.  Retry up to 10 times (1 s apart) in case another
        # process currently holds the file open for reading.
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                # Open existing file
                fid = NetCDFFile(self.filename, netcdf_mode_a)
            except IOError:
                # This could happen if someone was reading the file.
                # In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened' \
                      % self.filename
                msg += ' - trying step %s again' % self.domain.time
                log.critical(msg)
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' % self.filename
            raise DataFileNotOpenError, msg

        # Check to see if the file is already too big:
        time = fid.variables['time']

        # i is the number of timesteps after this one is stored;
        # file_size/i estimates the bytes added per step (Python 2
        # integer division).
        i = len(time) + 1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size / i
        # NOTE: self.recursion may be False (treated as 0 by 2**) or an
        # int counting how many times the output has already been split.
        if file_size + file_size_increase > self.max_size * 2**self.recursion:
            # In order to get the file name and start time correct,
            # I change the domain.filename and domain.starttime.
            # This is the only way to do this without changing
            # other modules (I think).

            # Write a filename addon that won't break the anuga viewers
            # (10.sww is bad)
            filename_ext = '_time_%s' % self.domain.time
            filename_ext = filename_ext.replace('.', '_')

            # Remember the old filename, then give domain a
            # name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename + filename_ext)

            # Temporarily change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.get_time()

            # Build a new data_structure.
            next_data_structure = SWW_file(self.domain, mode=self.mode,
                                           max_size=self.max_size,
                                           recursion=self.recursion+1)
            if not self.recursion:
                log.critical('    file_size = %s' % file_size)
                log.critical('    saving file to %s'
                             % next_data_structure.filename) 

            # Set up the new data_structure
            self.domain.writer = next_data_structure

            # Store connectivity and first timestep
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep()
            fid.sync()
            fid.close()

            # Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            i = len(time)
             
            if 'stage' in self.writer.dynamic_quantities:           
                # Select only those values for stage,
                # xmomentum and ymomentum (if stored) where
                # depth exceeds minimum_storable_height
                #
                # In this branch it is assumed that elevation
                # is also available as a quantity


                # Smoothing for the get_vertex_values will be obtained
                # from the smooth setting in domain
           
                Q = domain.quantities['stage']
                w, _ = Q.get_vertex_values(xy=False)
               
                Q = domain.quantities['elevation']
                z, _ = Q.get_vertex_values(xy=False)               
               
                # Boolean mask: True where the cell is "wet enough" to be
                # worth storing.
                storable_indices = num.array(w-z >= self.minimum_storable_height)
            else:
                # Very unlikely branch
                storable_indices = None # This means take all
           
            # Now store dynamic quantities
            dynamic_quantities = {}
            for name in self.writer.dynamic_quantities:
                #netcdf_array = fid.variables[name]
               
                Q = domain.quantities[name]
                A, _ = Q.get_vertex_values(xy=False,
                                           precision=self.precision)
               
                if storable_indices is not None:
                    if name == 'stage':
                        # Where not storable (dry), record the elevation
                        # as the stage; otherwise keep the computed stage.
                        A = num.choose(storable_indices, (z, A))

                    if name in ['xmomentum', 'ymomentum']:
                        # Get xmomentum where depth exceeds
                        # minimum_storable_height
                       
                        # Define a zero vector of same size and type as A
                        # for use with momenta
                        null = num.zeros(num.size(A), A.dtype.char)
                        A = num.choose(storable_indices, (null, A))
               
                dynamic_quantities[name] = A
               
                                       
            # Store dynamic quantities
            self.writer.store_quantities(fid,
                                         time=self.domain.time,
                                         sww_precision=self.precision,
                                         **dynamic_quantities)


            # Update extrema if requested
            domain = self.domain
            if domain.quantities_to_be_monitored is not None:
                for q, info in domain.quantities_to_be_monitored.items():
                    if info['min'] is not None:
                        fid.variables[q + '.extrema'][0] = info['min']
                        fid.variables[q + '.min_location'][:] = \
                                        info['min_location']
                        fid.variables[q + '.min_time'][0] = info['min_time']

                    if info['max'] is not None:
                        fid.variables[q + '.extrema'][1] = info['max']
                        fid.variables[q + '.max_location'][:] = \
                                        info['max_location']
                        fid.variables[q + '.max_time'][0] = info['max_time']

            # Flush and close
            #fid.sync()
            fid.close()
370
371
class Read_sww:
    """Reader for NetCDF sww files.

    Loads the mesh (triangles and vertex coordinates) once on
    construction, plus the quantities for an initial frame (timestep).
    """

    def __init__(self, source):
        """The source parameter is assumed to be a NetCDF sww file.
        """

        self.source = source

        self.frame_number = 0

        fin = NetCDFFile(self.source, 'r')

        # Use builtin float/int as dtypes instead of the deprecated
        # num.float/num.int aliases (removed in numpy >= 1.24); they are
        # equivalent.
        self.time = num.array(fin.variables['time'], float)
        self.last_frame_number = self.time.shape[0] - 1

        self.frames = num.arange(self.last_frame_number + 1)

        fin.close()

        self.read_mesh()

        self.quantities = {}

        self.read_quantities()

    def read_mesh(self):
        """ Read and store the mesh data contained within this sww file.
        """
        fin = NetCDFFile(self.source, 'r')

        self.vertices = num.array(fin.variables['volumes'], int)

        self.x = x = num.array(fin.variables['x'], float)
        self.y = y = num.array(fin.variables['y'], float)

        assert len(self.x) == len(self.y)

        # Bounding box of the mesh.
        self.xmin = num.min(x)
        self.xmax = num.max(x)
        self.ymin = num.min(y)
        self.ymax = num.max(y)

        fin.close()

    def read_quantities(self, frame_number=0):
        """
        Read the quantities contained in this file.
        frame_number is the time index to load.

        Returns the quantities dict mapping name -> (M, 3) array of
        per-triangle vertex values.
        """
        assert frame_number >= 0 and frame_number <= self.last_frame_number

        self.frame_number = frame_number

        # Three vertex values per triangle.  Explicit floor division:
        # '/' on ints relied on Python 2 semantics.
        M = len(self.x) // 3

        fin = NetCDFFile(self.source, 'r')

        # Everything except coordinates, time, connectivity and the
        # '*_range' helper variables is a quantity.
        excluded = ('x', 'y', 'time', 'volumes')
        names = [n for n in fin.variables.keys()
                 if n not in excluded and '_range' not in n]

        for q in names:
            if len(fin.variables[q].shape) == 1:  # Not a time-varying quantity
                self.quantities[q] = \
                    num.ravel(num.array(fin.variables[q], float)).reshape(M, 3)
            else:  # Time-varying, get the current timestep data
                self.quantities[q] = \
                    num.array(fin.variables[q][self.frame_number],
                              float).reshape(M, 3)
        fin.close()
        return self.quantities

    def get_bounds(self):
        """
            Get the bounding rect around the mesh.
        """
        return [self.xmin, self.xmax, self.ymin, self.ymax]

    def get_last_frame_number(self):
        """
            Return the last loaded frame index.
        """
        return self.last_frame_number

    def get_time(self):
        """
            Get time at the current frame num, in secs.
        """
        return self.time[self.frame_number]
458
459
460class Write_sww(Write_sts):
461    """
462        A class to write an SWW file.
463       
464        It is domain agnostic, and requires all the data to be fed in
465        manually.
466    """
467
468    def __init__(self, static_quantities, dynamic_quantities):
469        """Initialise Write_sww with two list af quantity names:
470       
471        static_quantities (e.g. elevation or friction):
472            Stored once at the beginning of the simulation in a 1D array
473            of length number_of_points   
474        dynamic_quantities (e.g stage):
475            Stored every timestep in a 2D array with
476            dimensions number_of_points X number_of_timesteps       
477       
478        """
479        self.static_quantities = static_quantities   
480        self.dynamic_quantities = dynamic_quantities
481
482
483    def store_header(self,
484                     outfile,
485                     times,
486                     number_of_volumes,
487                     number_of_points,
488                     description='Generated by ANUGA',
489                     smoothing=True,
490                     order=1,
491                     sww_precision=netcdf_float32,
492                     verbose=False):
493        """Write an SWW file header.
494
495        Writes the first section of the .sww file.
496
497        outfile - the open file that will be written
498        times - A list of the time slice times OR a start time
499        Note, if a list is given the info will be made relative.
500        number_of_volumes - the number of triangles
501        number_of_points - the number of vertices in the mesh
502        """
503
504        from anuga.abstract_2d_finite_volumes.util \
505            import get_revision_number
506
507        outfile.institution = 'Geoscience Australia'
508        outfile.description = description
509
510        # For sww compatibility
511        if smoothing is True:
512            # Smoothing to be depreciated
513            outfile.smoothing = 'Yes'
514            outfile.vertices_are_stored_uniquely = 'False'
515        else:
516            # Smoothing to be depreciated
517            outfile.smoothing = 'No'
518            outfile.vertices_are_stored_uniquely = 'True'
519        outfile.order = order
520
521        try:
522            revision_number = get_revision_number()
523        except:
524            # This will be triggered if the system cannot get the SVN
525            # revision number.
526            revision_number = None
527        # Allow None to be stored as a string
528        outfile.revision_number = str(revision_number)
529
530        # This is being used to seperate one number from a list.
531        # what it is actually doing is sorting lists from numeric arrays.
532        if isinstance(times, (list, num.ndarray)):
533            number_of_times = len(times)
534            times = ensure_numeric(times)
535            if number_of_times == 0:
536                starttime = 0
537            else:
538                starttime = times[0]
539                times = times - starttime  #Store relative times
540        else:
541            number_of_times = 0
542            starttime = times
543
544
545        outfile.starttime = starttime
546
547        # dimension definitions
548        outfile.createDimension('number_of_volumes', number_of_volumes)
549        outfile.createDimension('number_of_triangle_vertices', number_of_points)
550        outfile.createDimension('number_of_vertices', 3)
551        outfile.createDimension('numbers_in_range', 2)
552
553        if smoothing is True:
554            outfile.createDimension('number_of_points', number_of_points)
555            # FIXME(Ole): This will cause sww files for parallel domains to
556            # have ghost nodes stored (but not used by triangles).
557            # To clean this up, we have to change get_vertex_values and
558            # friends in quantity.py (but I can't be bothered right now)
559        else:
560            outfile.createDimension('number_of_points', 3*number_of_volumes)
561
562        outfile.createDimension('number_of_timesteps', number_of_times)
563
564        # variable definitions
565        outfile.createVariable('x', sww_precision, ('number_of_points',))
566        outfile.createVariable('y', sww_precision, ('number_of_points',))
567
568        outfile.createVariable('volumes', netcdf_int , ('number_of_volumes',
569                                                       'number_of_vertices'))
570
571
572        for q in self.static_quantities:
573           
574            outfile.createVariable(q, sww_precision,
575                                   ('number_of_points',))
576           
577            outfile.createVariable(q + Write_sww.RANGE, sww_precision,
578                                   ('numbers_in_range',))
579                                   
580            # Initialise ranges with small and large sentinels.
581            # If this was in pure Python we could have used None sensibly
582            outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
583            outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
584
585       
586        self.write_dynamic_quantities(outfile, self.dynamic_quantities, times, \
587                                        precis = sww_precision)
588
589
590        outfile.sync()
591
592
593
    def store_triangulation(self,
                            outfile,
                            points_utm,
                            volumes,
                            zone=None, 
                            new_origin=None,
                            points_georeference=None, 
                            verbose=False):
        """
        Store triangulation data in the underlying file.
       
        Stores the points and triangle indices in the sww file
       
        outfile Open handle to underlying file.

        points_utm - currently a list or array of the points in UTM.
        volumes - triangle connectivity (indices into points_utm).
        zone - UTM zone used when a fresh georeference must be created.
        new_origin - a georeference that the points can be set to (maybe
        do this before calling this function).
        points_georeference - the georeference of the points_utm.
        verbose - True if this function is to be verbose.

        Georeference resolution rules:

        If you get both new_origin and points_georeference, convert the
        points from the old georeference to the new.

        If you only get new_origin, the points are absolute,
        convert to relative.

        If you only get points_georeference the points are relative, store
        as relative.

        If you get no georefs, create a new georef based on the minimums of
        points_utm.  (Another option would be to default to absolute; that
        choice is made elsewhere, probably in geospatial code.)

        If you don't supply either geo_ref, then supply a zone. If not
        the default zone will be used.

        precon:
            store_header has been called.
        """

        number_of_points = len(points_utm)
        # Force connectivity into an Nx3 int32 array.
        volumes = num.array(volumes,num.int32).reshape(-1,3)


        points_utm = num.array(points_utm)

        # Given the two geo_refs and the points, do the stuff
        # described in the method header
        # if this is needed else where, pull out as a function
        points_georeference = ensure_geo_reference(points_georeference)
        new_origin = ensure_geo_reference(new_origin)
        if new_origin is None and points_georeference is not None:
            # Points already relative to their own georeference; keep as-is.
            points = points_utm
            geo_ref = points_georeference
        else:
            if new_origin is None:
                # No georef supplied at all: anchor at the minimum corner.
                new_origin = Geo_reference(zone, min(points_utm[:,0]),
                                                 min(points_utm[:,1]))
            points = new_origin.change_points_geo_ref(points_utm,
                                                      points_georeference)
            geo_ref = new_origin

        # At this stage I need a georef and points
        # the points are relative to the georef
        geo_ref.write_NetCDF(outfile)

        # This will put the geo ref in the middle
        #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)

        x =  points[:,0]
        y =  points[:,1]

        #x = x.astype(netcdf_float32)
        #y = y.astype(netcdf_float32)
       

        if verbose:
            log.critical('------------------------------------------------')
            log.critical('More Statistics:')
            log.critical('  Extent (/lon):')
            log.critical('    x in [%f, %f], len(lat) == %d'
                         % (min(x), max(x), len(x)))
            log.critical('    y in [%f, %f], len(lon) == %d'
                         % (min(y), max(y), len(y)))
            #log.critical('    z in [%f, %f], len(z) == %d'
            #             % (min(elevation), max(elevation), len(elevation)))
            log.critical('geo_ref: %s' % str(geo_ref))
            log.critical('------------------------------------------------')


        # Write the (georef-relative) coordinates and connectivity.
        outfile.variables['x'][:] = x #- geo_ref.get_xllcorner()
        outfile.variables['y'][:] = y #- geo_ref.get_yllcorner()


        msg = 'Mismatch between shape of volumes array and (number_of_volumes , 3)'
        assert volumes.shape == outfile.variables['volumes'].shape, msg

        outfile.variables['volumes'][:] = volumes
701
702
703
704
705    def store_parallel_data(self,
706                            outfile,
707                            number_of_global_triangles,
708                            number_of_global_nodes,
709                            tri_full_flag = None,
710                            tri_l2g = None,
711                            node_l2g = None,
712                            sww_precision=netcdf_float32,
713                            verbose=False):
714
715
716         # dimension definitions
717        #outfile.createDimension('number_of_volumes', number_of_volumes)
718        #outfile.createDimension('number_of_vertices', 3)
719        #outfile.createDimension('numbers_in_range', 2)
720
721        #print 'store parallel data'
722        outfile.number_of_global_triangles = number_of_global_triangles
723        outfile.number_of_global_nodes = number_of_global_nodes
724
725        # variable definitions
726        outfile.createVariable('tri_l2g',  netcdf_int, ('number_of_volumes',))
727        outfile.createVariable('node_l2g', netcdf_int, ('number_of_triangle_vertices',))
728        outfile.createVariable('tri_full_flag', netcdf_int, ('number_of_volumes',))
729
730        #print tri_l2g.shape
731        #print tri_l2g
732        #print outfile.variables['tri_l2g'].shape
733
734        outfile.variables['tri_l2g'][:] = tri_l2g.astype(num.int32)
735
736        #print node_l2g.shape
737        #print node_l2g
738        #print outfile.variables['node_l2g'].shape
739
740        outfile.variables['node_l2g'][:] = node_l2g.astype(num.int32)
741
742        #print tri_full_flag.shape
743        #print tri_full_flag
744        #print outfile.variables['tri_full_flag'].shape
745
746        outfile.variables['tri_full_flag'][:] = tri_full_flag.astype(num.int32)
747
748
749
750    def store_static_quantities(self, 
751                                outfile, 
752                                sww_precision=num.float32,
753                                verbose=False, 
754                                **quant):
755        """
756        Write the static quantity info.
757
758        **quant is extra keyword arguments passed in. These must be
759          the numpy arrays to be stored in the sww file at each timestep.
760
761        The argument sww_precision allows for storing as either
762        * single precision (default): num.float32
763        * double precision: num.float64 or num.float
764
765        Precondition:
766            store_triangulation and
767            store_header have been called.
768        """
769
770        # The dictionary quant must contain numpy arrays for each name.
771        # These will typically be quantities from Domain such as friction
772        #
773        # Arrays not listed in static_quantitiues will be ignored, silently.
774        #
775        # This method will also write the ranges for each quantity,
776        # e.g. stage_range, xmomentum_range and ymomentum_range
777        for q in self.static_quantities:
778            if not quant.has_key(q):
779                msg = 'Values for quantity %s was not specified in ' % q
780                msg += 'store_quantities so they cannot be stored.'
781                raise NewQuantity, msg
782            else:
783                q_values = ensure_numeric(quant[q])
784               
785                x = q_values.astype(sww_precision)
786                outfile.variables[q][:] = x
787       
788                # This populates the _range values
789                outfile.variables[q + Write_sww.RANGE][0] = num.min(x)
790                outfile.variables[q + Write_sww.RANGE][1] = num.max(x)
791                   
792        # FIXME: Hack for backwards compatibility with old viewer
793        #if 'elevation' in self.static_quantities:
794        #    outfile.variables['z'][:] = outfile.variables['elevation'][:]
795
796                   
797       
798
799    def store_quantities(self, 
800                         outfile, 
801                         sww_precision=num.float32,
802                         slice_index=None,
803                         time=None,
804                         verbose=False, 
805                         **quant):
806        """
807        Write the quantity info at each timestep.
808
809        **quant is extra keyword arguments passed in. These must be
810          the numpy arrays to be stored in the sww file at each timestep.
811
812        if the time array is already been built, use the slice_index
813        to specify the index.
814
815        Otherwise, use time to increase the time dimension
816
817        Maybe make this general, but the viewer assumes these quantities,
818        so maybe we don't want it general - unless the viewer is general
819       
820        The argument sww_precision allows for storing as either
821        * single precision (default): num.float32
822        * double precision: num.float64 or num.float
823
824        Precondition:
825            store_triangulation and
826            store_header have been called.
827        """
828
829        if time is not None:
830            file_time = outfile.variables['time']
831            slice_index = len(file_time)
832            file_time[slice_index] = time
833        else:
834            slice_index = int(slice_index) # Has to be cast in case it was numpy.int   
835
836        # Write the named dynamic quantities
837        # The dictionary quant must contain numpy arrays for each name.
838        # These will typically be the conserved quantities from Domain
839        # (Typically stage,  xmomentum, ymomentum).
840        #
841        # Arrays not listed in dynamic_quantitiues will be ignored, silently.
842        #
843        # This method will also write the ranges for each quantity,
844        # e.g. stage_range, xmomentum_range and ymomentum_range
845        for q in self.dynamic_quantities:
846            if not quant.has_key(q):
847                msg = 'Values for quantity %s was not specified in ' % q
848                msg += 'store_quantities so they cannot be stored.'
849                raise NewQuantity, msg
850            else:
851                q_values = ensure_numeric(quant[q])
852               
853                q_retyped = q_values.astype(sww_precision)
854                outfile.variables[q][slice_index] = q_retyped
855                   
856                # This updates the _range values
857                q_range = outfile.variables[q + Write_sww.RANGE][:]
858                q_values_min = num.min(q_values)
859                if q_values_min < q_range[0]:
860                    outfile.variables[q + Write_sww.RANGE][0] = q_values_min
861                q_values_max = num.max(q_values)
862                if q_values_max > q_range[1]:
863                    outfile.variables[q + Write_sww.RANGE][1] = q_values_max
864
865
866
867       
868
869    def verbose_quantities(self, outfile):
870        log.critical('------------------------------------------------')
871        log.critical('More Statistics:')
872        for q in self.dynamic_quantities:
873            log.critical(%s in [%f, %f]'
874                         % (q, outfile.variables[q+Write_sww.RANGE][0],
875                            outfile.variables[q+Write_sww.RANGE][1]))
876        log.critical('------------------------------------------------')
877
878
879
880
def extent_sww(file_name):
    """Read an sww file and return its spatial and stage extents.

    Input:
    file_name - the sww file

    Output:
    A list: [min(x), max(x), min(y), max(y), min(stage), max(stage)]
    """

    # Open the NetCDF file read-only
    fid = NetCDFFile(file_name, netcdf_mode_r)

    # Pull the coordinate and stage arrays into memory
    xs = fid.variables['x'][:]
    ys = fid.variables['y'][:]
    stage_values = fid.variables['stage'][:]

    fid.close()

    return [min(xs), max(xs), min(ys), max(ys),
            num.min(stage_values), num.max(stage_values)]
903
904
def load_sww_as_domain(filename, boundary=None, t=None,
               fail_if_NaN=True, NaN_filler=0,
               verbose=False, very_verbose=False):
    """
    Load an sww file into a domain.

    Usage: domain = load_sww_as_domain('file.sww',
                        t=time (default = last time in file))

    filename      name of the sww file to read
    boundary      optional boundary dictionary; see warning below
    t             time at which the dynamic quantities are extracted
                  (interpolated between stored timesteps); defaults to
                  the last time stored in the file
    fail_if_NaN   if True, raise DataMissingValuesError when a quantity
                  contains its missing_value marker; if False, replace
                  marked entries with NaN_filler
    NaN_filler    replacement value used when fail_if_NaN is False
    verbose       log progress messages
    very_verbose  additionally log per-quantity statistics

    Returns a shallow_water Domain with all quantities found in the
    file set on it.

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatable with the coordinates, and will
    give a different final boundary, or crash.
    """

    from anuga.shallow_water.shallow_water_domain import Domain

    # Default "no data" marker; may be overridden per quantity below by
    # the variable's own missing_value attribute.
    NaN = 9.969209968386869e+036

    if verbose: log.critical('Reading from %s' % filename)

    fid = NetCDFFile(filename, netcdf_mode_r)    # Open existing file for read
    time = fid.variables['time']       # Timesteps
    if t is None:
        t = time[-1]
    time_interp = get_time_interp(time,t)   # (index, ratio) bracketing t

    # Get the variables as numeric arrays
    x = fid.variables['x'][:]                   # x-coordinates of vertices
    y = fid.variables['y'][:]                   # y-coordinates of vertices
    #elevation = fid.variables['elevation']      # Elevation
    #stage = fid.variables['stage']              # Water level
    #xmomentum = fid.variables['xmomentum']      # Momentum in the x-direction
    #ymomentum = fid.variables['ymomentum']      # Momentum in the y-direction

    starttime = float(fid.starttime)
    #starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:]       # Connectivity
    coordinates = num.transpose(num.asarray([x.tolist(), y.tolist()]))
    # FIXME (Ole): Something like this might be better:
    #                 concatenate((x, y), axis=1)
    # or              concatenate((x[:,num.newaxis], x[:,num.newaxis]), axis=1)

    conserved_quantities = []         # names of time-dependent quantities
    interpolated_quantities = {}      # their values interpolated to time t
    other_quantities = []             # names of static quantities

    # get geo_reference
    try:                             # sww files don't have to have a geo_ref
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: # AttributeError, e:
        geo_reference = None

    if verbose: log.critical('    getting quantities')

    # Classify every netCDF variable: those with a time dimension are
    # dynamic (conserved) quantities; the rest are static.
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity] = \
                  interpolated_quantity(fid.variables[quantity][:], time_interp)
        else:
            other_quantities.append(quantity)

    # Drop mesh/bookkeeping variables that are not domain quantities.
    other_quantities.remove('x')
    other_quantities.remove('y')
    #other_quantities.remove('z')
    other_quantities.remove('volumes')
    try:
        other_quantities.remove('stage_range')
        other_quantities.remove('xmomentum_range')
        other_quantities.remove('ymomentum_range')
        other_quantities.remove('elevation_range')
    except:
        pass                  # older sww files have no _range variables

    conserved_quantities.remove('time')

    if verbose: log.critical('    building domain')

    #    From domain.Domain:
    #    domain = Domain(coordinates, volumes,\
    #                    conserved_quantities = conserved_quantities,\
    #                    other_quantities = other_quantities,zone=zone,\
    #                    xllcorner=xllcorner, yllcorner=yllcorner)

    # From shallow_water.Domain:
    coordinates = coordinates.tolist()
    volumes = volumes.tolist()
    # FIXME:should this be in mesh? (peter row)
    if fid.smoothing == 'Yes':
        unique = False
    else:
        unique = True
    if unique:
        # Vertices are not shared between triangles: collapse duplicate
        # coordinates and remap volumes/boundary accordingly.
        coordinates, volumes, boundary = weed(coordinates, volumes,boundary)



    try:
        domain = Domain(coordinates, volumes, boundary, starttime=(float(starttime) + float(t)))
    except AssertionError, e:
        fid.close()
        msg = 'Domain could not be created: %s. ' \
              'Perhaps use "fail_if_NaN=False and NaN_filler = ..."' % e
        raise DataDomainError, msg

    if not boundary is None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    # Set the static quantities, filling/failing on missing entries.
    for quantity in other_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass                       # quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            log.critical('       %s' % str(quantity))
            log.critical('        NaN = %s' % str(NaN))
            log.critical('        max(X)')
            log.critical('       %s' % str(max(X)))
            log.critical('        max(X)==NaN')
            log.critical('       %s' % str(max(X)==NaN))
            log.critical('')
        if max(X) == NaN or min(X) == NaN:
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry' % quantity
                raise DataMissingValuesError, msg
            else:
                # Zero the marked entries, then add the filler there.
                data = (X != NaN)
                X = (X*data) + (data==0)*NaN_filler
        if unique:
            # Reshape flat vertex values to 3 vertices per triangle.
            X = num.resize(X, (len(X)/3, 3))
        domain.set_quantity(quantity, X)
    #
    # Set the dynamic quantities interpolated to the requested time.
    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass                       # quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            log.critical('       %s' % str(quantity))
            log.critical('        NaN = %s' % str(NaN))
            log.critical('        max(X)')
            log.critical('       %s' % str(max(X)))
            log.critical('        max(X)==NaN')
            log.critical('       %s' % str(max(X)==NaN))
            log.critical('')
        if max(X) == NaN or min(X) == NaN:
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry' % quantity
                raise DataMissingValuesError, msg
            else:
                data = (X != NaN)
                X = (X*data) + (data==0)*NaN_filler
        if unique:
            X = num.resize(X, (X.shape[0]/3, 3))
        domain.set_quantity(quantity, X)

    fid.close()

    return domain
1071
1072
def get_mesh_and_quantities_from_file(filename,
                                      quantities=None,
                                      verbose=False):
    """Get and rebuild mesh structure and associated quantities from sww file

    Input:
        filename - Name os sww file
        quantities - Names of quantities to load

    Output:
        mesh - instance of class Interpolate
               (including mesh and interpolation functionality)
        quantities - arrays with quantity values at each mesh node
        time - vector of stored timesteps

    This function is used by e.g.:
        get_interpolated_quantities_at_polyline_midpoints

    NOTE(review): the quantities argument is currently ignored - the
    returned dictionary always holds elevation, stage, xmomentum and
    ymomentum, regardless of what is passed in.
    """

    # FIXME (Ole): Maybe refactor filefunction using this more fundamental code.

    import types
    from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh

    if verbose: log.critical('Reading from %s' % filename)

    fid = NetCDFFile(filename, netcdf_mode_r)    # Open existing file for read
    time = fid.variables['time'][:]    # Time vector
    #time += fid.starttime[0]
    # Shift stored (relative) times to absolute model time.
    time += fid.starttime

    # Get the variables as numeric arrays
    x = fid.variables['x'][:]                   # x-coordinates of nodes
    y = fid.variables['y'][:]                   # y-coordinates of nodes


    elevation = fid.variables['elevation'][:]   # Elevation
    stage = fid.variables['stage'][:]           # Water level
    xmomentum = fid.variables['xmomentum'][:]   # Momentum in the x-direction
    ymomentum = fid.variables['ymomentum'][:]   # Momentum in the y-direction



    # Mesh (nodes (Mx2), triangles (Nx3))
    nodes = num.concatenate((x[:,num.newaxis], y[:,num.newaxis]), axis=1)
    triangles = fid.variables['volumes']

    # Get geo_reference
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: #AttributeError, e:
        # Sww files don't have to have a geo_ref
        geo_reference = None

    if verbose: log.critical('    building mesh from sww file %s' % filename)

    boundary = None

    #FIXME (Peter Row): Should this be in mesh?
    # Non-smoothed files store one vertex per triangle corner: collapse
    # duplicate coordinates before building the mesh.
    if fid.smoothing != 'Yes':
        nodes = nodes.tolist()
        triangles = triangles.tolist()
        nodes, triangles, boundary = weed(nodes, triangles, boundary)

    try:
        mesh = Mesh(nodes, triangles, boundary, geo_reference=geo_reference)
    except AssertionError, e:
        fid.close()
        msg = 'Domain could not be created: %s. "' % e
        raise DataDomainError, msg

    quantities = {}
    quantities['elevation'] = elevation
    quantities['stage'] = stage
    quantities['xmomentum'] = xmomentum
    quantities['ymomentum'] = ymomentum

    fid.close()

    return mesh, quantities, time
1153
1154
def get_time_interp(time, t=None):
    """Find the index and interpolation ratio for time interpolation.

    time  array (or sequence) of stored time steps, assumed ascending
    t     the sample time; defaults to the last stored time

    Returns a tuple (index, ratio) such that the interpolated value is
        (1 - ratio) * Q[index] + ratio * Q[index + 1]
    i.e. ratio == 0 means t coincides exactly with time[index].

    Raises DataTimeError if t lies outside [time[0], time[-1]].
    """
    if t is None:
        # Default: last stored time step, no interpolation needed.
        t = time[-1]
        index = -1
        ratio = 0.
    else:
        T = time
        tau = t
        index = 0
        msg = 'Time interval derived from file %s [%s:%s]' \
              % ('FIXMEfilename', T[0], T[-1])
        msg += ' does not match model time: %s' % tau
        # Call form (not the py2-only 'raise E, msg' statement form) so
        # the file stays valid under both Python 2 and 3.
        if tau < time[0]: raise DataTimeError(msg)
        if tau > time[-1]: raise DataTimeError(msg)
        # Walk forwards past tau, then back to the bracketing index.
        while tau > time[index]: index += 1
        while tau < time[index]: index -= 1
        if tau == time[index]:
            # Protect against case where tau == time[-1] (last time)
            # - also works in general when tau == time[i]
            ratio = 0
        else:
            # t is now between index and index+1
            ratio = (tau - time[index])/(time[index+1] - time[index])

    return (index, ratio)
1185
1186
1187
def interpolated_quantity(saved_quantity, time_interp):
    """Interpolate a quantity with respect to time.

    saved_quantity  the quantity values indexed by timestep
    time_interp     (index, ratio) pair as produced by get_time_interp

    Returns the value(s) at the interpolated time: the entry at index
    when ratio is zero, otherwise the linear blend of the entries at
    index and index + 1.
    """

    index, ratio = time_interp

    if ratio <= 0:
        # Exact timestep hit - no blending required.
        return saved_quantity[index]

    # Linear interpolation between the two bracketing timesteps.
    lower = saved_quantity[index]
    upper = saved_quantity[index + 1]
    return (1 - ratio) * lower + ratio * upper
1208
1209
def weed(coordinates, volumes, boundary=None):
    """Excise all duplicate points and remap volumes/boundary to match.

    coordinates  list (or numpy array) of [x, y] vertex coordinates
    volumes      list (or numpy array) of triangles as vertex-index triples
    boundary     optional dict {(vertex0, vertex1): tag}

    Returns (coordinates, volumes, boundary) where duplicate coordinates
    have been removed and every index remapped to the weeded point list.
    If two boundary segments collapse onto the same point pair (in either
    orientation), the label recorded first is kept.
    #FIXME should the boundary attributes be concatenated
    #('exterior, pond') or replaced ('pond')(peter row)
    """
    if isinstance(coordinates, num.ndarray):
        coordinates = coordinates.tolist()
    if isinstance(volumes, num.ndarray):
        volumes = volumes.tolist()

    # Map each point to the index of its first occurrence.
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        if point not in point_dict:
            point_dict[point] = i
        same_point[i] = point

    # Rebuild the coordinate list without duplicates and renumber
    # point_dict with the new, compact indices.
    coordinates = []
    i = 0
    for point in point_dict.keys():
        point = tuple(point)
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1

    # Remap every volume's vertex indices to the weeded numbering.
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    new_boundary = {}
    if not boundary is None:
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]

            if (point0, point1) in new_boundary:
                pass    # keep the label already stored for this segment
            elif (point1, point0) in new_boundary:
                pass    # keep the label stored for the reversed segment
            else:
                new_boundary[(point0, point1)] = label

        boundary = new_boundary

    return coordinates, volumes, boundary
1264
1265
1266
Note: See TracBrowser for help on using the repository browser.