source: trunk/anuga_core/source/anuga/file/sww.py @ 8286

Last change on this file since 8286 was 8286, checked in by steve, 12 years ago

working version of sww_merge

File size: 44.7 KB
Line 
1""" Classes to read an SWW file.
2"""
3
4import exceptions
# Exception hierarchy used by the SWW reading/writing code.
# NOTE: old-style Python 2 exceptions based on the (long deprecated)
# `exceptions` module imported above.
# Fix: the original defined DataMissingValuesError twice; the duplicate
# definition has been removed.
class DataFileNotOpenError(exceptions.Exception): pass
class DataMissingValuesError(exceptions.Exception): pass
class NewQuantity(exceptions.Exception): pass
class DataDomainError(exceptions.Exception): pass
class DataTimeError(exceptions.Exception): pass
11
12
13from anuga.coordinate_transforms.geo_reference import Geo_reference
14from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a
15from anuga.config import netcdf_float, netcdf_float32, netcdf_int
16from anuga.config import max_float
17from anuga.utilities.numerical_tools import ensure_numeric
18import anuga.utilities.log as log
19from Scientific.IO.NetCDF import NetCDFFile
20
21from sts import Write_sts
22
23from anuga.coordinate_transforms.geo_reference import \
24        ensure_geo_reference
25
26from file_utils import create_filename
27import numpy as num
28
class Data_format:
    """Base class giving a uniform interface to ANUGA data file formats.

    Derives the output filename from the domain's data directory, name
    and the format-specific extension, and records basic mesh sizes
    shared by all concrete writers.
    """

    def __init__(self, domain, extension, mode=netcdf_mode_w):
        # Only read, write or append make sense for a data file.
        msg = "Mode %s must be either:\n" % mode + \
              "   'w' (write)\n" + \
              "   'r' (read)\n" + \
              "   'a' (append)"
        assert mode[0] in ['r', 'w', 'a'], msg

        # Target filename, e.g. <datadir>/<name>.<extension>
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        self.domain = domain
        self.timestep = 0

        # Mesh dimensions. Ghost elements of a parallel domain are
        # currently included here (see original FIXME about excluding them).
        self.number_of_nodes = domain.number_of_nodes
        self.number_of_volumes = domain.number_of_triangles
55
class SWW_file(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """

    def __init__(self, domain, 
                 mode=netcdf_mode_w, max_size=2000000000, recursion=False):
        """Create (or open) the .sww file and, in write mode, write its header.

        domain     -- the ANUGA domain whose quantities will be stored
        mode       -- NetCDF open mode; header is only written when mode[0]=='w'
        max_size   -- file size (bytes) at which output rolls over to a
                      new file (see store_timestep)
        recursion  -- internal rollover depth; False for the first file
        """
        from Scientific.IO.NetCDF import NetCDFFile

        self.precision = netcdf_float32 # Use single precision for quantities
        self.recursion = recursion
        self.mode = mode
        # The domain may override the maximum file size and the minimum
        # storable height; otherwise fall back to the arguments/defaults.
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size # File size max is 2Gig
        else:
            self.max_size = max_size
        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            # NOTE(review): default_minimum_storable_height is not imported
            # in the visible part of this file -- TODO confirm it is defined
            # elsewhere in the module, otherwise this branch raises NameError.
            self.minimum_storable_height = default_minimum_storable_height

        # Call parent constructor (sets self.filename, counts, etc.)
        Data_format.__init__(self, domain, 'sww', mode)

        # Partition the quantities the domain wants stored into static
        # (flag 1: written once) and dynamic (flag 2: written per timestep).
        static_quantities = []
        dynamic_quantities = []
        
        for q in domain.quantities_to_be_stored:
            flag = domain.quantities_to_be_stored[q]
        
            msg = 'Quantity %s is requested to be stored ' % q
            msg += 'but it does not exist in domain.quantities'
            assert q in domain.quantities, msg
        
            assert flag in [1,2]
            if flag == 1: static_quantities.append(q)
            if flag == 2: dynamic_quantities.append(q)               
                        
        
        # NetCDF file definition; header is only laid out for a fresh file.
        fid = NetCDFFile(self.filename, mode)
        if mode[0] == 'w':
            description = 'Output from anuga.abstract_2d_finite_volumes ' \
                          'suitable for plotting'
                          
            self.writer = Write_sww(static_quantities, dynamic_quantities)
            self.writer.store_header(fid,
                                     domain.starttime,
                                     self.number_of_volumes,
                                     self.domain.number_of_nodes,
                                     description=description,
                                     smoothing=domain.smooth,
                                     order=domain.default_order,
                                     sww_precision=self.precision)

            # Extra optional information
            if hasattr(domain, 'texture'):
                fid.texture = domain.texture

            # If extrema monitoring is enabled, lay out the per-quantity
            # extrema variables that store_timestep will later fill in.
            if domain.quantities_to_be_monitored is not None:
                fid.createDimension('singleton', 1)
                fid.createDimension('two', 2)

                poly = domain.monitor_polygon
                if poly is not None:
                    N = len(poly)
                    fid.createDimension('polygon_length', N)
                    fid.createVariable('extrema.polygon',
                                       self.precision,
                                       ('polygon_length', 'two'))
                    fid.variables['extrema.polygon'][:] = poly

                interval = domain.monitor_time_interval
                if interval is not None:
                    fid.createVariable('extrema.time_interval',
                                       self.precision,
                                       ('two',))
                    fid.variables['extrema.time_interval'][:] = interval

                for q in domain.quantities_to_be_monitored:
                    fid.createVariable(q + '.extrema', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.min_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.max_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.min_time', self.precision,
                                       ('singleton',))
                    fid.createVariable(q + '.max_time', self.precision,
                                       ('singleton',))

        fid.close()

    def store_connectivity(self):
        """Store information about nodes, triangles and static quantities

        Writes x,y coordinates of triangles and their connectivity.
        
        Store also any quantity that has been identified as static.
        """

        # FIXME: Change name to reflect the fact that this function
        # stores both connectivity (triangulation) and static quantities
        
        from Scientific.IO.NetCDF import NetCDFFile

        domain = self.domain

        # append to the NetCDF file
        fid = NetCDFFile(self.filename, netcdf_mode_a)

        # Get X, Y from one (any) of the quantities; all quantities share
        # the same vertex coordinates.
        Q = domain.quantities.values()[0]
        X,Y,_,V = Q.get_vertex_values(xy=True, precision=self.precision)

        # store the connectivity data (points as an Nx2 array)
        points = num.concatenate((X[:,num.newaxis],Y[:,num.newaxis]), axis=1)
        self.writer.store_triangulation(fid,
                                        points,
                                        V.astype(num.float32),
                                        points_georeference=\
                                            domain.geo_reference)


        # For a parallel domain also record the local-to-global index maps
        # so sww_merge can stitch the per-processor files back together.
        if domain.parallel:
            self.writer.store_parallel_data(fid,
                                        domain.number_of_global_triangles,
                                        domain.number_of_global_nodes,
                                        domain.tri_full_flag,
                                        domain.tri_l2g,
                                        domain.node_l2g)


        # Get names of static quantities
        static_quantities = {}
        for name in self.writer.static_quantities:
            Q = domain.quantities[name]
            A, _ = Q.get_vertex_values(xy=False, 
                                       precision=self.precision)
            static_quantities[name] = A
        
        # Store static quantities        
        self.writer.store_static_quantities(fid, **static_quantities)
                                            
        fid.close()

    def store_timestep(self):
        """Store time and time dependent quantities

        Appends the current domain time and the dynamic quantities to the
        file. If the file would exceed self.max_size, output rolls over to
        a new SWW_file whose name carries the current time.
        """

        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        # Get NetCDF. Retry for a while since a concurrent reader (e.g. a
        # viewer) may temporarily hold the file.
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                # Open existing file
                fid = NetCDFFile(self.filename, netcdf_mode_a)
            except IOError:
                # This could happen if someone was reading the file.
                # In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened' \
                      % self.filename
                msg += ' - trying step %s again' % self.domain.time
                log.critical(msg)
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' % self.filename
            raise DataFileNotOpenError, msg

        # Check to see if the file is already too big:
        # estimate the next step's growth as the average size per stored step.
        time = fid.variables['time']
        i = len(time) + 1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size / i
        if file_size + file_size_increase > self.max_size * 2**self.recursion:
            # In order to get the file name and start time correct,
            # I change the domain.filename and domain.starttime.
            # This is the only way to do this without changing
            # other modules (I think).

            # Write a filename addon that won't break the anuga viewers
            # (10.sww is bad)
            filename_ext = '_time_%s' % self.domain.time
            filename_ext = filename_ext.replace('.', '_')

            # Remember the old filename, then give domain a
            # name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename + filename_ext)

            # Temporarily change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.get_time()

            # Build a new data_structure with an increased recursion depth,
            # which doubles the effective max size for the next file.
            next_data_structure = SWW_file(self.domain, mode=self.mode,
                                           max_size=self.max_size,
                                           recursion=self.recursion+1)
            if not self.recursion:
                log.critical('    file_size = %s' % file_size)
                log.critical('    saving file to %s'
                             % next_data_structure.filename) 

            # Set up the new data_structure
            self.domain.writer = next_data_structure

            # Store connectivity and first timestep
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep()
            fid.sync()
            fid.close()

            # Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            i = len(time)
             
            if 'stage' in self.writer.dynamic_quantities:           
                # Select only those values for stage,
                # xmomentum and ymomentum (if stored) where
                # depth exceeds minimum_storable_height
                #
                # In this branch it is assumed that elevation
                # is also available as a quantity           
           
                Q = domain.quantities['stage']
                w, _ = Q.get_vertex_values(xy=False)
               
                Q = domain.quantities['elevation']
                z, _ = Q.get_vertex_values(xy=False)               
               
                # Boolean mask: True where the cell is wet enough to store.
                storable_indices = (w-z >= self.minimum_storable_height)
            else:
                # Very unlikely branch
                storable_indices = None # This means take all
           
           
            # Now store dynamic quantities
            dynamic_quantities = {}
            for name in self.writer.dynamic_quantities:
                netcdf_array = fid.variables[name]
               
                Q = domain.quantities[name]
                A, _ = Q.get_vertex_values(xy=False,
                                           precision=self.precision)

                if storable_indices is not None:
                    if name == 'stage':
                        # Where too dry, store the bed elevation instead
                        # of the stage (choose picks z where mask is False).
                        A = num.choose(storable_indices, (z, A))

                    if name in ['xmomentum', 'ymomentum']:
                        # Get xmomentum where depth exceeds
                        # minimum_storable_height
                       
                        # Define a zero vector of same size and type as A
                        # for use with momenta
                        null = num.zeros(num.size(A), A.dtype.char)
                        A = num.choose(storable_indices, (null, A))
               
                dynamic_quantities[name] = A
               
                                       
            # Store dynamic quantities
            self.writer.store_quantities(fid,
                                         time=self.domain.time,
                                         sww_precision=self.precision,
                                         **dynamic_quantities)


            # Update extrema if requested
            domain = self.domain
            if domain.quantities_to_be_monitored is not None:
                for q, info in domain.quantities_to_be_monitored.items():
                    if info['min'] is not None:
                        fid.variables[q + '.extrema'][0] = info['min']
                        fid.variables[q + '.min_location'][:] = \
                                        info['min_location']
                        fid.variables[q + '.min_time'][0] = info['min_time']

                    if info['max'] is not None:
                        fid.variables[q + '.extrema'][1] = info['max']
                        fid.variables[q + '.max_location'][:] = \
                                        info['max_location']
                        fid.variables[q + '.max_time'][0] = info['max_time']

            # Flush and close
            fid.sync()
            fid.close()
367
368
class Read_sww:
    """Reader for a NetCDF .sww file.

    On construction the time vector, mesh and all quantities at frame 0
    are loaded; read_quantities() can then be called to move to another
    time slice.
    """

    def __init__(self, source):
        """The source parameter is assumed to be a NetCDF sww file.
        """

        self.source = source
        self.frame_number = 0

        # Read the time vector first so we know how many frames exist.
        fid = NetCDFFile(self.source, 'r')
        self.time = num.array(fid.variables['time'], num.float)
        fid.close()

        self.last_frame_number = self.time.shape[0] - 1
        self.frames = num.arange(self.last_frame_number + 1)

        self.read_mesh()

        self.quantities = {}
        self.read_quantities()

    def read_mesh(self):
        """Load triangle connectivity, vertex coordinates and their bounds."""
        fid = NetCDFFile(self.source, 'r')

        self.vertices = num.array(fid.variables['volumes'], num.int)
        self.x = num.array(fid.variables['x'], num.float)
        self.y = num.array(fid.variables['y'], num.float)

        fid.close()

        assert len(self.x) == len(self.y)

        # Bounding box of the mesh.
        self.xmin = num.min(self.x)
        self.xmax = num.max(self.x)
        self.ymin = num.min(self.y)
        self.ymax = num.max(self.y)

    def read_quantities(self, frame_number=0):
        """Load every quantity at time index frame_number.

        Returns the dict of quantities, each reshaped to (triangles, 3).
        """
        assert frame_number >= 0 and frame_number <= self.last_frame_number

        self.frame_number = frame_number

        # Three vertices per triangle.
        M = len(self.x) // 3

        fid = NetCDFFile(self.source, 'r')

        # Everything except coordinates, connectivity, time and the
        # auxiliary *_range variables is a quantity.
        excluded = ('x', 'y', 'time', 'volumes')
        for name in fid.variables.keys():
            if name in excluded or '_range' in name:
                continue
            var = fid.variables[name]
            if len(var.shape) == 1:
                # Static quantity: a single value per vertex.
                values = num.ravel(num.array(var, num.float))
            else:
                # Time-varying: pick out the requested frame.
                values = num.array(var[self.frame_number], num.float)
            self.quantities[name] = values.reshape(M, 3)
        fid.close()
        return self.quantities

    def get_bounds(self):
        """Return [xmin, xmax, ymin, ymax] of the mesh."""
        return [self.xmin, self.xmax, self.ymin, self.ymax]

    def get_last_frame_number(self):
        """Return the index of the final time slice."""
        return self.last_frame_number

    def get_time(self):
        """Return the time (secs) of the currently loaded frame."""
        return self.time[self.frame_number]
455
456
457class Write_sww(Write_sts):
458    """
459        A class to write an SWW file.
460       
461        It is domain agnostic, and requires all the data to be fed in
462        manually.
463    """
464
465    def __init__(self, static_quantities, dynamic_quantities):
466        """Initialise Write_sww with two list af quantity names:
467       
468        static_quantities (e.g. elevation or friction):
469            Stored once at the beginning of the simulation in a 1D array
470            of length number_of_points   
471        dynamic_quantities (e.g stage):
472            Stored every timestep in a 2D array with
473            dimensions number_of_points X number_of_timesteps       
474       
475        """
476        self.static_quantities = static_quantities   
477        self.dynamic_quantities = dynamic_quantities
478
479
    def store_header(self,
                     outfile,
                     times,
                     number_of_volumes,
                     number_of_points,
                     description='Generated by ANUGA',
                     smoothing=True,
                     order=1,
                     sww_precision=netcdf_float32,
                     verbose=False):
        """Write an SWW file header.

        Writes the first section of the .sww file: global attributes,
        dimensions and the variables for coordinates, connectivity and
        all static/dynamic quantities (values are filled in later).

        outfile - the open file that will be written
        times - A list of the time slice times OR a start time
        Note, if a list is given the info will be made relative.
        number_of_volumes - the number of triangles
        number_of_points - the number of vertices in the mesh
        """

        from anuga.abstract_2d_finite_volumes.util \
            import get_revision_number

        outfile.institution = 'Geoscience Australia'
        outfile.description = description

        # For sww compatibility
        if smoothing is True:
            # Smoothing to be depreciated
            outfile.smoothing = 'Yes'
            outfile.vertices_are_stored_uniquely = 'False'
        else:
            # Smoothing to be depreciated
            outfile.smoothing = 'No'
            outfile.vertices_are_stored_uniquely = 'True'
        outfile.order = order

        try:
            revision_number = get_revision_number()
        except:
            # This will be triggered if the system cannot get the SVN
            # revision number.
            revision_number = None
        # Allow None to be stored as a string
        outfile.revision_number = str(revision_number)

        # This is being used to seperate one number from a list.
        # what it is actually doing is sorting lists from numeric arrays.
        if isinstance(times, (list, num.ndarray)):
            number_of_times = len(times)
            times = ensure_numeric(times)
            if number_of_times == 0:
                starttime = 0
            else:
                starttime = times[0]
                times = times - starttime  #Store relative times
        else:
            # A scalar was given: treat it as the start time with no
            # pre-existing time slices.
            number_of_times = 0
            starttime = times


        outfile.starttime = starttime

        # dimension definitions
        outfile.createDimension('number_of_volumes', number_of_volumes)
        outfile.createDimension('number_of_vertices', 3)
        outfile.createDimension('numbers_in_range', 2)

        if smoothing is True:
            outfile.createDimension('number_of_points', number_of_points)
            # FIXME(Ole): This will cause sww files for parallel domains to
            # have ghost nodes stored (but not used by triangles).
            # To clean this up, we have to change get_vertex_values and
            # friends in quantity.py (but I can't be bothered right now)
        else:
            # Without smoothing every triangle stores its own three vertices.
            outfile.createDimension('number_of_points', 3*number_of_volumes)

        outfile.createDimension('number_of_timesteps', number_of_times)

        # variable definitions
        outfile.createVariable('x', sww_precision, ('number_of_points',))
        outfile.createVariable('y', sww_precision, ('number_of_points',))

        outfile.createVariable('volumes', netcdf_int, ('number_of_volumes',
                                                       'number_of_vertices'))


        # One data variable plus a <name>_range companion per static quantity.
        for q in self.static_quantities:
           
            outfile.createVariable(q, sww_precision,
                                   ('number_of_points',))
           
            outfile.createVariable(q + Write_sww.RANGE, sww_precision,
                                   ('numbers_in_range',))
                                   
            # Initialise ranges with small and large sentinels.
            # If this was in pure Python we could have used None sensibly
            outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
            outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max

       
        # Dynamic quantities (and the time variable) are laid out by the
        # shared helper inherited from Write_sts.
        self.write_dynamic_quantities(outfile, self.dynamic_quantities, times, \
                                        precis = sww_precision)
584
585
586
    def store_triangulation(self,
                            outfile,
                            points_utm,
                            volumes,
                            zone=None, 
                            new_origin=None,
                            points_georeference=None, 
                            verbose=False):
        """
        Store triangulation data in the underlying file.
       
        Stores the points and triangle indices in the sww file.
       
        outfile - open handle to the underlying NetCDF file.
        points_utm - a list or array of the points in UTM.
        volumes - triangle connectivity (indices into points_utm).
        zone - UTM zone, used only when a new georeference must be made up.
        new_origin - a georeference that the points can be set to.
        points_georeference - the georeference of points_utm.
        verbose - True if this function is to be verbose.

        Georeference resolution:

        If you get both new_origin and points_georeference, the points are
        converted from the old reference to the new one.

        If you only get new_origin, the points are absolute; they are
        converted to be relative to it.

        If you only get points_georeference the points are relative; they
        are stored as-is against that reference.

        If you get no georefs a new georef is created based on the minimums
        of points_utm.  (Another option would be to default to absolute.)
        If you don't supply either geo_ref, supply a zone; otherwise the
        default zone will be used.

        precon:
            store_header has been called.
        """

        number_of_points = len(points_utm)
        volumes = num.array(volumes)
        points_utm = num.array(points_utm)

        # Given the two geo_refs and the points, do the stuff
        # described in the method header
        # if this is needed else where, pull out as a function
        points_georeference = ensure_geo_reference(points_georeference)
        new_origin = ensure_geo_reference(new_origin)
        if new_origin is None and points_georeference is not None:
            # Points already relative to their own reference; store as-is.
            points = points_utm
            geo_ref = points_georeference
        else:
            if new_origin is None:
                # No references at all: anchor at the lower-left of the data.
                new_origin = Geo_reference(zone, min(points_utm[:,0]),
                                                 min(points_utm[:,1]))
            points = new_origin.change_points_geo_ref(points_utm,
                                                      points_georeference)
            geo_ref = new_origin

        # At this stage I need a georef and points
        # the points are relative to the georef
        geo_ref.write_NetCDF(outfile)

        # This will put the geo ref in the middle
        #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)

        x =  points[:,0]
        y =  points[:,1]

        # Stored in single precision (matches the 'x'/'y' variables created
        # in store_header).
        x = x.astype(netcdf_float32)
        y = y.astype(netcdf_float32)
       

        if verbose:
            log.critical('------------------------------------------------')
            log.critical('More Statistics:')
            log.critical('  Extent (/lon):')
            log.critical('    x in [%f, %f], len(lat) == %d'
                         % (min(x), max(x), len(x)))
            log.critical('    y in [%f, %f], len(lon) == %d'
                         % (min(y), max(y), len(y)))
            #log.critical('    z in [%f, %f], len(z) == %d'
            #             % (min(elevation), max(elevation), len(elevation)))
            log.critical('geo_ref: %s' % str(geo_ref))
            log.critical('------------------------------------------------')


        outfile.variables['x'][:] = x #- geo_ref.get_xllcorner()
        outfile.variables['y'][:] = y #- geo_ref.get_yllcorner()
        outfile.variables['volumes'][:] = volumes.astype(num.int32) #On Opteron 64
687
688
689    def store_parallel_data(self,
690                            outfile,
691                            number_of_global_triangles,
692                            number_of_global_nodes,
693                            tri_full_flag = None,
694                            tri_l2g = None,
695                            node_l2g = None,
696                            sww_precision=netcdf_float32,
697                            verbose=False):
698
699
700         # dimension definitions
701        #outfile.createDimension('number_of_volumes', number_of_volumes)
702        #outfile.createDimension('number_of_vertices', 3)
703        #outfile.createDimension('numbers_in_range', 2)
704
705        #print 'store parallel data'
706        outfile.number_of_global_triangles = number_of_global_triangles
707        outfile.number_of_global_nodes = number_of_global_nodes
708
709        # variable definitions
710        outfile.createVariable('tri_l2g',  netcdf_int, ('number_of_volumes',))
711        outfile.createVariable('node_l2g', netcdf_int, ('number_of_points',))
712        outfile.createVariable('tri_full_flag', netcdf_int, ('number_of_volumes',))
713
714        #print tri_l2g.shape
715        #print tri_l2g
716        #print outfile.variables['tri_l2g'].shape
717
718        outfile.variables['tri_l2g'][:] = tri_l2g.astype(num.int32)
719
720        #print node_l2g.shape
721        #print node_l2g
722        #print outfile.variables['node_l2g'].shape
723
724        outfile.variables['node_l2g'][:] = node_l2g.astype(num.int32)
725
726        #print tri_full_flag.shape
727        #print tri_full_flag
728        #print outfile.variables['tri_full_flag'].shape
729
730        outfile.variables['tri_full_flag'][:] = tri_full_flag.astype(num.int32)
731
732
733    def store_static_quantities(self, 
734                                outfile, 
735                                sww_precision=num.float32,
736                                verbose=False, 
737                                **quant):
738        """
739        Write the static quantity info.
740
741        **quant is extra keyword arguments passed in. These must be
742          the numpy arrays to be stored in the sww file at each timestep.
743
744        The argument sww_precision allows for storing as either
745        * single precision (default): num.float32
746        * double precision: num.float64 or num.float
747
748        Precondition:
749            store_triangulation and
750            store_header have been called.
751        """
752
753        # The dictionary quant must contain numpy arrays for each name.
754        # These will typically be quantities from Domain such as friction
755        #
756        # Arrays not listed in static_quantitiues will be ignored, silently.
757        #
758        # This method will also write the ranges for each quantity,
759        # e.g. stage_range, xmomentum_range and ymomentum_range
760        for q in self.static_quantities:
761            if not quant.has_key(q):
762                msg = 'Values for quantity %s was not specified in ' % q
763                msg += 'store_quantities so they cannot be stored.'
764                raise NewQuantity, msg
765            else:
766                q_values = ensure_numeric(quant[q])
767               
768                x = q_values.astype(sww_precision)
769                outfile.variables[q][:] = x
770       
771                # This populates the _range values
772                outfile.variables[q + Write_sww.RANGE][0] = num.min(x)
773                outfile.variables[q + Write_sww.RANGE][1] = num.max(x)
774                   
775        # FIXME: Hack for backwards compatibility with old viewer
776        #if 'elevation' in self.static_quantities:
777        #    outfile.variables['z'][:] = outfile.variables['elevation'][:]
778
779                   
780                   
781       
782       
783    def store_quantities(self, 
784                         outfile, 
785                         sww_precision=num.float32,
786                         slice_index=None,
787                         time=None,
788                         verbose=False, 
789                         **quant):
790        """
791        Write the quantity info at each timestep.
792
793        **quant is extra keyword arguments passed in. These must be
794          the numpy arrays to be stored in the sww file at each timestep.
795
796        if the time array is already been built, use the slice_index
797        to specify the index.
798
799        Otherwise, use time to increase the time dimension
800
801        Maybe make this general, but the viewer assumes these quantities,
802        so maybe we don't want it general - unless the viewer is general
803       
804        The argument sww_precision allows for storing as either
805        * single precision (default): num.float32
806        * double precision: num.float64 or num.float
807
808        Precondition:
809            store_triangulation and
810            store_header have been called.
811        """
812
813        if time is not None:
814            file_time = outfile.variables['time']
815            slice_index = len(file_time)
816            file_time[slice_index] = time
817        else:
818            slice_index = int(slice_index) # Has to be cast in case it was numpy.int   
819
820        # Write the named dynamic quantities
821        # The dictionary quant must contain numpy arrays for each name.
822        # These will typically be the conserved quantities from Domain
823        # (Typically stage,  xmomentum, ymomentum).
824        #
825        # Arrays not listed in dynamic_quantitiues will be ignored, silently.
826        #
827        # This method will also write the ranges for each quantity,
828        # e.g. stage_range, xmomentum_range and ymomentum_range
829        for q in self.dynamic_quantities:
830            if not quant.has_key(q):
831                msg = 'Values for quantity %s was not specified in ' % q
832                msg += 'store_quantities so they cannot be stored.'
833                raise NewQuantity, msg
834            else:
835                q_values = ensure_numeric(quant[q])
836               
837                q_retyped = q_values.astype(sww_precision)
838                outfile.variables[q][slice_index] = q_retyped
839                   
840                # This updates the _range values
841                q_range = outfile.variables[q + Write_sww.RANGE][:]
842                q_values_min = num.min(q_values)
843                if q_values_min < q_range[0]:
844                    outfile.variables[q + Write_sww.RANGE][0] = q_values_min
845                q_values_max = num.max(q_values)
846                if q_values_max > q_range[1]:
847                    outfile.variables[q + Write_sww.RANGE][1] = q_values_max
848
849    def verbose_quantities(self, outfile):
850        log.critical('------------------------------------------------')
851        log.critical('More Statistics:')
852        for q in self.dynamic_quantities:
853            log.critical(%s in [%f, %f]'
854                         % (q, outfile.variables[q+Write_sww.RANGE][0],
855                            outfile.variables[q+Write_sww.RANGE][1]))
856        log.critical('------------------------------------------------')
857
858
859
860
def extent_sww(file_name):
    """Read in an sww file, then get its extents

    Input:
    file_name - the sww file

    Output:
    A list: [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
    """

    from Scientific.IO.NetCDF import NetCDFFile

    # Get NetCDF
    fid = NetCDFFile(file_name, netcdf_mode_r)

    # Get the variables
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    stage = fid.variables['stage'][:]

    fid.close()

    # Use numpy reductions throughout: the original mixed builtin
    # min/max (element-wise Python iteration over numpy arrays) with
    # num.min/num.max; results are identical for the 1-D x and y arrays
    # but the numpy forms are consistent and faster.
    return [num.min(x), num.max(x), num.min(y), num.max(y),
            num.min(stage), num.max(stage)]
884
885
def load_sww_as_domain(filename, boundary=None, t=None,
               fail_if_NaN=True, NaN_filler=0,
               verbose=False, very_verbose=False):
    """
    Load an sww file into a domain.

    Usage: domain = load_sww_as_domain('file.sww',
                        t=time (default = last time in file))

    filename      name of the sww file to load
    boundary      optional boundary dictionary attached to the new domain
    t             model time to extract; quantities with a time dimension
                  are linearly interpolated to this time (defaults to the
                  last stored time)
    fail_if_NaN   if True, raise DataMissingValuesError when a quantity
                  contains its missing_value marker; otherwise replace
                  those entries with NaN_filler
    NaN_filler    replacement value for missing data when fail_if_NaN
                  is False
    verbose       log coarse progress messages
    very_verbose  additionally log per-quantity NaN diagnostics

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
    give a different final boundary, or crash.
    """
   
    from Scientific.IO.NetCDF import NetCDFFile
    from anuga.shallow_water.shallow_water_domain import Domain

    # initialise NaN.
    # Default missing-value marker; overridden below by each variable's
    # own missing_value attribute when present.
    NaN = 9.969209968386869e+036

    if verbose: log.critical('Reading from %s' % filename)

    fid = NetCDFFile(filename, netcdf_mode_r)    # Open existing file for read
    time = fid.variables['time']       # Timesteps
    if t is None:
        t = time[-1]
    # (index, ratio) locating t within the stored time vector
    time_interp = get_time_interp(time,t)

    # Get the variables as numeric arrays
    x = fid.variables['x'][:]                   # x-coordinates of vertices
    y = fid.variables['y'][:]                   # y-coordinates of vertices
    elevation = fid.variables['elevation']      # Elevation
    stage = fid.variables['stage']              # Water level
    xmomentum = fid.variables['xmomentum']      # Momentum in the x-direction
    ymomentum = fid.variables['ymomentum']      # Momentum in the y-direction

    starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:]       # Connectivity
    coordinates = num.transpose(num.asarray([x.tolist(), y.tolist()]))
    # FIXME (Ole): Something like this might be better:
    #                 concatenate((x, y), axis=1)
    # or              concatenate((x[:,num.newaxis], x[:,num.newaxis]), axis=1)

    conserved_quantities = []
    interpolated_quantities = {}
    other_quantities = []

    # get geo_reference
    try:                             # sww files don't have to have a geo_ref
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: # AttributeError, e:
        geo_reference = None

    if verbose: log.critical('    getting quantities')

    # Partition file variables: anything with a time dimension is treated
    # as a conserved quantity and interpolated to time t; the rest are
    # static ("other") quantities.
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity] = \
                  interpolated_quantity(fid.variables[quantity][:], time_interp)
        else:
            other_quantities.append(quantity)

    # Drop file variables that are mesh structure or bookkeeping,
    # not domain quantities.
    other_quantities.remove('x')
    other_quantities.remove('y')
    #other_quantities.remove('z')
    other_quantities.remove('volumes')
    try:
        # Range variables are only present in newer sww files.
        # NOTE: if stage_range is absent the remaining removes are
        # skipped too - they are assumed to come as a set.
        other_quantities.remove('stage_range')
        other_quantities.remove('xmomentum_range')
        other_quantities.remove('ymomentum_range')
        other_quantities.remove('elevation_range')
    except:
        pass

    conserved_quantities.remove('time')

    if verbose: log.critical('    building domain')

    #    From domain.Domain:
    #    domain = Domain(coordinates, volumes,\
    #                    conserved_quantities = conserved_quantities,\
    #                    other_quantities = other_quantities,zone=zone,\
    #                    xllcorner=xllcorner, yllcorner=yllcorner)

    # From shallow_water.Domain:
    coordinates = coordinates.tolist()
    volumes = volumes.tolist()
    # FIXME:should this be in mesh? (peter row)
    # Non-smoothed files store one vertex per triangle corner, so
    # duplicate points must be welded back together before building
    # the domain.
    if fid.smoothing == 'Yes':
        unique = False
    else:
        unique = True
    if unique:
        coordinates, volumes, boundary = weed(coordinates, volumes,boundary)



    try:
        domain = Domain(coordinates, volumes, boundary, starttime=(float(starttime) + float(t)))
    except AssertionError, e:
        fid.close()
        msg = 'Domain could not be created: %s. ' \
              'Perhaps use "fail_if_NaN=False and NaN_filler = ..."' % e
        raise DataDomainError, msg

    if not boundary is None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    # Transfer static quantities, masking/filling missing values.
    for quantity in other_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass                       # quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            log.critical('       %s' % str(quantity))
            log.critical('        NaN = %s' % str(NaN))
            log.critical('        max(X)')
            log.critical('       %s' % str(max(X)))
            log.critical('        max(X)==NaN')
            log.critical('       %s' % str(max(X)==NaN))
            log.critical('')
        if max(X) == NaN or min(X) == NaN:
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry' % quantity
                raise DataMissingValuesError, msg
            else:
                # Zero out missing entries and substitute the filler.
                data = (X != NaN)
                X = (X*data) + (data==0)*NaN_filler
        if unique:
            # Reshape flat vertex values back to one row of 3 vertex
            # values per triangle (Python 2 integer division intended).
            X = num.resize(X, (len(X)/3, 3))
        domain.set_quantity(quantity, X)
    #
    # Transfer time-interpolated conserved quantities the same way.
    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass                       # quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            log.critical('       %s' % str(quantity))
            log.critical('        NaN = %s' % str(NaN))
            log.critical('        max(X)')
            log.critical('       %s' % str(max(X)))
            log.critical('        max(X)==NaN')
            log.critical('       %s' % str(max(X)==NaN))
            log.critical('')
        if max(X) == NaN or min(X) == NaN:
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry' % quantity
                raise DataMissingValuesError, msg
            else:
                data = (X != NaN)
                X = (X*data) + (data==0)*NaN_filler
        if unique:
            X = num.resize(X, (X.shape[0]/3, 3))
        domain.set_quantity(quantity, X)

    fid.close()

    return domain
1052
1053
1054def get_mesh_and_quantities_from_file(filename,
1055                                      quantities=None,
1056                                      verbose=False):
1057    """Get and rebuild mesh structure and associated quantities from sww file
1058
1059    Input:
1060        filename - Name os sww file
1061        quantities - Names of quantities to load
1062
1063    Output:
1064        mesh - instance of class Interpolate
1065               (including mesh and interpolation functionality)
1066        quantities - arrays with quantity values at each mesh node
1067        time - vector of stored timesteps
1068
1069    This function is used by e.g.:
1070        get_interpolated_quantities_at_polyline_midpoints
1071    """
1072
1073    # FIXME (Ole): Maybe refactor filefunction using this more fundamental code.
1074
1075    import types
1076    from Scientific.IO.NetCDF import NetCDFFile
1077    from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh
1078
1079    if verbose: log.critical('Reading from %s' % filename)
1080
1081    fid = NetCDFFile(filename, netcdf_mode_r)    # Open existing file for read
1082    time = fid.variables['time'][:]    # Time vector
1083    time += fid.starttime[0]
1084
1085    # Get the variables as numeric arrays
1086    x = fid.variables['x'][:]                   # x-coordinates of nodes
1087    y = fid.variables['y'][:]                   # y-coordinates of nodes
1088    elevation = fid.variables['elevation'][:]   # Elevation
1089    stage = fid.variables['stage'][:]           # Water level
1090    xmomentum = fid.variables['xmomentum'][:]   # Momentum in the x-direction
1091    ymomentum = fid.variables['ymomentum'][:]   # Momentum in the y-direction
1092
1093    # Mesh (nodes (Mx2), triangles (Nx3))
1094    nodes = num.concatenate((x[:,num.newaxis], y[:,num.newaxis]), axis=1)
1095    triangles = fid.variables['volumes'][:]
1096
1097    # Get geo_reference
1098    try:
1099        geo_reference = Geo_reference(NetCDFObject=fid)
1100    except: #AttributeError, e:
1101        # Sww files don't have to have a geo_ref
1102        geo_reference = None
1103
1104    if verbose: log.critical('    building mesh from sww file %s' % filename)
1105
1106    boundary = None
1107
1108    #FIXME (Peter Row): Should this be in mesh?
1109    if fid.smoothing != 'Yes':
1110        nodes = nodes.tolist()
1111        triangles = triangles.tolist()
1112        nodes, triangles, boundary = weed(nodes, triangles, boundary)
1113
1114    try:
1115        mesh = Mesh(nodes, triangles, boundary, geo_reference=geo_reference)
1116    except AssertionError, e:
1117        fid.close()
1118        msg = 'Domain could not be created: %s. "' % e
1119        raise DataDomainError, msg
1120
1121    quantities = {}
1122    quantities['elevation'] = elevation
1123    quantities['stage'] = stage
1124    quantities['xmomentum'] = xmomentum
1125    quantities['ymomentum'] = ymomentum
1126
1127    fid.close()
1128
1129    return mesh, quantities, time
1130
1131
def get_time_interp(time, t=None):
    """Locate sample time t within the vector of stored timesteps.

    time  array of stored time steps
    t     the sample time (defaults to the final stored time)

    Returns (index, ratio): t lies a fraction `ratio` of the way
    between time[index] and time[index+1]; ratio is 0 on an exact hit.

    Raises DataTimeError if t falls outside [time[0], time[-1]].
    """
    if t is None:
        # Default to the last stored timestep - no interpolation needed.
        return (-1, 0.)

    msg = 'Time interval derived from file %s [%s:%s]' \
          % ('FIXMEfilename', time[0], time[-1])
    msg += ' does not match model time: %s' % t
    if t < time[0]:
        raise DataTimeError(msg)
    if t > time[-1]:
        raise DataTimeError(msg)

    # Walk forward past t, then back to the bracketing index.
    k = 0
    while t > time[k]:
        k += 1
    while t < time[k]:
        k -= 1

    if t == time[k]:
        # Exact hit - also covers t == time[-1] (last time).
        return (k, 0)

    # t is strictly between time[k] and time[k+1].
    return (k, (t - time[k]) / (time[k+1] - time[k]))
1162
1163
1164
def interpolated_quantity(saved_quantity, time_interp):
    """Interpolate a stored quantity with respect to time.

    saved_quantity  the time-indexed quantity values to interpolate
    time_interp     (index, ratio) pair, as produced by get_time_interp

    Returns the quantity at the requested time: a linear blend of the
    values at index and index+1 when ratio > 0, otherwise the value at
    index unchanged.
    """
    index, ratio = time_interp
    values = saved_quantity

    if ratio <= 0:
        # Exact timestep - return it as-is.
        return values[index]

    # Linear interpolation between consecutive timesteps.
    return (1 - ratio) * values[index] + ratio * values[index + 1]
1185
1186
def weed(coordinates, volumes, boundary=None):
    """Excise all duplicate points from a mesh description.

    coordinates  list/array of [x, y] point coordinates, possibly with
                 repeated points
    volumes      list/array of index triples referencing coordinates;
                 negative indices are treated as sentinels and left alone
    boundary     optional dict mapping index pairs to labels.
                 NOTE(review): keys are treated here as *point*-index
                 pairs - confirm against callers, since domain boundaries
                 elsewhere are usually (triangle, edge) pairs.

    Returns (coordinates, volumes, boundary) with duplicate points
    removed and all indices remapped to the weeded coordinate list.
    Duplicate boundary segments (in either orientation) keep the first
    label encountered; later ones are ignored.
    """
    if isinstance(coordinates, num.ndarray):
        coordinates = coordinates.tolist()
    if isinstance(volumes, num.ndarray):
        volumes = volumes.tolist()

    # Map each original index to its point, and each distinct point to
    # one canonical index (first occurrence wins).  The original code
    # also tracked a dead, misnamed 'unique' flag - removed.
    point_dict = {}
    same_point = {}
    for i, coord in enumerate(coordinates):
        point = tuple(coord)
        same_point[i] = point
        if point not in point_dict:
            point_dict[point] = i

    # Rebuild the coordinate list without duplicates, renumbering the
    # unique points consecutively.
    coordinates = []
    for i, point in enumerate(point_dict.keys()):
        coordinates.append(list(point))
        point_dict[point] = i

    # Remap triangle vertex indices in place.
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    if boundary is not None:
        new_boundary = {}
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            # FIXME should the boundary attributes be concatenated
            # ('exterior, pond') or replaced ('pond') (peter row)
            # The original wrote no-op self-assignments for existing
            # keys; the effective policy - first label wins for a
            # segment in either orientation - is made explicit here.
            if (point0, point1) not in new_boundary and \
               (point1, point0) not in new_boundary:
                new_boundary[(point0, point1)] = label

        boundary = new_boundary

    return coordinates, volumes, boundary
1241
1242
1243
Note: See TracBrowser for help on using the repository browser.