source: trunk/anuga_core/source/anuga/file/sww.py @ 9573

Last change on this file since 9573 was 9551, checked in by steve, 10 years ago

Cleaning up the imports

File size: 55.5 KB
Line 
1""" Classes to read an SWW file.
2"""
3
import exceptions

# Exception hierarchy used by the sww file handling code.
# Only DataFileNotOpenError is raised in this module (when an sww file
# cannot be opened for append); the others are presumably raised by
# callers elsewhere in the package -- TODO confirm their exact usage.
class DataFileNotOpenError(exceptions.Exception): pass   # File could not be (re)opened
class DataMissingValuesError(exceptions.Exception): pass # Expected values absent from file
class NewQuantity(exceptions.Exception): pass            # Unknown quantity encountered
class DataDomainError(exceptions.Exception): pass        # Domain-related inconsistency
class DataTimeError(exceptions.Exception): pass          # Time-related inconsistency
10
11import numpy
12from anuga.coordinate_transforms.geo_reference import Geo_reference
13from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a
14from anuga.config import netcdf_float, netcdf_float32, netcdf_int, netcdf_float64
15from anuga.config import max_float
16from anuga.utilities.numerical_tools import ensure_numeric
17import anuga.utilities.log as log
18from anuga.file.netcdf import NetCDFFile
19
20from anuga.config import minimum_storable_height as default_minimum_storable_height
21
22from sts import Write_sts
23
24from anuga.coordinate_transforms.geo_reference import \
25        ensure_geo_reference
26
27from anuga.utilities.file_utils import create_filename
28import numpy as num
29
class Data_format:
    """Generic interface to data formats
    """

    def __init__(self, domain, extension, mode=netcdf_mode_w):
        """Set up the filename and mesh bookkeeping shared by all writers.

        domain    -- the domain whose data is to be stored
        extension -- file extension used to build the filename (e.g. 'sww')
        mode      -- netcdf mode string; first character must be one of
                     'r' (read), 'w' (write) or 'a' (append)
        """
        assert mode[0] in ('r', 'w', 'a'), (
            "Mode %s must be either:\n" % mode
            + "   'w' (write)\n"
            + "   'r' (read)\n"
            + "   'a' (append)")

        # Build the output filename from the domain's data directory and name
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(),
                                        extension)

        self.timestep = 0
        self.domain = domain

        # Probably should exclude ghosts in case this is a parallel domain
        self.number_of_nodes = domain.number_of_nodes
        self.number_of_volumes = domain.number_of_triangles
56
class SWW_file(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """

    def __init__(self, domain, 
                 mode=netcdf_mode_w, max_size=200000000000, recursion=False):
        """Create the sww file and (in write mode) store its header.

        domain    -- the domain to be stored
        mode      -- netcdf mode ('w' triggers header creation)
        max_size  -- maximum file size before the output is split into a
                     new file (see store_timestep)
        recursion -- split counter; False/0 for the initial file, then
                     incremented each time a continuation file is created
        """

        self.precision = netcdf_float32 # Use single precision for quantities
        self.recursion = recursion
        self.mode = mode

        # Domain attributes override the defaults when present
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size # File size max is 2Gig
        else:
            self.max_size = max_size

        if hasattr(domain, 'store_centroids'):
            self.store_centroids = domain.store_centroids
        else:
            self.store_centroids = False

        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            self.minimum_storable_height = default_minimum_storable_height

        # Call parent constructor
        Data_format.__init__(self, domain, 'sww', mode)

        # Get static and dynamic quantities from domain.
        # flag == 1 means store once (static);
        # flag == 2 means store every timestep (dynamic).
        # Centroid variants get a '_c' suffix appended to the name.
        static_quantities = []
        dynamic_quantities = []
        static_c_quantities = []
        dynamic_c_quantities = []

        for q in domain.quantities_to_be_stored:
            flag = domain.quantities_to_be_stored[q]

            msg = 'Quantity %s is requested to be stored ' % q
            msg += 'but it does not exist in domain.quantities'
            assert q in domain.quantities, msg

            assert flag in [1,2]
            if flag == 1:
                static_quantities.append(q)
                if self.store_centroids: static_c_quantities.append(q+'_c')

            if flag == 2:
                dynamic_quantities.append(q)
                if self.store_centroids: dynamic_c_quantities.append(q+'_c')

        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)
        if mode[0] == 'w':
            description = 'Output from anuga.file.sww ' \
                          'suitable for plotting'

            self.writer = Write_sww(static_quantities,
                                    dynamic_quantities,
                                    static_c_quantities,
                                    dynamic_c_quantities)

            self.writer.store_header(fid,
                                     domain.starttime,
                                     self.number_of_volumes,
                                     self.domain.number_of_nodes,
                                     description=description,
                                     smoothing=domain.smooth,
                                     order=domain.default_order,
                                     sww_precision=self.precision)

            # Extra optional information
            if hasattr(domain, 'texture'):
                fid.texture = domain.texture

            # Create variables holding extrema information for each
            # monitored quantity (filled in later by store_timestep)
            if domain.quantities_to_be_monitored is not None:
                fid.createDimension('singleton', 1)
                fid.createDimension('two', 2)

                poly = domain.monitor_polygon
                if poly is not None:
                    N = len(poly)
                    fid.createDimension('polygon_length', N)
                    fid.createVariable('extrema.polygon',
                                       self.precision,
                                       ('polygon_length', 'two'))
                    fid.variables['extrema.polygon'][:] = poly

                interval = domain.monitor_time_interval
                if interval is not None:
                    fid.createVariable('extrema.time_interval',
                                       self.precision,
                                       ('two',))
                    fid.variables['extrema.time_interval'][:] = interval

                for q in domain.quantities_to_be_monitored:
                    fid.createVariable(q + '.extrema', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.min_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.max_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.min_time', self.precision,
                                       ('singleton',))
                    fid.createVariable(q + '.max_time', self.precision,
                                       ('singleton',))

        fid.close()

    def store_connectivity(self):
        """Store information about nodes, triangles and static quantities

        Writes x,y coordinates of triangles and their connectivity.

        Store also any quantity that has been identified as static.
        """

        # FIXME: Change name to reflect the fact that this function
        # stores both connectivity (triangulation) and static quantities

        domain = self.domain

        # append to the NetCDF file
        fid = NetCDFFile(self.filename, netcdf_mode_a)

        # Get X, Y from one (any) of the quantities
        Q = domain.quantities.values()[0]
        X,Y,_,V = Q.get_vertex_values(xy=True, precision=self.precision)

        # store the connectivity data
        points = num.concatenate((X[:,num.newaxis],Y[:,num.newaxis]), axis=1)
        self.writer.store_triangulation(fid,
                                        points,
                                        V.astype(num.float32),
                                        points_georeference=\
                                        domain.geo_reference)

        # Parallel domains additionally store local-to-global mappings
        if domain.parallel:
            self.writer.store_parallel_data(fid,
                                        domain.number_of_global_triangles,
                                        domain.number_of_global_nodes,
                                        domain.tri_full_flag,
                                        domain.tri_l2g,
                                        domain.node_l2g)

        # Collect vertex and centroid values of static quantities
        static_quantities = {}
        static_quantities_centroid = {}

        for name in self.writer.static_quantities:
            Q = domain.quantities[name]
            A, _ = Q.get_vertex_values(xy=False, 
                                       precision=self.precision)
            static_quantities[name] = A

        for name in self.writer.static_c_quantities:
            Q = domain.quantities[name[:-2]]  # rip off _c from name
            static_quantities_centroid[name] = Q.centroid_values

        # Store static quantities
        self.writer.store_static_quantities(fid, **static_quantities)
        self.writer.store_static_quantities_centroid(fid, **static_quantities_centroid)

        fid.close()

    def store_timestep(self):
        """Store time and time dependent quantities

        If the file would exceed self.max_size, a new continuation file
        is created (with '_time_<t>' appended to the name) and storage
        continues there.
        """

        from time import sleep
        from os import stat

        # Get NetCDF. The file may be temporarily locked by a reader,
        # so retry opening it a few times before giving up.
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                # Open existing file
                fid = NetCDFFile(self.filename, netcdf_mode_a)
            except IOError:
                # This could happen if someone was reading the file.
                # In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened' \
                      % self.filename
                msg += ' - trying step %s again' % self.domain.time
                log.critical(msg)
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' % self.filename
            raise DataFileNotOpenError, msg

        # Check to see if the file is already too big:
        time = fid.variables['time'][:]

        # Estimate the size increase of one more timestep from the
        # average size per stored step so far
        i = len(time) + 1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size / i
        if file_size + file_size_increase > self.max_size * 2**self.recursion:
            # In order to get the file name and start time correct,
            # I change the domain.filename and domain.starttime.
            # This is the only way to do this without changing
            # other modules (I think).

            # Write a filename addon that won't break the anuga viewers
            # (10.sww is bad)
            filename_ext = '_time_%s' % self.domain.time
            filename_ext = filename_ext.replace('.', '_')

            # Remember the old filename, then give domain a
            # name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename + filename_ext)

            # Temporarily change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.get_time()

            # Build a new data_structure.
            next_data_structure = SWW_file(self.domain, mode=self.mode,
                                           max_size=self.max_size,
                                           recursion=self.recursion+1)
            if not self.recursion:
                log.critical('    file_size = %s' % file_size)
                log.critical('    saving file to %s'
                             % next_data_structure.filename)

            # Set up the new data_structure
            self.domain.writer = next_data_structure

            # Store connectivity and first timestep
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep()
            fid.sync()
            fid.close()

            # Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time'][:]
            i = len(time)

            if 'stage' in self.writer.dynamic_quantities:
                # Select only those values for stage,
                # xmomentum and ymomentum (if stored) where
                # depth exceeds minimum_storable_height
                #
                # In this branch it is assumed that elevation
                # is also available as a quantity

                # Smoothing for the get_vertex_values will be obtained
                # from the smooth setting in domain

                Q = domain.quantities['stage']
                w, _ = Q.get_vertex_values(xy=False)

                Q = domain.quantities['elevation']
                z, _ = Q.get_vertex_values(xy=False)

                storable_indices = num.array(w-z >= self.minimum_storable_height)
            else:
                # Very unlikely branch
                storable_indices = None # This means take all

            # Now store dynamic quantities
            dynamic_quantities = {}
            dynamic_quantities_centroid = {}

            for name in self.writer.dynamic_quantities:
                Q = domain.quantities[name]
                A, _ = Q.get_vertex_values(xy=False,
                                           precision=self.precision)

                if storable_indices is not None:
                    if name == 'stage':
                        # Where depth is below threshold, store the bed
                        # elevation instead of the stage
                        A = num.choose(storable_indices, (z, A))

                    if name in ['xmomentum', 'ymomentum']:
                        # Get xmomentum where depth exceeds
                        # minimum_storable_height

                        # Define a zero vector of same size and type as A
                        # for use with momenta
                        null = num.zeros(num.size(A), A.dtype.char)
                        A = num.choose(storable_indices, (null, A))

                dynamic_quantities[name] = A

            for name in self.writer.dynamic_c_quantities:
                Q = domain.quantities[name[:-2]]
                dynamic_quantities_centroid[name] = Q.centroid_values

            # Store dynamic quantities
            slice_index = self.writer.store_quantities(fid,
                                         time=self.domain.time,
                                         sww_precision=self.precision,
                                         **dynamic_quantities)

            # Store dynamic centroid quantities at the same time slice
            if self.store_centroids:
                self.writer.store_quantities_centroid(fid,
                                                      slice_index= slice_index,
                                                      sww_precision=self.precision,
                                                      **dynamic_quantities_centroid)

            # Update extrema if requested
            domain = self.domain
            if domain.quantities_to_be_monitored is not None:
                for q, info in domain.quantities_to_be_monitored.items():
                    if info['min'] is not None:
                        fid.variables[q + '.extrema'][0] = info['min']
                        fid.variables[q + '.min_location'][:] = \
                                        info['min_location']
                        fid.variables[q + '.min_time'][0] = info['min_time']

                    if info['max'] is not None:
                        fid.variables[q + '.extrema'][1] = info['max']
                        fid.variables[q + '.max_location'][:] = \
                                        info['max_location']
                        fid.variables[q + '.max_time'][0] = info['max_time']

            # Flush and close
            #fid.sync()
            fid.close()
412
413
class Read_sww:
    """Reader for the time vector, mesh and quantities of an sww file."""

    def __init__(self, source):
        """The source parameter is assumed to be a NetCDF sww file.
        """
        self.source = source

        # Index of the most recently loaded time slice
        self.frame_number = 0

        fin = NetCDFFile(self.source, 'r')

        self.time = num.array(fin.variables['time'][:], num.float)
        self.last_frame_number = self.time.shape[0] - 1

        self.frames = num.arange(self.last_frame_number+1)

        fin.close()

        self.read_mesh()

        self.quantities = {}

        self.read_quantities()

    def read_mesh(self):
        """Read and store the mesh data contained within this sww file.
        """
        fin = NetCDFFile(self.source, 'r')

        # Triangle connectivity (indices into the x/y arrays)
        self.vertices = num.array(fin.variables['volumes'][:], num.int)

        self.x = x = num.array(fin.variables['x'][:], num.float)
        self.y = y = num.array(fin.variables['y'][:], num.float)

        assert len(self.x) == len(self.y)

        # Bounding rectangle of the mesh
        self.xmin = num.min(x)
        self.xmax = num.max(x)
        self.ymin = num.min(y)
        self.ymax = num.max(y)

        fin.close()

    def read_quantities(self, frame_number=0):
        """Read the quantities contained in this file.

        frame_number is the time index to load.

        Returns the dict mapping quantity name -> (M, 3) array of vertex
        values, where M is the number of triangles.
        """
        assert frame_number >= 0 and frame_number <= self.last_frame_number

        self.frame_number = frame_number

        # Use floor division so M is guaranteed to be an integer:
        # reshape(M, 3) below requires an int, and plain '/' yields a
        # float under true division (Python 3 or 'from __future__').
        # Under Python 2 integer division this is byte-identical.
        M = len(self.x)//3

        fin = NetCDFFile(self.source, 'r')

        # Skip coordinates, connectivity, time, and auxiliary variables
        # (_range sentinels, _c centroid values); the rest are quantities.
        for q in filter(lambda n:n != 'x' and n != 'y' and n != 'time' and n != 'volumes' and \
                        '_range' not in n and '_c' not in n , \
                        fin.variables.keys()):
            if len(fin.variables[q].shape) == 1: # Not a time-varying quantity
                self.quantities[q] = num.ravel(num.array(fin.variables[q][:], num.float)).reshape(M,3)
            else: # Time-varying, get the current timestep data
                self.quantities[q] = num.array(fin.variables[q][self.frame_number], num.float).reshape(M,3)
        fin.close()
        return self.quantities

    def get_bounds(self):
        """Get the bounding rect around the mesh as [xmin, xmax, ymin, ymax].
        """
        return [self.xmin, self.xmax, self.ymin, self.ymax]

    def get_last_frame_number(self):
        """Return the last loaded frame index.
        """
        return self.last_frame_number

    def get_time(self):
        """Get time at the current frame num, in secs.
        """
        return self.time[self.frame_number]
501
502
503class Write_sww(Write_sts):
504    """
505        A class to write an SWW file.
506       
507        It is domain agnostic, and requires all the data to be fed in
508        manually.
509    """
510
511    def __init__(self,
512                 static_quantities,
513                 dynamic_quantities,
514                 static_c_quantities = [],
515                 dynamic_c_quantities = []):
516       
517        """Initialise Write_sww with two (or 4) list af quantity names:
518       
519        static_quantities (e.g. elevation or friction):
520            Stored once at the beginning of the simulation in a 1D array
521            of length number_of_points   
522        dynamic_quantities (e.g stage):
523            Stored every timestep in a 2D array with
524            dimensions number_of_points X number_of_timesteps
525
526        static_c_quantities (e.g. elevation_c or friction_c):
527            Stored once at the beginning of the simulation in a 1D array
528            of length number_of_triangles 
529        dynamic_c_quantities (e.g stage_c):
530            Stored every timestep in a 2D array with
531            dimensions number_of_triangles X number_of_timesteps
532       
533        """
534        self.static_quantities = static_quantities   
535        self.dynamic_quantities = dynamic_quantities
536        self.static_c_quantities = static_c_quantities
537        self.dynamic_c_quantities = dynamic_c_quantities
538
539        self.store_centroids = False
540        if static_c_quantities or dynamic_c_quantities:
541            self.store_centroids = True
542
543
    def store_header(self,
                     outfile,
                     times,
                     number_of_volumes,
                     number_of_points,
                     description='Generated by ANUGA',
                     smoothing=True,
                     order=1,
                     sww_precision=netcdf_float32,
                     verbose=False):
        """Write an SWW file header.

        Writes the first section of the .sww file.

        outfile - the open file that will be written
        times - A list of the time slice times OR a start time
        Note, if a list is given the info will be made relative.
        number_of_volumes - the number of triangles
        number_of_points - the number of vertices in the mesh
        description - text stored in the file's 'description' attribute
        smoothing - if True, vertices are shared between triangles and
                    the 'number_of_points' dimension is number_of_points;
                    otherwise each triangle has its own three vertices
        order - stored in the file's 'order' attribute
        sww_precision - NetCDF type for coordinate and quantity variables
        """

        from anuga.abstract_2d_finite_volumes.util \
            import get_revision_number

        outfile.institution = 'Geoscience Australia'
        outfile.description = description

        # For sww compatibility
        if smoothing is True:
            # Smoothing to be depreciated
            outfile.smoothing = 'Yes'
            outfile.vertices_are_stored_uniquely = 'False'
        else:
            # Smoothing to be depreciated
            outfile.smoothing = 'No'
            outfile.vertices_are_stored_uniquely = 'True'
        outfile.order = order

        try:
            revision_number = get_revision_number()
        except:
            # This will be triggered if the system cannot get the SVN
            # revision number.
            revision_number = None
        # Allow None to be stored as a string
        outfile.revision_number = str(revision_number)

        # This is being used to seperate one number from a list.
        # what it is actually doing is sorting lists from numeric arrays.
        if isinstance(times, (list, num.ndarray)):
            number_of_times = len(times)
            times = ensure_numeric(times)
            if number_of_times == 0:
                starttime = 0
            else:
                starttime = times[0]
                times = times - starttime  #Store relative times
        else:
            number_of_times = 0
            starttime = times

        outfile.starttime = starttime

        # dimension definitions
        outfile.createDimension('number_of_volumes', number_of_volumes)
        outfile.createDimension('number_of_triangle_vertices', number_of_points)
        outfile.createDimension('number_of_vertices', 3)
        outfile.createDimension('numbers_in_range', 2)

        if smoothing is True:
            outfile.createDimension('number_of_points', number_of_points)
            # FIXME(Ole): This will cause sww files for parallel domains to
            # have ghost nodes stored (but not used by triangles).
            # To clean this up, we have to change get_vertex_values and
            # friends in quantity.py (but I can't be bothered right now)
        else:
            outfile.createDimension('number_of_points', 3*number_of_volumes)

        outfile.createDimension('number_of_timesteps', number_of_times)

        # variable definitions
        outfile.createVariable('x', sww_precision, ('number_of_points',))
        outfile.createVariable('y', sww_precision, ('number_of_points',))

        outfile.createVariable('volumes', netcdf_int , ('number_of_volumes',
                                                       'number_of_vertices'))

        for q in self.static_quantities:

            outfile.createVariable(q, sww_precision,
                                   ('number_of_points',))

            outfile.createVariable(q + Write_sww.RANGE, sww_precision,
                                   ('numbers_in_range',))

            # Initialise ranges with small and large sentinels.
            # If this was in pure Python we could have used None sensibly
            outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
            outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max

        # Static centroid quantities are stored per triangle (volume)
        for q in self.static_c_quantities:
            outfile.createVariable(q, sww_precision,
                                   ('number_of_volumes',))

        # Create the time-dependent variables and store the time vector
        self.write_dynamic_quantities(outfile, times, precis = sww_precision)

        outfile.sync()
656
657
658
    def store_triangulation(self,
                            outfile,
                            points_utm,
                            volumes,
                            zone=None, 
                            new_origin=None,
                            points_georeference=None, 
                            verbose=False):
        """
        Store triangulation data in the underlying file.

        Stores the points and triangle indices in the sww file

        outfile Open handle to underlying file.

        new_origin georeference that the points can be set to.

        points_georeference The georeference of the points_utm.

        verbose True if this function is to be verbose.

        new_origin - qa georeference that the points can be set to. (Maybe
        do this before calling this function.)

        points_utm - currently a list or array of the points in UTM.
        points_georeference - the georeference of the points_utm

        How about passing new_origin and current_origin.
        If you get both, do a convertion from the old to the new.

        If you only get new_origin, the points are absolute,
        convert to relative

        if you only get the current_origin the points are relative, store
        as relative.

        if you get no georefs create a new georef based on the minimums of
        points_utm.  (Another option would be to default to absolute)

        Yes, and this is done in another part of the code.
        Probably geospatial.

        If you don't supply either geo_refs, then supply a zone. If not
        the default zone will be used.

        precon:
            header has been called.
        """

        number_of_points = len(points_utm)
        # Connectivity: one row of three vertex indices per triangle
        volumes = num.array(volumes,num.int32).reshape(-1,3)

        points_utm = num.array(points_utm)

        # Given the two geo_refs and the points, do the stuff
        # described in the method header
        # if this is needed else where, pull out as a function
        points_georeference = ensure_geo_reference(points_georeference)
        new_origin = ensure_geo_reference(new_origin)
        if new_origin is None and points_georeference is not None:
            # Points stay relative to their existing georeference
            points = points_utm
            geo_ref = points_georeference
        else:
            if new_origin is None:
                # No georef supplied: anchor at the minimum corner
                new_origin = Geo_reference(zone, min(points_utm[:,0]),
                                                 min(points_utm[:,1]))
            points = new_origin.change_points_geo_ref(points_utm,
                                                      points_georeference)
            geo_ref = new_origin

        # At this stage I need a georef and points
        # the points are relative to the georef
        geo_ref.write_NetCDF(outfile)

        # This will put the geo ref in the middle
        #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)

        x =  points[:,0]
        y =  points[:,1]

        if verbose:
            log.critical('------------------------------------------------')
            log.critical('More Statistics:')
            log.critical('  Extent (/lon):')
            log.critical('    x in [%f, %f], len(lat) == %d'
                         % (min(x), max(x), len(x)))
            log.critical('    y in [%f, %f], len(lon) == %d'
                         % (min(y), max(y), len(y)))
            #log.critical('    z in [%f, %f], len(z) == %d'
            #             % (min(elevation), max(elevation), len(elevation)))
            log.critical('geo_ref: %s' % str(geo_ref))
            log.critical('------------------------------------------------')

        outfile.variables['x'][:] = x #- geo_ref.get_xllcorner()
        outfile.variables['y'][:] = y #- geo_ref.get_yllcorner()

        msg = 'Mismatch between shape of volumes array and (number_of_volumes , 3)'
        assert volumes.shape == outfile.variables['volumes'].shape, msg

        outfile.variables['volumes'][:] = volumes
766
767
768
769    def write_dynamic_quantities(self, outfile, 
770                                 times, precis = netcdf_float32, verbose = False):   
771        """
772            Write out given quantities to file.
773        """
774       
775
776        for q in self.dynamic_quantities:
777            outfile.createVariable(q, precis, ('number_of_timesteps',
778                                               'number_of_points'))
779            outfile.createVariable(q + Write_sts.RANGE, precis,
780                                   ('numbers_in_range',))
781           
782            # Initialise ranges with small and large sentinels.
783            # If this was in pure Python we could have used None sensibly
784            outfile.variables[q+Write_sts.RANGE][0] = max_float  # Min
785            outfile.variables[q+Write_sts.RANGE][1] = -max_float # Max
786
787        for q in self.dynamic_c_quantities:
788            outfile.createVariable(q, precis, ('number_of_timesteps',
789                                                    'number_of_volumes'))
790
791        # Doing sts_precision instead of Float gives cast errors.
792        outfile.createVariable('time', netcdf_float, ('number_of_timesteps',))
793
794        if isinstance(times, (list, num.ndarray)):
795            outfile.variables['time'][:] = times    # Store time relative
796
797        if verbose:
798            log.critical('------------------------------------------------')
799            log.critical('Statistics:')
800            log.critical('    t in [%f, %f], len(t) == %d'
801                         % (num.min(times), num.max(times), len(times.flat)))
802
803    def store_parallel_data(self,
804                            outfile,
805                            number_of_global_triangles,
806                            number_of_global_nodes,
807                            tri_full_flag = None,
808                            tri_l2g = None,
809                            node_l2g = None,
810                            sww_precision=netcdf_float32,
811                            verbose=False):
812
813
814        # dimension definitions
815        #outfile.createDimension('number_of_volumes', number_of_volumes)
816        #outfile.createDimension('number_of_vertices', 3)
817        #outfile.createDimension('numbers_in_range', 2)
818
819        #print 'store parallel data'
820        outfile.number_of_global_triangles = number_of_global_triangles
821        outfile.number_of_global_nodes = number_of_global_nodes
822
823        # variable definitions
824        outfile.createVariable('tri_l2g',  netcdf_int, ('number_of_volumes',))
825        outfile.createVariable('node_l2g', netcdf_int, ('number_of_triangle_vertices',))
826        outfile.createVariable('tri_full_flag', netcdf_int, ('number_of_volumes',))
827
828        #print tri_l2g.shape
829        #print tri_l2g
830        #print outfile.variables['tri_l2g'].shape
831
832        outfile.variables['tri_l2g'][:] = tri_l2g.astype(num.int32)
833
834        #print node_l2g.shape
835        #print node_l2g
836        #print outfile.variables['node_l2g'].shape
837
838        outfile.variables['node_l2g'][:] = node_l2g.astype(num.int32)
839
840        #print tri_full_flag.shape
841        #print tri_full_flag
842        #print outfile.variables['tri_full_flag'].shape
843
844        outfile.variables['tri_full_flag'][:] = tri_full_flag.astype(num.int32)
845
846
847
848    def store_static_quantities(self, 
849                                outfile, 
850                                sww_precision=num.float32,
851                                verbose=False, 
852                                **quant):
853        """
854        Write the static quantity info.
855
856        **quant is extra keyword arguments passed in. These must be
857          the numpy arrays to be stored in the sww file at each timestep.
858
859        The argument sww_precision allows for storing as either
860        * single precision (default): num.float32
861        * double precision: num.float64 or num.float
862
863        Precondition:
864            store_triangulation and
865            store_header have been called.
866        """
867
868        # The dictionary quant must contain numpy arrays for each name.
869        # These will typically be quantities from Domain such as friction
870        #
871        # Arrays not listed in static_quantitiues will be ignored, silently.
872        #
873        # This method will also write the ranges for each quantity,
874        # e.g. stage_range, xmomentum_range and ymomentum_range
875        for q in self.static_quantities:
876            if not quant.has_key(q):
877                msg = 'Values for quantity %s was not specified in ' % q
878                msg += 'store_quantities so they cannot be stored.'
879                raise NewQuantity, msg
880            else:
881                q_values = ensure_numeric(quant[q])
882               
883                x = q_values.astype(sww_precision)
884                outfile.variables[q][:] = x
885       
886                # This populates the _range values
887                outfile.variables[q + Write_sww.RANGE][0] = num.min(x)
888                outfile.variables[q + Write_sww.RANGE][1] = num.max(x)
889                   
890        # FIXME: Hack for backwards compatibility with old viewer
891        #if 'elevation' in self.static_quantities:
892        #    outfile.variables['z'][:] = outfile.variables['elevation'][:]
893
894                   
895
896    def store_static_quantities_centroid(self, 
897                                            outfile, 
898                                            sww_precision=num.float32,
899                                            verbose=False, 
900                                            **quant):
901        """
902        Write the static centroid quantity info.
903
904        **quant is extra keyword arguments passed in. These must be
905          the numpy arrays to be stored in the sww file at each timestep.
906
907        The argument sww_precision allows for storing as either
908        * single precision (default): num.float32
909        * double precision: num.float64 or num.float
910
911        Precondition:
912            store_triangulation and
913            store_header have been called.
914        """
915
916        # The dictionary quant must contain numpy arrays for each name.
917        # These will typically be quantities from Domain such as friction
918        #
919        # Arrays not listed in static_quantitiues will be ignored, silently.
920        #
921        # This method will also write the ranges for each quantity,
922        # e.g. stage_range, xmomentum_range and ymomentum_range
923
924        #print outfile.variables.keys()
925        #print self.static_c_quantities
926       
927        for q in self.static_c_quantities:
928            if not quant.has_key(q):
929                msg = 'Values for quantity %s was not specified in ' % q
930                msg += 'store_quantities so they cannot be stored.'
931                raise NewQuantity, msg
932            else:
933                q_values = ensure_numeric(quant[q])
934               
935                x = q_values.astype(sww_precision)
936                outfile.variables[q][:] = x
937       
938
939                   
940       
941
942    def store_quantities(self, 
943                         outfile, 
944                         sww_precision=num.float32,
945                         slice_index=None,
946                         time=None,
947                         verbose=False, 
948                         **quant):
949        """
950        Write the quantity info at each timestep.
951
952        **quant is extra keyword arguments passed in. These must be
953          the numpy arrays to be stored in the sww file at each timestep.
954
955        if the time array is already been built, use the slice_index
956        to specify the index.
957
958        Otherwise, use time to increase the time dimension
959
960        Maybe make this general, but the viewer assumes these quantities,
961        so maybe we don't want it general - unless the viewer is general
962       
963        The argument sww_precision allows for storing as either
964        * single precision (default): num.float32
965        * double precision: num.float64 or num.float
966
967        Precondition:
968            store_triangulation and
969            store_header have been called.
970        """
971
972        if time is not None:
973            file_time = outfile.variables['time']
974            slice_index = len(file_time)
975            # check if time already saved as in check pointing
976            if slice_index > 0:
977                if time <= file_time[slice_index-1]:
978                    check = numpy.where(numpy.abs(file_time[:]-time)< 1.0e-14)
979                    slice_index = int(check[0][0])
980            file_time[slice_index] = time
981        else:
982            slice_index = int(slice_index) # Has to be cast in case it was numpy.int   
983
984        # Write the named dynamic quantities
985        # The dictionary quant must contain numpy arrays for each name.
986        # These will typically be the conserved quantities from Domain
987        # (Typically stage,  xmomentum, ymomentum).
988        #
989        # Arrays not listed in dynamic_quantitiues will be ignored, silently.
990        #
991        # This method will also write the ranges for each quantity,
992        # e.g. stage_range, xmomentum_range and ymomentum_range
993        for q in self.dynamic_quantities:
994            if not quant.has_key(q):
995                msg = 'Values for quantity %s was not specified in ' % q
996                msg += 'store_quantities so they cannot be stored.'
997                raise NewQuantity, msg
998            else:
999                q_values = ensure_numeric(quant[q])
1000               
1001                q_retyped = q_values.astype(sww_precision)
1002                outfile.variables[q][slice_index] = q_retyped
1003                   
1004                # This updates the _range values
1005                q_range = outfile.variables[q + Write_sww.RANGE][:]
1006                q_values_min = num.min(q_values)
1007                if q_values_min < q_range[0]:
1008                    outfile.variables[q + Write_sww.RANGE][0] = q_values_min
1009                q_values_max = num.max(q_values)
1010                if q_values_max > q_range[1]:
1011                    outfile.variables[q + Write_sww.RANGE][1] = q_values_max
1012                   
1013        return slice_index
1014                   
1015                   
1016
1017    def store_quantities_centroid(self, 
1018                                  outfile, 
1019                                  sww_precision=num.float32,
1020                                  slice_index=None,
1021                                  verbose=False, 
1022                                  **quant):
1023        """
1024        Write the quantity centroid info at each timestep.
1025
1026        **quant is extra keyword arguments passed in. These must be
1027          the numpy arrays to be stored in the sww file at each timestep.
1028
1029        if the time array is already been built, use the slice_index
1030        to specify the index.
1031
1032        Otherwise, use time to increase the time dimension
1033
1034        Maybe make this general, but the viewer assumes these quantities,
1035        so maybe we don't want it general - unless the viewer is general
1036       
1037        The argument sww_precision allows for storing as either
1038        * single precision (default): num.float32
1039        * double precision: num.float64 or num.float
1040
1041        Precondition:
1042            store_triangulation and
1043            store_header have been called.
1044        """
1045
1046        assert slice_index is not None, 'slice_index should be set in store_quantities'
1047         
1048
1049        # Write the named dynamic quantities
1050        # The dictionary quant must contain numpy arrays for each name.
1051        # These will typically be the conserved quantities from Domain
1052        # (Typically stage,  xmomentum, ymomentum).
1053        #
1054        # Arrays not listed in dynamic_quantitiues will be ignored, silently.
1055        #
1056        # This method will also write the ranges for each quantity,
1057        # e.g. stage_range, xmomentum_range and ymomentum_range
1058       
1059        #print 50*"="
1060        #print quant
1061        #print self.dynamic_c_quantities
1062
1063        for q in self.dynamic_c_quantities:
1064            if not quant.has_key(q):
1065                msg = 'Values for quantity %s was not specified in ' % q
1066                msg += 'store_quantities so they cannot be stored.'
1067                raise NewQuantity, msg
1068            else:
1069                q_values = ensure_numeric(quant[q])
1070               
1071                q_retyped = q_values.astype(sww_precision)
1072                outfile.variables[q][slice_index] = q_retyped
1073
1074
1075       
1076
1077    def verbose_quantities(self, outfile):
1078        log.critical('------------------------------------------------')
1079        log.critical('More Statistics:')
1080        for q in self.dynamic_quantities:
1081            log.critical(%s in [%f, %f]'
1082                         % (q, outfile.variables[q+Write_sww.RANGE][0],
1083                            outfile.variables[q+Write_sww.RANGE][1]))
1084        log.critical('------------------------------------------------')
1085
1086
1087
1088
def extent_sww(file_name):
    """Read in an sww file, then get its extents

    Input:
    file_name - the sww file

    Output:
    A list: [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
    """

    # Get NetCDF
    fid = NetCDFFile(file_name, netcdf_mode_r)

    # Ensure the file is closed even if reading a variable fails.
    try:
        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        stage = fid.variables['stage'][:]
    finally:
        fid.close()

    return [min(x), max(x), min(y), max(y), num.min(stage), num.max(stage)]
1111
1112
def load_sww_as_domain(filename, boundary=None, t=None,
               fail_if_NaN=True, NaN_filler=0,
               verbose=False, very_verbose=False):
    """
    Load an sww file into a domain.

    Usage: domain = load_sww_as_domain('file.sww',
                        t=time (default = last time in file))

    filename      name of the sww file to load
    boundary      optional boundary dictionary.  Not recommended if
                  domain.smooth was not selected, as it uses unique
                  coordinates but not unique boundaries.  This means the
                  boundary file will not be compatible with the
                  coordinates, and will give a different final boundary,
                  or crash.
    t             time to extract (default: last time in the file)
    fail_if_NaN   raise DataMissingValuesError on missing-value entries
    NaN_filler    replacement value when fail_if_NaN is False

    Raises DataTimeError if t is outside the stored time range and
    DataDomainError if the Domain cannot be constructed.
    """

    from anuga.shallow_water.shallow_water_domain import Domain

    # Default NetCDF missing-value marker; overridden per-variable below
    # when a missing_value attribute is present.
    NaN = 9.969209968386869e+036

    if verbose: log.critical('Reading from %s' % filename)

    fid = NetCDFFile(filename, netcdf_mode_r)    # Open existing file for read
    time = fid.variables['time'][:]       # Timesteps
    if t is None:
        t = time[-1]
    time_interp = get_time_interp(time, t)

    # Get the variables as numeric arrays
    x = fid.variables['x'][:]                   # x-coordinates of vertices
    y = fid.variables['y'][:]                   # y-coordinates of vertices

    starttime = float(fid.starttime)
    volumes = fid.variables['volumes'][:]       # Connectivity
    coordinates = num.transpose(num.asarray([x.tolist(), y.tolist()]))
    # FIXME (Ole): Something like this might be better:
    #                 concatenate((x, y), axis=1)
    # or              concatenate((x[:,num.newaxis], x[:,num.newaxis]), axis=1)

    dynamic_quantities = []
    interpolated_quantities = {}
    static_quantities = []

    # Get geo_reference; sww files don't have to have one.
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except Exception:
        geo_reference = None

    if verbose: log.critical('    getting quantities')

    # Classify each stored variable as dynamic (time-dependent) or static
    # and interpolate the dynamic ones to the requested time.
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            dynamic_quantities.append(quantity)
            interpolated_quantities[quantity] = \
                  interpolated_quantity(fid.variables[quantity][:], time_interp)
        else:
            static_quantities.append(quantity)

    # Weed out auxiliary variables (centroid values, ranges, mesh data)
    # that must not be set as domain quantities.  Each name is removed
    # individually so a missing name does not prevent removal of the
    # remaining ones (the old grouped try/except stopped at the first
    # missing name).
    for name in ['stage_c', 'xmomentum_c', 'ymomentum_c',
                 'elevation_c', 'friction_c']:
        if name in dynamic_quantities:
            dynamic_quantities.remove(name)
        if name in static_quantities:
            static_quantities.remove(name)

    for name in ['x', 'y', 'volumes',
                 'stage_range', 'xmomentum_range', 'ymomentum_range',
                 'elevation_range', 'friction_range']:
        if name in static_quantities:
            static_quantities.remove(name)

    dynamic_quantities.remove('time')

    if verbose: log.critical('    building domain')

    # From shallow_water.Domain:
    coordinates = coordinates.tolist()
    volumes = volumes.tolist()
    # FIXME: should this be in mesh? (peter row)
    if fid.smoothing == 'Yes':
        unique = False
    else:
        unique = True
    if unique:
        coordinates, volumes, boundary = weed(coordinates, volumes, boundary)

    try:
        domain = Domain(coordinates, volumes, boundary,
                        starttime=(float(starttime) + float(t)))
    except AssertionError as e:
        fid.close()
        msg = 'Domain could not be created: %s. ' \
              'Perhaps use "fail_if_NaN=False and NaN_filler = ..."' % e
        raise DataDomainError(msg)

    if boundary is not None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    for quantity in static_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except Exception:
            pass                       # quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            log.critical('       %s' % str(quantity))
            log.critical('        NaN = %s' % str(NaN))
            log.critical('        max(X)')
            log.critical('       %s' % str(max(X)))
            log.critical('        max(X)==NaN')
            log.critical('       %s' % str(max(X)==NaN))
            log.critical('')
        if max(X) == NaN or min(X) == NaN:
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry' % quantity
                raise DataMissingValuesError(msg)
            else:
                # Replace missing values with NaN_filler.
                data = (X != NaN)
                X = (X*data) + (data==0)*NaN_filler
        if unique:
            # Three vertex values per triangle; // keeps integer shape
            # under both Python 2 and 3.
            X = num.resize(X, (len(X)//3, 3))
        domain.set_quantity(quantity, X)

    for quantity in dynamic_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except Exception:
            pass                       # quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            log.critical('       %s' % str(quantity))
            log.critical('        NaN = %s' % str(NaN))
            log.critical('        max(X)')
            log.critical('       %s' % str(max(X)))
            log.critical('        max(X)==NaN')
            log.critical('       %s' % str(max(X)==NaN))
            log.critical('')
        if max(X) == NaN or min(X) == NaN:
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry' % quantity
                raise DataMissingValuesError(msg)
            else:
                data = (X != NaN)
                X = (X*data) + (data==0)*NaN_filler
        if unique:
            X = num.resize(X, (X.shape[0]//3, 3))
        domain.set_quantity(quantity, X)

    fid.close()

    return domain
1299
1300
def get_mesh_and_quantities_from_file(filename,
                                      quantities=None,
                                      verbose=False):
    """Get and rebuild mesh structure and associated quantities from sww file

    Input:
        filename - Name of sww file
        quantities - Names of quantities to load
        verbose - log progress if True

    Output:
        mesh - instance of class Interpolate
               (including mesh and interpolation functionality)
        quantities - arrays with quantity values at each mesh node
        time - vector of stored timesteps

    Raises DataDomainError if the mesh cannot be constructed.

    This function is used by e.g.:
        get_interpolated_quantities_at_polyline_midpoints
    """

    # FIXME (Ole): Maybe refactor filefunction using this more fundamental code.

    from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh

    if verbose: log.critical('Reading from %s' % filename)

    fid = NetCDFFile(filename, netcdf_mode_r)    # Open existing file for read
    time = fid.variables['time'][:]    # Time vector
    time += fid.starttime

    # Get the variables as numeric arrays
    x = fid.variables['x'][:]                   # x-coordinates of nodes
    y = fid.variables['y'][:]                   # y-coordinates of nodes

    elevation = fid.variables['elevation'][:]   # Elevation
    stage = fid.variables['stage'][:]           # Water level
    xmomentum = fid.variables['xmomentum'][:]   # Momentum in the x-direction
    ymomentum = fid.variables['ymomentum'][:]   # Momentum in the y-direction

    # Mesh (nodes (Mx2), triangles (Nx3))
    nodes = num.concatenate((x[:,num.newaxis], y[:,num.newaxis]), axis=1)
    triangles = fid.variables['volumes'][:]

    # Get geo_reference; sww files don't have to have one.
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except Exception:
        geo_reference = None

    if verbose: log.critical('    building mesh from sww file %s' % filename)

    boundary = None

    # FIXME (Peter Row): Should this be in mesh?
    if fid.smoothing != 'Yes':
        nodes = nodes.tolist()
        triangles = triangles.tolist()
        nodes, triangles, boundary = weed(nodes, triangles, boundary)

    try:
        mesh = Mesh(nodes, triangles, boundary, geo_reference=geo_reference)
    except AssertionError as e:
        fid.close()
        msg = 'Domain could not be created: %s. "' % e
        raise DataDomainError(msg)

    def gather(quantity):
        """Average per-triangle-vertex values onto unique mesh nodes.

        No-op when the array is already node-based.
        """

        shape = quantity.shape

        def my_num_add_at(a, indices, b):
            """
            Use the numpy add.at operation if it is available (numpy >= 1.8),
            otherwise use a quick and dirty implementation via a python loop.
            The loop handles repeated indices correctly, unlike the
            vectorised a[indices] += b.
            """

            try:
                num.add.at(a, indices, b)
            except AttributeError:
                # Old numpy without ufunc.at
                n_ids = len(indices)
                b_array = num.zeros_like(indices, dtype=float)
                b_array[:] = b

                for n in range(n_ids):
                    a[indices[n]] = a[indices[n]] + b_array[n]

        if len(shape) == 2:
            # time array
            if shape[1] == len(mesh.nodes):
                return quantity
            # need to calculate unique vertex values
            n_time = shape[0]
            n_nodes = len(mesh.nodes)

            mesh_ids = mesh.triangles.ravel()
            temp_uv = num.zeros((n_time, n_nodes))

            # Number of triangles sharing each node, for averaging.
            count_uv = num.zeros(len(mesh.nodes))
            my_num_add_at(count_uv, mesh_ids, 1)

            for i in range(n_time):
                my_num_add_at(temp_uv[i,:], mesh_ids, quantity[i,:])
                temp_uv[i,:] = temp_uv[i,:]/count_uv

        elif len(shape) == 1:
            # non time array
            if shape[0] == len(mesh.nodes):
                return quantity

            mesh_ids = mesh.triangles.ravel()
            temp_uv = num.zeros(len(mesh.nodes))

            count_uv = num.zeros(len(mesh.nodes))
            my_num_add_at(count_uv, mesh_ids, 1)
            my_num_add_at(temp_uv, mesh_ids, quantity)
            # Average as in the 2-D branch above (count_uv was previously
            # computed but never applied here, leaving sums, not means).
            temp_uv = temp_uv/count_uv

        else:
            raise Exception('Unsupported quantity array rank: %d' % len(shape))

        return temp_uv

    quantities = {}
    quantities['elevation'] = gather(elevation)
    quantities['stage'] = gather(stage)
    quantities['xmomentum'] = gather(xmomentum)
    quantities['ymomentum'] = gather(ymomentum)

    fid.close()

    return mesh, quantities, time
1439
1440
def get_time_interp(time, t=None):
    """Finds the ratio and index for time interpolation.

    time    an array of time steps (must be increasing)
    t       the sample time (default: last time step)

    Returns a tuple (index, ratio) such that the interpolated value is
    (1-ratio)*Q[index] + ratio*Q[index+1].

    Raises DataTimeError if t lies outside [time[0], time[-1]].
    """
    if t is None:
        t = time[-1]
        index = -1
        ratio = 0.
    else:
        T = time
        tau = t
        index = 0
        msg = 'Time interval derived from file %s [%s:%s]' \
              % ('FIXMEfilename', T[0], T[-1])
        msg += ' does not match model time: %s' % tau
        # raise E(msg) works in both Python 2 and 3 (raise E, msg is
        # Python-2 only).
        if tau < time[0]: raise DataTimeError(msg)
        if tau > time[-1]: raise DataTimeError(msg)
        # Bracket tau between time[index] and time[index+1].
        while tau > time[index]: index += 1
        while tau < time[index]: index -= 1
        if tau == time[index]:
            # Protect against case where tau == time[-1] (last time)
            # - also works in general when tau == time[i]
            ratio = 0
        else:
            # t is now between index and index+1
            ratio = (tau - time[index])/(time[index+1] - time[index])

    return (index, ratio)
1471
1472
1473
def interpolated_quantity(saved_quantity, time_interp):
    """Interpolate a quantity with respect to time.

    saved_quantity  the quantity to interpolate, indexed by timestep
    time_interp     (index, ratio) as produced by get_time_interp

    Returns the value(s) at timestep index when ratio is zero, otherwise
    the linear blend between timesteps index and index+1.
    """

    index, ratio = time_interp

    if ratio <= 0:
        # Exactly on a stored timestep - no blending required.
        return saved_quantity[index]

    # Linear interpolation between the bracketing timesteps.
    return (1 - ratio)*saved_quantity[index] + ratio*saved_quantity[index+1]
1494
1495
def weed(coordinates, volumes, boundary=None):
    """Excise all duplicate points from a mesh description.

    coordinates  list (or numpy array) of [x, y] vertex coordinates
    volumes      list (or numpy array) of triangles as index triples
    boundary     optional dict mapping (index0, index1) segments to tags

    Returns (coordinates, volumes, boundary) with duplicate coordinates
    removed and all indices remapped to the compacted coordinate list.
    """
    if isinstance(coordinates, num.ndarray):
        coordinates = coordinates.tolist()
    if isinstance(volumes, num.ndarray):
        volumes = volumes.tolist()

    # Map each point to the index of its first occurrence; same_point
    # records, for every original index, which point it refers to.
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        # 'in' works in both Python 2 and 3 (has_key was Python-2 only).
        if point not in point_dict:
            point_dict[point] = i
        same_point[i] = point

    # Rebuild the coordinate list with one entry per unique point and
    # renumber point_dict to the new, compacted indices.
    coordinates = []
    i = 0
    for point in point_dict.keys():
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1

    # Remap triangle vertex indices (negative entries are placeholders
    # and left untouched).
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    if boundary is not None:
        new_boundary = {}
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            # FIXME: should the boundary attributes be concatenated
            # ('exterior, pond') or replaced ('pond')? (peter row)
            # Currently the first label seen for a segment (in either
            # direction) is kept.
            if (point0, point1) in new_boundary:
                pass
            elif (point1, point0) in new_boundary:
                pass
            else:
                new_boundary[(point0, point1)] = label

        boundary = new_boundary

    return coordinates, volumes, boundary
1550
1551
1552
Note: See TracBrowser for help on using the repository browser.