source: trunk/anuga_core/source/anuga/file/sww.py @ 9258

Last change on this file since 9258 was 9258, checked in by steve, 11 years ago

Checking out error with sww.py

File size: 53.6 KB
Line 
1""" Classes to read an SWW file.
2"""
3
4import exceptions
# Module-specific exception types raised by the sww reading/writing code.
# They subclass the builtin Exception directly: in Python 2 the legacy
# ``exceptions`` module merely re-exports the builtins, so this is
# behaviorally identical while remaining portable.
class DataFileNotOpenError(Exception): pass
class DataMissingValuesError(Exception): pass
class NewQuantity(Exception): pass
class DataDomainError(Exception): pass
class DataTimeError(Exception): pass
10
11import numpy
12from anuga.coordinate_transforms.geo_reference import Geo_reference
13from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a
14from anuga.config import netcdf_float, netcdf_float32, netcdf_int, netcdf_float64
15from anuga.config import max_float
16from anuga.utilities.numerical_tools import ensure_numeric
17import anuga.utilities.log as log
18from anuga.file.netcdf import NetCDFFile
19
20from sts import Write_sts
21
22from anuga.coordinate_transforms.geo_reference import \
23        ensure_geo_reference
24
25from file_utils import create_filename
26import numpy as num
27
class Data_format:
    """Generic interface to data formats
    """

    def __init__(self, domain, extension, mode=netcdf_mode_w):
        """Record the bookkeeping shared by all data-format writers.

        domain    - the model domain whose data will be stored
        extension - file extension (e.g. 'sww') used to build the filename
        mode      - netcdf open mode; must start with 'r', 'w' or 'a'
        """

        msg = ("Mode %s must be either:\n" % mode +
               "   'w' (write)\n" +
               "   'r' (read)\n" +
               "   'a' (append)")
        assert mode[0] in ('r', 'w', 'a'), msg

        self.domain = domain
        self.timestep = 0

        # Derive the output filename from the domain's data directory,
        # its name and the requested extension
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        # Probably should exclude ghosts in case this is a parallel domain
        self.number_of_nodes = domain.number_of_nodes
        self.number_of_volumes = domain.number_of_triangles

        #FIXME: Should we have a general set_precision function?
54
class SWW_file(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """

    def __init__(self, domain, 
                 mode=netcdf_mode_w, max_size=2000000000, recursion=False):
        """Create an sww writer bound to a domain.

        domain    - domain whose quantities will be stored
        mode      - netcdf open mode; 'w' creates the file and writes the
                    header, other modes leave the existing file untouched
        max_size  - maximum file size in bytes before store_timestep rolls
                    over to a continuation file
        recursion - rollover depth; False/0 for the first file, incremented
                    each time a continuation file is created
        """

        self.precision = netcdf_float32 # Use single precision for quantities
        self.recursion = recursion
        self.mode = mode

        # Domain attributes, when present, override the constructor defaults
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size # File size max is 2Gig
        else:
            self.max_size = max_size

        if hasattr(domain, 'store_centroids'):
            self.store_centroids = domain.store_centroids
        else:
            self.store_centroids = False

        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            # NOTE(review): default_minimum_storable_height is neither
            # defined in this module nor imported here, so taking this
            # branch raises NameError -- presumably it should be imported
            # from anuga.config; confirm.
            self.minimum_storable_height = default_minimum_storable_height

        # Call parent constructor
        Data_format.__init__(self, domain, 'sww', mode)

        # Get static and dynamic quantities from domain.
        # flag == 1 means store once (static); flag == 2 means store every
        # timestep (dynamic).  '_c' variants hold centroid values.
        static_quantities = []
        dynamic_quantities = []
        static_c_quantities = []
        dynamic_c_quantities = []

        for q in domain.quantities_to_be_stored:
            flag = domain.quantities_to_be_stored[q]

            msg = 'Quantity %s is requested to be stored ' % q
            msg += 'but it does not exist in domain.quantities'
            assert q in domain.quantities, msg

            assert flag in [1,2]
            if flag == 1:
                static_quantities.append(q)
                if self.store_centroids: static_c_quantities.append(q+'_c')

            if flag == 2:
                dynamic_quantities.append(q)
                if self.store_centroids: dynamic_c_quantities.append(q+'_c')


        # NetCDF file definition.
        # NOTE(review): self.writer is only assigned in the 'w' branch below;
        # store_connectivity/store_timestep would fail with AttributeError
        # if this object were created in 'r' or 'a' mode -- confirm callers
        # only use those methods after a 'w' open.
        fid = NetCDFFile(self.filename, mode)
        if mode[0] == 'w':
            description = 'Output from anuga.file.sww ' \
                          'suitable for plotting'

            self.writer = Write_sww(static_quantities,
                                    dynamic_quantities,
                                    static_c_quantities,
                                    dynamic_c_quantities)

            self.writer.store_header(fid,
                                     domain.starttime,
                                     self.number_of_volumes,
                                     self.domain.number_of_nodes,
                                     description=description,
                                     smoothing=domain.smooth,
                                     order=domain.default_order,
                                     sww_precision=self.precision)

            # Extra optional information
            if hasattr(domain, 'texture'):
                fid.texture = domain.texture

            # Set up variables for min/max monitoring of quantities,
            # if the domain requests it
            if domain.quantities_to_be_monitored is not None:
                fid.createDimension('singleton', 1)
                fid.createDimension('two', 2)

                poly = domain.monitor_polygon
                if poly is not None:
                    N = len(poly)
                    fid.createDimension('polygon_length', N)
                    fid.createVariable('extrema.polygon',
                                       self.precision,
                                       ('polygon_length', 'two'))
                    fid.variables['extrema.polygon'][:] = poly

                interval = domain.monitor_time_interval
                if interval is not None:
                    fid.createVariable('extrema.time_interval',
                                       self.precision,
                                       ('two',))
                    fid.variables['extrema.time_interval'][:] = interval

                for q in domain.quantities_to_be_monitored:
                    fid.createVariable(q + '.extrema', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.min_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.max_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.min_time', self.precision,
                                       ('singleton',))
                    fid.createVariable(q + '.max_time', self.precision,
                                       ('singleton',))

        fid.close()

    def store_connectivity(self):
        """Store information about nodes, triangles and static quantities

        Writes x,y coordinates of triangles and their connectivity.

        Store also any quantity that has been identified as static.
        """

        # FIXME: Change name to reflect the fact that this function
        # stores both connectivity (triangulation) and static quantities


        domain = self.domain

        # append to the NetCDF file
        fid = NetCDFFile(self.filename, netcdf_mode_a)

        # Get X, Y from one (any) of the quantities
        Q = domain.quantities.values()[0]
        X,Y,_,V = Q.get_vertex_values(xy=True, precision=self.precision)

        # store the connectivity data: points as an (N, 2) array plus the
        # triangle vertex indices
        points = num.concatenate((X[:,num.newaxis],Y[:,num.newaxis]), axis=1)
        self.writer.store_triangulation(fid,
                                        points,
                                        V.astype(num.float32),
                                        points_georeference=\
                                        domain.geo_reference)

        # For parallel domains also record the local-to-global mappings
        if domain.parallel:
            self.writer.store_parallel_data(fid,
                                        domain.number_of_global_triangles,
                                        domain.number_of_global_nodes,
                                        domain.tri_full_flag,
                                        domain.tri_l2g,
                                        domain.node_l2g)


        # Collect values of static quantities (vertex and centroid)
        static_quantities = {}
        static_quantities_centroid = {}

        for name in self.writer.static_quantities:
            Q = domain.quantities[name]
            A, _ = Q.get_vertex_values(xy=False, 
                                       precision=self.precision)
            static_quantities[name] = A

        for name in self.writer.static_c_quantities:
            Q = domain.quantities[name[:-2]]  # rip off _c from name
            static_quantities_centroid[name] = Q.centroid_values

        # Store static quantities
        self.writer.store_static_quantities(fid, **static_quantities)
        self.writer.store_static_quantities_centroid(fid, **static_quantities_centroid) 

        fid.close()


    def store_timestep(self):
        """Store time and time dependent quantities

        Appends the current domain time and all dynamic quantities to the
        sww file.  If the file would exceed max_size, rolls over to a new
        continuation file (recursively) instead.

        Raises DataFileNotOpenError if the file cannot be opened for
        append after 10 retries.
        """

        from time import sleep
        from os import stat


        # Get NetCDF handle; retry because another process may be
        # reading the file
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                # Open existing file
                fid = NetCDFFile(self.filename, netcdf_mode_a)
            except IOError:
                # This could happen if someone was reading the file.
                # In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened' \
                      % self.filename
                msg += ' - trying step %s again' % self.domain.time
                log.critical(msg)
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' % self.filename
            raise DataFileNotOpenError, msg

        # Check to see if the file is already too big:
        # estimate the next increase as the average size per stored step
        time = fid.variables['time'][:]

        i = len(time) + 1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size / i
        # 2**self.recursion: recursion is False (== 0) for the first file,
        # so the threshold doubles with every continuation file
        if file_size + file_size_increase > self.max_size * 2**self.recursion:
            # In order to get the file name and start time correct,
            # I change the domain.filename and domain.starttime.
            # This is the only way to do this without changing
            # other modules (I think).

            # Write a filename addon that won't break the anuga viewers
            # (10.sww is bad)
            filename_ext = '_time_%s' % self.domain.time
            filename_ext = filename_ext.replace('.', '_')

            # Remember the old filename, then give domain a
            # name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename + filename_ext)

            # Temporarily change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.get_time()

            # Build a new data_structure.
            next_data_structure = SWW_file(self.domain, mode=self.mode,
                                           max_size=self.max_size,
                                           recursion=self.recursion+1)
            if not self.recursion:
                log.critical('    file_size = %s' % file_size)
                log.critical('    saving file to %s'
                             % next_data_structure.filename) 

            # Set up the new data_structure
            self.domain.writer = next_data_structure

            # Store connectivity and first timestep
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep()
            fid.sync()
            fid.close()

            # Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time'][:]
            i = len(time)

            if 'stage' in self.writer.dynamic_quantities:
                # Select only those values for stage,
                # xmomentum and ymomentum (if stored) where
                # depth exceeds minimum_storable_height
                #
                # In this branch it is assumed that elevation
                # is also available as a quantity


                # Smoothing for the get_vertex_values will be obtained
                # from the smooth setting in domain

                Q = domain.quantities['stage']
                w, _ = Q.get_vertex_values(xy=False)

                Q = domain.quantities['elevation']
                z, _ = Q.get_vertex_values(xy=False)               

                storable_indices = num.array(w-z >= self.minimum_storable_height)
            else:
                # Very unlikely branch
                storable_indices = None # This means take all

            # Now collect dynamic quantities (vertex and centroid)
            dynamic_quantities = {}
            dynamic_quantities_centroid = {}

            for name in self.writer.dynamic_quantities:
                Q = domain.quantities[name]
                A, _ = Q.get_vertex_values(xy=False,
                                           precision=self.precision)

                if storable_indices is not None:
                    if name == 'stage':
                        # Where depth is below the threshold, store the
                        # bed elevation instead of the stage
                        A = num.choose(storable_indices, (z, A))

                    if name in ['xmomentum', 'ymomentum']:
                        # Get xmomentum where depth exceeds
                        # minimum_storable_height

                        # Define a zero vector of same size and type as A
                        # for use with momenta
                        null = num.zeros(num.size(A), A.dtype.char)
                        A = num.choose(storable_indices, (null, A))

                dynamic_quantities[name] = A

            for name in self.writer.dynamic_c_quantities:
                Q = domain.quantities[name[:-2]]  # rip off _c from name
                dynamic_quantities_centroid[name] = Q.centroid_values


            # Store dynamic quantities
            slice_index = self.writer.store_quantities(fid,
                                         time=self.domain.time,
                                         sww_precision=self.precision,
                                         **dynamic_quantities)

            # Store dynamic centroid quantities at the same time slice
            if self.store_centroids:
                self.writer.store_quantities_centroid(fid,
                                                      slice_index= slice_index,
                                                      sww_precision=self.precision,
                                                      **dynamic_quantities_centroid)           


            # Update extrema if requested
            domain = self.domain
            if domain.quantities_to_be_monitored is not None:
                for q, info in domain.quantities_to_be_monitored.items():
                    if info['min'] is not None:
                        fid.variables[q + '.extrema'][0] = info['min']
                        fid.variables[q + '.min_location'][:] = \
                                        info['min_location']
                        fid.variables[q + '.min_time'][0] = info['min_time']

                    if info['max'] is not None:
                        fid.variables[q + '.extrema'][1] = info['max']
                        fid.variables[q + '.max_location'][:] = \
                                        info['max_location']
                        fid.variables[q + '.max_time'][0] = info['max_time']

            # Flush and close
            #fid.sync()
            fid.close()
410
411
class Read_sww:
    """Reader for NetCDF .sww files.

    Loads the time vector, the triangular mesh and the stored quantities
    (one time slice at a time) from the given file.
    """

    def __init__(self, source):
        """The source parameter is assumed to be a NetCDF sww file.
        """

        self.source = source

        # Index of the currently loaded time slice
        self.frame_number = 0

        fin = NetCDFFile(self.source, 'r')

        self.time = num.array(fin.variables['time'][:], num.float)
        self.last_frame_number = self.time.shape[0] - 1

        self.frames = num.arange(self.last_frame_number+1)

        fin.close()

        self.read_mesh()

        self.quantities = {}

        self.read_quantities()


    def read_mesh(self):
        """ Read and store the mesh data contained within this sww file.
        """
        fin = NetCDFFile(self.source, 'r')

        # Triangle connectivity: indices into the x/y point arrays
        self.vertices = num.array(fin.variables['volumes'][:], num.int)

        self.x = x = num.array(fin.variables['x'][:], num.float)
        self.y = y = num.array(fin.variables['y'][:], num.float)

        assert len(self.x) == len(self.y)

        # Cache the bounding box of the mesh
        self.xmin = num.min(x)
        self.xmax = num.max(x)
        self.ymin = num.min(y)
        self.ymax = num.max(y)


        fin.close()

    def read_quantities(self, frame_number=0):
        """
        Read the quantities contained in this file.
        frame_number is the time index to load.

        Returns self.quantities, a dict mapping quantity name to an
        (M, 3) array of per-triangle vertex values, where M is the
        number of triangles.
        """
        assert frame_number >= 0 and frame_number <= self.last_frame_number

        self.frame_number = frame_number

        # Number of triangles; relies on Python 2 integer division and on
        # vertices being stored non-uniquely (3 points per triangle)
        M = len(self.x)/3

        fin = NetCDFFile(self.source, 'r')

        # Skip coordinate/connectivity/time variables, *_range extrema
        # and centroid variables.  NOTE: '_c' is matched anywhere in the
        # name, so any quantity whose name merely contains '_c' is
        # skipped as well.
        for q in filter(lambda n:n != 'x' and n != 'y' and n != 'time' and n != 'volumes' and \
                        '_range' not in n and '_c' not in n , \
                        fin.variables.keys()):
            if len(fin.variables[q].shape) == 1: # Not a time-varying quantity
                self.quantities[q] = num.ravel(num.array(fin.variables[q][:], num.float)).reshape(M,3)
            else: # Time-varying, get the current timestep data
                self.quantities[q] = num.array(fin.variables[q][self.frame_number], num.float).reshape(M,3)
        fin.close()
        return self.quantities

    def get_bounds(self):
        """
            Get the bounding rect around the mesh as
            [xmin, xmax, ymin, ymax].
        """
        return [self.xmin, self.xmax, self.ymin, self.ymax]

    def get_last_frame_number(self):
        """
            Return the index of the last frame in the file.
        """
        return self.last_frame_number

    def get_time(self):
        """
            Get time at the current frame num, in secs.
        """
        return self.time[self.frame_number]
499
500
501class Write_sww(Write_sts):
502    """
503        A class to write an SWW file.
504       
505        It is domain agnostic, and requires all the data to be fed in
506        manually.
507    """
508
509    def __init__(self,
510                 static_quantities,
511                 dynamic_quantities,
512                 static_c_quantities = [],
513                 dynamic_c_quantities = []):
514       
515        """Initialise Write_sww with two (or 4) list af quantity names:
516       
517        static_quantities (e.g. elevation or friction):
518            Stored once at the beginning of the simulation in a 1D array
519            of length number_of_points   
520        dynamic_quantities (e.g stage):
521            Stored every timestep in a 2D array with
522            dimensions number_of_points X number_of_timesteps
523
524        static_c_quantities (e.g. elevation_c or friction_c):
525            Stored once at the beginning of the simulation in a 1D array
526            of length number_of_triangles 
527        dynamic_c_quantities (e.g stage_c):
528            Stored every timestep in a 2D array with
529            dimensions number_of_triangles X number_of_timesteps
530       
531        """
532        self.static_quantities = static_quantities   
533        self.dynamic_quantities = dynamic_quantities
534        self.static_c_quantities = static_c_quantities
535        self.dynamic_c_quantities = dynamic_c_quantities
536
537        self.store_centroids = False
538        if static_c_quantities or dynamic_c_quantities:
539            self.store_centroids = True
540
541
542    def store_header(self,
543                     outfile,
544                     times,
545                     number_of_volumes,
546                     number_of_points,
547                     description='Generated by ANUGA',
548                     smoothing=True,
549                     order=1,
550                     sww_precision=netcdf_float32,
551                     verbose=False):
552        """Write an SWW file header.
553
554        Writes the first section of the .sww file.
555
556        outfile - the open file that will be written
557        times - A list of the time slice times OR a start time
558        Note, if a list is given the info will be made relative.
559        number_of_volumes - the number of triangles
560        number_of_points - the number of vertices in the mesh
561        """
562
563        from anuga.abstract_2d_finite_volumes.util \
564            import get_revision_number
565
566        outfile.institution = 'Geoscience Australia'
567        outfile.description = description
568
569        # For sww compatibility
570        if smoothing is True:
571            # Smoothing to be depreciated
572            outfile.smoothing = 'Yes'
573            outfile.vertices_are_stored_uniquely = 'False'
574        else:
575            # Smoothing to be depreciated
576            outfile.smoothing = 'No'
577            outfile.vertices_are_stored_uniquely = 'True'
578        outfile.order = order
579
580        try:
581            revision_number = get_revision_number()
582        except:
583            # This will be triggered if the system cannot get the SVN
584            # revision number.
585            revision_number = None
586        # Allow None to be stored as a string
587        outfile.revision_number = str(revision_number)
588
589        # This is being used to seperate one number from a list.
590        # what it is actually doing is sorting lists from numeric arrays.
591        if isinstance(times, (list, num.ndarray)):
592            number_of_times = len(times)
593            times = ensure_numeric(times)
594            if number_of_times == 0:
595                starttime = 0
596            else:
597                starttime = times[0]
598                times = times - starttime  #Store relative times
599        else:
600            number_of_times = 0
601            starttime = times
602
603
604        outfile.starttime = starttime
605
606        # dimension definitions
607        outfile.createDimension('number_of_volumes', number_of_volumes)
608        outfile.createDimension('number_of_triangle_vertices', number_of_points)
609        outfile.createDimension('number_of_vertices', 3)
610        outfile.createDimension('numbers_in_range', 2)
611
612        if smoothing is True:
613            outfile.createDimension('number_of_points', number_of_points)
614            # FIXME(Ole): This will cause sww files for parallel domains to
615            # have ghost nodes stored (but not used by triangles).
616            # To clean this up, we have to change get_vertex_values and
617            # friends in quantity.py (but I can't be bothered right now)
618        else:
619            outfile.createDimension('number_of_points', 3*number_of_volumes)
620
621        outfile.createDimension('number_of_timesteps', number_of_times)
622
623        # variable definitions
624        outfile.createVariable('x', sww_precision, ('number_of_points',))
625        outfile.createVariable('y', sww_precision, ('number_of_points',))
626
627        outfile.createVariable('volumes', netcdf_int , ('number_of_volumes',
628                                                       'number_of_vertices'))
629
630
631        for q in self.static_quantities:
632           
633            outfile.createVariable(q, sww_precision,
634                                   ('number_of_points',))
635           
636            outfile.createVariable(q + Write_sww.RANGE, sww_precision,
637                                   ('numbers_in_range',))
638           
639            # Initialise ranges with small and large sentinels.
640            # If this was in pure Python we could have used None sensibly
641            outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
642            outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
643
644
645        for q in self.static_c_quantities:
646            outfile.createVariable(q, sww_precision,
647                                   ('number_of_volumes',))
648                                   
649
650        self.write_dynamic_quantities(outfile, times, precis = sww_precision)
651
652
653        outfile.sync()
654
655
656
    def store_triangulation(self,
                            outfile,
                            points_utm,
                            volumes,
                            zone=None, 
                            new_origin=None,
                            points_georeference=None, 
                            verbose=False):
        """
        Store triangulation data in the underlying file.

        Stores the points and triangle indices in the sww file

        outfile - open handle to underlying file.

        points_utm - currently a list or array of the points in UTM.

        volumes - triangle connectivity: indices into points_utm, any
        sequence reshapable to (N, 3).

        zone - UTM zone, used only if a new georeference must be created.

        new_origin - a georeference that the points can be set to. (Maybe
        do this before calling this function.)

        points_georeference - the georeference of the points_utm.

        verbose - True if this function is to be verbose.

        How about passing new_origin and current_origin.
        If you get both, do a conversion from the old to the new.

        If you only get new_origin, the points are absolute,
        convert to relative

        if you only get the current_origin the points are relative, store
        as relative.

        if you get no georefs create a new georef based on the minimums of
        points_utm.  (Another option would be to default to absolute)

        Yes, and this is done in another part of the code.
        Probably geospatial.

        If you don't supply either geo_refs, then supply a zone. If not
        the default zone will be used.

        precon:
            header has been called.
        """

        number_of_points = len(points_utm)
        volumes = num.array(volumes,num.int32).reshape(-1,3)


        points_utm = num.array(points_utm)

        # Given the two geo_refs and the points, do the stuff
        # described in the method header
        # if this is needed else where, pull out as a function
        points_georeference = ensure_geo_reference(points_georeference)
        new_origin = ensure_geo_reference(new_origin)
        if new_origin is None and points_georeference is not None:
            # Points are already relative to their georeference; keep as-is
            points = points_utm
            geo_ref = points_georeference
        else:
            if new_origin is None:
                # No origin supplied: anchor at the minimum corner of the
                # point cloud
                new_origin = Geo_reference(zone, min(points_utm[:,0]),
                                                 min(points_utm[:,1]))
            points = new_origin.change_points_geo_ref(points_utm,
                                                      points_georeference)
            geo_ref = new_origin

        # At this stage I need a georef and points
        # the points are relative to the georef
        geo_ref.write_NetCDF(outfile)

        x =  points[:,0]
        y =  points[:,1]


        if verbose:
            log.critical('------------------------------------------------')
            log.critical('More Statistics:')
            log.critical('  Extent (/lon):')
            log.critical('    x in [%f, %f], len(lat) == %d'
                         % (min(x), max(x), len(x)))
            log.critical('    y in [%f, %f], len(lon) == %d'
                         % (min(y), max(y), len(y)))
            log.critical('geo_ref: %s' % str(geo_ref))
            log.critical('------------------------------------------------')


        outfile.variables['x'][:] = x #- geo_ref.get_xllcorner()
        outfile.variables['y'][:] = y #- geo_ref.get_yllcorner()


        msg = 'Mismatch between shape of volumes array and (number_of_volumes , 3)'
        assert volumes.shape == outfile.variables['volumes'].shape, msg

        outfile.variables['volumes'][:] = volumes
766
767    def write_dynamic_quantities(self, outfile, 
768                                 times, precis = netcdf_float32, verbose = False):   
769        """
770            Write out given quantities to file.
771        """
772       
773
774        for q in self.dynamic_quantities:
775            outfile.createVariable(q, precis, ('number_of_timesteps',
776                                               'number_of_points'))
777            outfile.createVariable(q + Write_sts.RANGE, precis,
778                                   ('numbers_in_range',))
779           
780            # Initialise ranges with small and large sentinels.
781            # If this was in pure Python we could have used None sensibly
782            outfile.variables[q+Write_sts.RANGE][0] = max_float  # Min
783            outfile.variables[q+Write_sts.RANGE][1] = -max_float # Max
784
785        for q in self.dynamic_c_quantities:
786            outfile.createVariable(q, precis, ('number_of_timesteps',
787                                                    'number_of_volumes'))
788
789        # Doing sts_precision instead of Float gives cast errors.
790        outfile.createVariable('time', netcdf_float, ('number_of_timesteps',))
791
792        if isinstance(times, (list, num.ndarray)):
793            outfile.variables['time'][:] = times    # Store time relative
794
795        if verbose:
796            log.critical('------------------------------------------------')
797            log.critical('Statistics:')
798            log.critical('    t in [%f, %f], len(t) == %d'
799                         % (num.min(times), num.max(times), len(times.flat)))
800
801    def store_parallel_data(self,
802                            outfile,
803                            number_of_global_triangles,
804                            number_of_global_nodes,
805                            tri_full_flag = None,
806                            tri_l2g = None,
807                            node_l2g = None,
808                            sww_precision=netcdf_float32,
809                            verbose=False):
810
811
812        # dimension definitions
813        #outfile.createDimension('number_of_volumes', number_of_volumes)
814        #outfile.createDimension('number_of_vertices', 3)
815        #outfile.createDimension('numbers_in_range', 2)
816
817        #print 'store parallel data'
818        outfile.number_of_global_triangles = number_of_global_triangles
819        outfile.number_of_global_nodes = number_of_global_nodes
820
821        # variable definitions
822        outfile.createVariable('tri_l2g',  netcdf_int, ('number_of_volumes',))
823        outfile.createVariable('node_l2g', netcdf_int, ('number_of_triangle_vertices',))
824        outfile.createVariable('tri_full_flag', netcdf_int, ('number_of_volumes',))
825
826        #print tri_l2g.shape
827        #print tri_l2g
828        #print outfile.variables['tri_l2g'].shape
829
830        outfile.variables['tri_l2g'][:] = tri_l2g.astype(num.int32)
831
832        #print node_l2g.shape
833        #print node_l2g
834        #print outfile.variables['node_l2g'].shape
835
836        outfile.variables['node_l2g'][:] = node_l2g.astype(num.int32)
837
838        #print tri_full_flag.shape
839        #print tri_full_flag
840        #print outfile.variables['tri_full_flag'].shape
841
842        outfile.variables['tri_full_flag'][:] = tri_full_flag.astype(num.int32)
843
844
845
846    def store_static_quantities(self, 
847                                outfile, 
848                                sww_precision=num.float32,
849                                verbose=False, 
850                                **quant):
851        """
852        Write the static quantity info.
853
854        **quant is extra keyword arguments passed in. These must be
855          the numpy arrays to be stored in the sww file at each timestep.
856
857        The argument sww_precision allows for storing as either
858        * single precision (default): num.float32
859        * double precision: num.float64 or num.float
860
861        Precondition:
862            store_triangulation and
863            store_header have been called.
864        """
865
866        # The dictionary quant must contain numpy arrays for each name.
867        # These will typically be quantities from Domain such as friction
868        #
869        # Arrays not listed in static_quantitiues will be ignored, silently.
870        #
871        # This method will also write the ranges for each quantity,
872        # e.g. stage_range, xmomentum_range and ymomentum_range
873        for q in self.static_quantities:
874            if not quant.has_key(q):
875                msg = 'Values for quantity %s was not specified in ' % q
876                msg += 'store_quantities so they cannot be stored.'
877                raise NewQuantity, msg
878            else:
879                q_values = ensure_numeric(quant[q])
880               
881                x = q_values.astype(sww_precision)
882                outfile.variables[q][:] = x
883       
884                # This populates the _range values
885                outfile.variables[q + Write_sww.RANGE][0] = num.min(x)
886                outfile.variables[q + Write_sww.RANGE][1] = num.max(x)
887                   
888        # FIXME: Hack for backwards compatibility with old viewer
889        #if 'elevation' in self.static_quantities:
890        #    outfile.variables['z'][:] = outfile.variables['elevation'][:]
891
892                   
893
894    def store_static_quantities_centroid(self, 
895                                            outfile, 
896                                            sww_precision=num.float32,
897                                            verbose=False, 
898                                            **quant):
899        """
900        Write the static centroid quantity info.
901
902        **quant is extra keyword arguments passed in. These must be
903          the numpy arrays to be stored in the sww file at each timestep.
904
905        The argument sww_precision allows for storing as either
906        * single precision (default): num.float32
907        * double precision: num.float64 or num.float
908
909        Precondition:
910            store_triangulation and
911            store_header have been called.
912        """
913
914        # The dictionary quant must contain numpy arrays for each name.
915        # These will typically be quantities from Domain such as friction
916        #
917        # Arrays not listed in static_quantitiues will be ignored, silently.
918        #
919        # This method will also write the ranges for each quantity,
920        # e.g. stage_range, xmomentum_range and ymomentum_range
921
922        #print outfile.variables.keys()
923        #print self.static_c_quantities
924       
925        for q in self.static_c_quantities:
926            if not quant.has_key(q):
927                msg = 'Values for quantity %s was not specified in ' % q
928                msg += 'store_quantities so they cannot be stored.'
929                raise NewQuantity, msg
930            else:
931                q_values = ensure_numeric(quant[q])
932               
933                x = q_values.astype(sww_precision)
934                outfile.variables[q][:] = x
935       
936
937                   
938       
939
940    def store_quantities(self, 
941                         outfile, 
942                         sww_precision=num.float32,
943                         slice_index=None,
944                         time=None,
945                         verbose=False, 
946                         **quant):
947        """
948        Write the quantity info at each timestep.
949
950        **quant is extra keyword arguments passed in. These must be
951          the numpy arrays to be stored in the sww file at each timestep.
952
953        if the time array is already been built, use the slice_index
954        to specify the index.
955
956        Otherwise, use time to increase the time dimension
957
958        Maybe make this general, but the viewer assumes these quantities,
959        so maybe we don't want it general - unless the viewer is general
960       
961        The argument sww_precision allows for storing as either
962        * single precision (default): num.float32
963        * double precision: num.float64 or num.float
964
965        Precondition:
966            store_triangulation and
967            store_header have been called.
968        """
969
970        if time is not None:
971            file_time = outfile.variables['time']
972            slice_index = len(file_time)
973            # check if time already saved as in check pointing           
974            if slice_index > 0:
975                if time <= file_time[slice_index-1]:
976                    check = numpy.where(numpy.abs(file_time[:]-time)< 1.0e-14)
977                    slice_index = check[0][0]
978               
979            file_time[slice_index] = time
980        else:
981            slice_index = int(slice_index) # Has to be cast in case it was numpy.int   
982
983        # Write the named dynamic quantities
984        # The dictionary quant must contain numpy arrays for each name.
985        # These will typically be the conserved quantities from Domain
986        # (Typically stage,  xmomentum, ymomentum).
987        #
988        # Arrays not listed in dynamic_quantitiues will be ignored, silently.
989        #
990        # This method will also write the ranges for each quantity,
991        # e.g. stage_range, xmomentum_range and ymomentum_range
992        for q in self.dynamic_quantities:
993            if not quant.has_key(q):
994                msg = 'Values for quantity %s was not specified in ' % q
995                msg += 'store_quantities so they cannot be stored.'
996                raise NewQuantity, msg
997            else:
998                q_values = ensure_numeric(quant[q])
999               
1000                q_retyped = q_values.astype(sww_precision)
1001                outfile.variables[q][slice_index] = q_retyped
1002                   
1003                # This updates the _range values
1004                q_range = outfile.variables[q + Write_sww.RANGE][:]
1005                q_values_min = num.min(q_values)
1006                if q_values_min < q_range[0]:
1007                    outfile.variables[q + Write_sww.RANGE][0] = q_values_min
1008                q_values_max = num.max(q_values)
1009                if q_values_max > q_range[1]:
1010                    outfile.variables[q + Write_sww.RANGE][1] = q_values_max
1011                   
1012        return slice_index
1013                   
1014                   
1015
1016    def store_quantities_centroid(self, 
1017                                  outfile, 
1018                                  sww_precision=num.float32,
1019                                  slice_index=None,
1020                                  verbose=False, 
1021                                  **quant):
1022        """
1023        Write the quantity centroid info at each timestep.
1024
1025        **quant is extra keyword arguments passed in. These must be
1026          the numpy arrays to be stored in the sww file at each timestep.
1027
1028        if the time array is already been built, use the slice_index
1029        to specify the index.
1030
1031        Otherwise, use time to increase the time dimension
1032
1033        Maybe make this general, but the viewer assumes these quantities,
1034        so maybe we don't want it general - unless the viewer is general
1035       
1036        The argument sww_precision allows for storing as either
1037        * single precision (default): num.float32
1038        * double precision: num.float64 or num.float
1039
1040        Precondition:
1041            store_triangulation and
1042            store_header have been called.
1043        """
1044
1045        assert slice_index is not None, 'slice_index should be set in store_quantities'
1046         
1047
1048        # Write the named dynamic quantities
1049        # The dictionary quant must contain numpy arrays for each name.
1050        # These will typically be the conserved quantities from Domain
1051        # (Typically stage,  xmomentum, ymomentum).
1052        #
1053        # Arrays not listed in dynamic_quantitiues will be ignored, silently.
1054        #
1055        # This method will also write the ranges for each quantity,
1056        # e.g. stage_range, xmomentum_range and ymomentum_range
1057       
1058        #print 50*"="
1059        #print quant
1060        #print self.dynamic_c_quantities
1061
1062        for q in self.dynamic_c_quantities:
1063            if not quant.has_key(q):
1064                msg = 'Values for quantity %s was not specified in ' % q
1065                msg += 'store_quantities so they cannot be stored.'
1066                raise NewQuantity, msg
1067            else:
1068                q_values = ensure_numeric(quant[q])
1069               
1070                q_retyped = q_values.astype(sww_precision)
1071                outfile.variables[q][slice_index] = q_retyped
1072
1073
1074       
1075
1076    def verbose_quantities(self, outfile):
1077        log.critical('------------------------------------------------')
1078        log.critical('More Statistics:')
1079        for q in self.dynamic_quantities:
1080            log.critical(%s in [%f, %f]'
1081                         % (q, outfile.variables[q+Write_sww.RANGE][0],
1082                            outfile.variables[q+Write_sww.RANGE][1]))
1083        log.critical('------------------------------------------------')
1084
1085
1086
1087
def extent_sww(file_name):
    """Read in an sww file, then get its extents

    Input:
    file_name - the sww file

    Output:
    A list: [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
    """

    # Get NetCDF
    fid = NetCDFFile(file_name, netcdf_mode_r)

    try:
        # Get the variables
        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        stage = fid.variables['stage'][:]
    finally:
        # Close even if a variable is missing, so the file handle is
        # not leaked on error (the original leaked it).
        fid.close()

    return [min(x), max(x), min(y), max(y), num.min(stage), num.max(stage)]
1110
1111
def load_sww_as_domain(filename, boundary=None, t=None,
               fail_if_NaN=True, NaN_filler=0,
               verbose=False, very_verbose=False):
    """
    Load an sww file into a domain.

    Usage: domain = load_sww_as_domain('file.sww',
                        t=time (default = last time in file))

    filename      name of the sww file to read
    boundary      optional boundary dictionary to attach to the domain
    t             time at which dynamic quantities are interpolated
                  (default: last timestep stored in the file)
    fail_if_NaN   if True, raise DataMissingValuesError when a quantity
                  contains the missing-data sentinel
    NaN_filler    value substituted for missing data when fail_if_NaN
                  is False
    verbose       log progress information
    very_verbose  additionally log per-quantity statistics

    Boundary is not recommended if domain.smooth is not selected, as it
    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
    give a different final boundary, or crash.
    """

    from anuga.shallow_water.shallow_water_domain import Domain

    # initialise NaN.
    # Sentinel for missing data; overridden per quantity below by the
    # variable's missing_value attribute when one is present.
    NaN = 9.969209968386869e+036

    if verbose: log.critical('Reading from %s' % filename)

    fid = NetCDFFile(filename, netcdf_mode_r)    # Open existing file for read
    time = fid.variables['time'][:]       # Timesteps
    if t is None:
        t = time[-1]
    time_interp = get_time_interp(time,t)

    # Get the variables as numeric arrays
    x = fid.variables['x'][:]                   # x-coordinates of vertices
    y = fid.variables['y'][:]                   # y-coordinates of vertices
    #elevation = fid.variables['elevation']      # Elevation
    #stage = fid.variables['stage']              # Water level
    #xmomentum = fid.variables['xmomentum']      # Momentum in the x-direction
    #ymomentum = fid.variables['ymomentum']      # Momentum in the y-direction

    starttime = float(fid.starttime)
    #starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:]       # Connectivity
    # Build an Nx2 coordinate array from the x and y vectors
    coordinates = num.transpose(num.asarray([x.tolist(), y.tolist()]))
    # FIXME (Ole): Something like this might be better:
    #                 concatenate((x, y), axis=1)
    # or              concatenate((x[:,num.newaxis], x[:,num.newaxis]), axis=1)

    dynamic_quantities = []
    interpolated_quantities = {}
    static_quantities = []

    # get geo_reference
    try:                             # sww files don't have to have a geo_ref
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: # AttributeError, e:
        geo_reference = None

    if verbose: log.critical('    getting quantities')

    # Classify every variable in the file: anything carrying the
    # 'number_of_timesteps' dimension is dynamic and gets interpolated
    # to time t; everything else is treated as static.
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            dynamic_quantities.append(quantity)
            interpolated_quantities[quantity] = \
                  interpolated_quantity(fid.variables[quantity][:], time_interp)
        else:
            static_quantities.append(quantity)

    #print static_quantities
    #print dynamic_quantities

    # Strip centroid (_c) variables - they are not set on the domain.
    # NOTE: all removes share a single try, so if 'stage_c' is absent
    # none of the subsequent names are removed either.
    try:
        dynamic_quantities.remove('stage_c')
        dynamic_quantities.remove('xmomentum_c')
        dynamic_quantities.remove('ymomentum_c')
        dynamic_quantities.remove('elevation_c')
        dynamic_quantities.remove('friction_c')
    except:
        pass

    try:
        static_quantities.remove('elevation_c')
        static_quantities.remove('friction_c')
    except:
        pass


    # Coordinates and connectivity are handled separately, not as quantities
    static_quantities.remove('x')
    static_quantities.remove('y')
    #other_quantities.remove('z')
    static_quantities.remove('volumes')
    # Range variables are bookkeeping, not quantities (same shared-try
    # caveat as above applies).
    try:
        static_quantities.remove('stage_range')
        static_quantities.remove('xmomentum_range')
        static_quantities.remove('ymomentum_range')
        static_quantities.remove('elevation_range')
        static_quantities.remove('friction_range')
    except:
        pass

    dynamic_quantities.remove('time')

    if verbose: log.critical('    building domain')

    #    From domain.Domain:
    #    domain = Domain(coordinates, volumes,\
    #                    conserved_quantities = conserved_quantities,\
    #                    other_quantities = other_quantities,zone=zone,\
    #                    xllcorner=xllcorner, yllcorner=yllcorner)

    # From shallow_water.Domain:
    coordinates = coordinates.tolist()
    volumes = volumes.tolist()
    # FIXME:should this be in mesh? (peter row)
    if fid.smoothing == 'Yes':
        unique = False
    else:
        unique = True
    if unique:
        # Non-smoothed files store one vertex per triangle corner;
        # collapse duplicate points before constructing the domain.
        coordinates, volumes, boundary = weed(coordinates, volumes, boundary)



    try:
        domain = Domain(coordinates, volumes, boundary, starttime=(float(starttime) + float(t)))
    except AssertionError, e:
        fid.close()
        msg = 'Domain could not be created: %s. ' \
              'Perhaps use "fail_if_NaN=False and NaN_filler = ..."' % e
        raise DataDomainError, msg

    if not boundary is None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    # Set the static quantities, screening for missing-data entries
    for quantity in static_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass                       # quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            log.critical('       %s' % str(quantity))
            log.critical('        NaN = %s' % str(NaN))
            log.critical('        max(X)')
            log.critical('       %s' % str(max(X)))
            log.critical('        max(X)==NaN')
            log.critical('       %s' % str(max(X)==NaN))
            log.critical('')
        if max(X) == NaN or min(X) == NaN:
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry' % quantity
                raise DataMissingValuesError, msg
            else:
                # Mask out sentinel entries and replace them by NaN_filler
                data = (X != NaN)
                X = (X*data) + (data==0)*NaN_filler
        if unique:
            # Reshape flat vertex values into one row of 3 per triangle
            X = num.resize(X, (len(X)/3, 3))
        domain.set_quantity(quantity, X)
    #
    # Set the dynamic quantities, already interpolated to time t above
    for quantity in dynamic_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except:
            pass                       # quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            log.critical('       %s' % str(quantity))
            log.critical('        NaN = %s' % str(NaN))
            log.critical('        max(X)')
            log.critical('       %s' % str(max(X)))
            log.critical('        max(X)==NaN')
            log.critical('       %s' % str(max(X)==NaN))
            log.critical('')
        if max(X) == NaN or min(X) == NaN:
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry' % quantity
                raise DataMissingValuesError, msg
            else:
                # Mask out sentinel entries and replace them by NaN_filler
                data = (X != NaN)
                X = (X*data) + (data==0)*NaN_filler
        if unique:
            X = num.resize(X, (X.shape[0]/3, 3))
        domain.set_quantity(quantity, X)

    fid.close()

    return domain
1298
1299
def get_mesh_and_quantities_from_file(filename,
                                      quantities=None,
                                      verbose=False):
    """Get and rebuild mesh structure and associated quantities from sww file

    Input:
        filename - Name of sww file
        quantities - Names of quantities to load
                     NOTE(review): currently unused - elevation, stage,
                     xmomentum and ymomentum are always returned; confirm
                     against callers before implementing filtering.

    Output:
        mesh - instance of class Interpolate
               (including mesh and interpolation functionality)
        quantities - arrays with quantity values at each mesh node
        time - vector of stored timesteps

    This function is used by e.g.:
        get_interpolated_quantities_at_polyline_midpoints
    """

    # FIXME (Ole): Maybe refactor filefunction using this more fundamental code.

    from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh

    if verbose: log.critical('Reading from %s' % filename)

    fid = NetCDFFile(filename, netcdf_mode_r)    # Open existing file for read
    time = fid.variables['time'][:]    # Time vector
    # Make times absolute by adding the simulation start time
    time += fid.starttime

    # Get the variables as numeric arrays
    x = fid.variables['x'][:]                   # x-coordinates of nodes
    y = fid.variables['y'][:]                   # y-coordinates of nodes

    elevation = fid.variables['elevation'][:]   # Elevation
    stage = fid.variables['stage'][:]           # Water level
    xmomentum = fid.variables['xmomentum'][:]   # Momentum in the x-direction
    ymomentum = fid.variables['ymomentum'][:]   # Momentum in the y-direction

    # Mesh (nodes (Mx2), triangles (Nx3))
    nodes = num.concatenate((x[:,num.newaxis], y[:,num.newaxis]), axis=1)
    triangles = fid.variables['volumes'][:]

    # Get geo_reference (sww files don't have to have one)
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except: #AttributeError, e:
        geo_reference = None

    if verbose: log.critical('    building mesh from sww file %s' % filename)

    boundary = None

    #FIXME (Peter Row): Should this be in mesh?
    if fid.smoothing != 'Yes':
        # Non-smoothed files store one vertex per triangle corner;
        # collapse duplicate points before building the mesh.
        nodes = nodes.tolist()
        triangles = triangles.tolist()
        nodes, triangles, boundary = weed(nodes, triangles, boundary)

    try:
        mesh = Mesh(nodes, triangles, boundary, geo_reference=geo_reference)
    except AssertionError as e:
        # Close the file before propagating so the handle is not leaked
        fid.close()
        msg = 'Domain could not be created: %s. "' % e
        raise DataDomainError(msg)

    quantities = {}
    quantities['elevation'] = elevation
    quantities['stage'] = stage
    quantities['xmomentum'] = xmomentum
    quantities['ymomentum'] = ymomentum

    fid.close()

    return mesh, quantities, time
1380
1381
def get_time_interp(time, t=None):
    """Finds the ratio and index for time interpolation.

    time  sequence of stored time steps, assumed ascending
    t     the sample time (default: last stored time)

    Returns (index, ratio) such that the sampled value is
    (1 - ratio) * Q[index] + ratio * Q[index + 1].

    Raises DataTimeError if t lies outside [time[0], time[-1]].
    """
    if t is None:
        # Default to the last timestep; no interpolation needed
        t = time[-1]
        index = -1
        ratio = 0.
    else:
        T = time
        tau = t
        index = 0
        msg = 'Time interval derived from file %s [%s:%s]' \
              % ('FIXMEfilename', T[0], T[-1])
        msg += ' does not match model time: %s' % tau
        if tau < time[0]: raise DataTimeError(msg)
        if tau > time[-1]: raise DataTimeError(msg)
        # Scan for the bracketing interval [time[index], time[index+1]]
        while tau > time[index]: index += 1
        while tau < time[index]: index -= 1
        if tau == time[index]:
            # Protect against case where tau == time[-1] (last time)
            # - also works in general when tau == time[i]
            ratio = 0
        else:
            # t is now between index and index+1
            ratio = (tau - time[index])/(time[index+1] - time[index])

    return (index, ratio)
1412
1413
1414
def interpolated_quantity(saved_quantity, time_interp):
    """Interpolate a stored quantity in time.

    saved_quantity  array of quantity values, first axis being time
    time_interp     (index, ratio) pair as produced by get_time_interp

    Returns the time slice at `index` when ratio is not positive,
    otherwise the linear blend of the slices at index and index+1.
    """
    index, ratio = time_interp

    # No interpolation requested: hand back the stored slice directly
    if not ratio > 0:
        return saved_quantity[index]

    # Linear blend between the two bracketing time slices
    lower = saved_quantity[index]
    upper = saved_quantity[index + 1]
    return (1 - ratio) * lower + ratio * upper
1435
1436
def weed(coordinates, volumes, boundary=None):
    """Excise all duplicate points from a triangulation.

    coordinates  list (or array) of [x, y] points, possibly duplicated
    volumes      list (or array) of triangles as triples of point indices
    boundary     optional dict mapping (i, j) segment tuples to labels

    Returns (coordinates, volumes, boundary) with duplicate points removed
    and all indices remapped.  The order of first occurrences of points
    is preserved.
    """
    if isinstance(coordinates, num.ndarray):
        coordinates = coordinates.tolist()
    if isinstance(volumes, num.ndarray):
        volumes = volumes.tolist()

    # Map each distinct point to the index of its first occurrence;
    # same_point records, for every original index, the point it held.
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        same_point[i] = point
        if point not in point_dict:
            # 'in' replaces the Python-2-only dict.has_key()
            point_dict[point] = i

    # Rebuild the coordinate list from the de-duplicated points and
    # renumber point_dict with the new, compacted indices.
    coordinates = []
    i = 0
    for point in point_dict.keys():
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1

    # Remap triangle vertex indices (negative entries are left untouched)
    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    # Remap boundary segment endpoints to the new indices
    new_boundary = {}
    if not boundary is None:
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')(peter row)

            # First label seen for a segment (in either orientation) wins;
            # later duplicates are ignored - same behaviour as the
            # original's no-op self-assignments, written explicitly.
            if ((point0, point1) not in new_boundary and
                    (point1, point0) not in new_boundary):
                new_boundary[(point0, point1)] = label

        boundary = new_boundary

    return coordinates, volumes, boundary
1491
1492
1493
Note: See TracBrowser for help on using the repository browser.