source: anuga_core/source/anuga/abstract_2d_finite_volumes/domain.py @ 5847

Last change on this file since 5847 was 5847, checked in by steve, 16 years ago

Changed parallel_api so that global mesh only needs to
be constructed on processor 0

File size: 54.9 KB
[3804]1"""Class Domain - 2D triangular domains for finite-volume computations of
2   conservation laws.
3
4
5   Copyright 2004
[4200]6   Ole Nielsen, Stephen Roberts, Duncan Gray
[3804]7   Geoscience Australia
8"""
9
[4829]10from Numeric import allclose, argmax, zeros, Float
[3817]11from anuga.config import epsilon
[5162]12from anuga.config import beta_euler, beta_rk2
[3817]13
[3804]14from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh
15from anuga.abstract_2d_finite_volumes.generic_boundary_conditions\
16     import Boundary
17from anuga.abstract_2d_finite_volumes.generic_boundary_conditions\
18     import File_boundary
19from anuga.abstract_2d_finite_volumes.generic_boundary_conditions\
20     import Dirichlet_boundary
21from anuga.abstract_2d_finite_volumes.generic_boundary_conditions\
22     import Time_boundary
23from anuga.abstract_2d_finite_volumes.generic_boundary_conditions\
24     import Transmissive_boundary
25
26from anuga.abstract_2d_finite_volumes.pmesh2domain import pmesh_to_domain
27from anuga.abstract_2d_finite_volumes.region\
28     import Set_region as region_set_region
29
[4704]30from anuga.utilities.polygon import inside_polygon
31from anuga.abstract_2d_finite_volumes.util import get_textual_float
32
[3804]33import types
[5421]34from time import time as walltime
[3804]35
[4712]36
37
[3804]38class Domain(Mesh):
39
40
41    def __init__(self,
42                 source=None,
43                 triangles=None,
44                 boundary=None,
45                 conserved_quantities=None,
46                 other_quantities=None,
47                 tagged_elements=None,
48                 geo_reference=None,
49                 use_inscribed_circle=False,
50                 mesh_filename=None,
51                 use_cache=False,
52                 verbose=False,
53                 full_send_dict=None,
54                 ghost_recv_dict=None,
55                 processor=0,
[3926]56                 numproc=1,
[3928]57                 number_of_full_nodes=None,
58                 number_of_full_triangles=None):
[3804]59
60
61        """Instantiate generic computational Domain.
62
63        Input:
64          source:    Either a mesh filename or coordinates of mesh vertices.
65                     If it is a filename values specified for triangles will
66                     be overridden.
67          triangles: Mesh connectivity (see mesh.py for more information)
68          boundary:  See mesh.py for more information
69
70          conserved_quantities: List of quantity names entering the
71                                conservation equations
72          other_quantities:     List of other quantity names
73
74          tagged_elements:
75          ...
76
77
78        """
79
80        # Determine whether source is a mesh filename or coordinates
81        if type(source) == types.StringType:
82            mesh_filename = source
83        else:
84            coordinates = source
85
86
87        # In case a filename has been specified, extract content
88        if mesh_filename is not None:
89            coordinates, triangles, boundary, vertex_quantity_dict, \
90                         tagged_elements, geo_reference = \
91                         pmesh_to_domain(file_name=mesh_filename,
92                                         use_cache=use_cache,
93                                         verbose=verbose)
94
95
96        # Initialise underlying mesh structure
[3928]97        Mesh.__init__(self, coordinates, triangles,
98                      boundary=boundary,
99                      tagged_elements=tagged_elements,
100                      geo_reference=geo_reference,
101                      use_inscribed_circle=use_inscribed_circle,
102                      number_of_full_nodes=number_of_full_nodes,
103                      number_of_full_triangles=number_of_full_triangles,
[3804]104                      verbose=verbose)
105
106        if verbose: print 'Initialising Domain'
107        from Numeric import zeros, Float, Int, ones
[4957]108        from quantity import Quantity
[3804]109
110        # List of quantity names entering
111        # the conservation equations
112        if conserved_quantities is None:
113            self.conserved_quantities = []
114        else:
115            self.conserved_quantities = conserved_quantities
116
117        # List of other quantity names
118        if other_quantities is None:
119            self.other_quantities = []
120        else:
121            self.other_quantities = other_quantities
122
123
124        #Build dictionary of Quantity instances keyed by quantity names
125        self.quantities = {}
126
127        #FIXME: remove later - maybe OK, though....
128        for name in self.conserved_quantities:
[4957]129            self.quantities[name] = Quantity(self)
[3804]130        for name in self.other_quantities:
131            self.quantities[name] = Quantity(self)
132
133        #Create an empty list for explicit forcing terms
134        self.forcing_terms = []
135
136        #Setup the ghost cell communication
137        if full_send_dict is None:
138            self.full_send_dict = {}
139        else:
140            self.full_send_dict  = full_send_dict
141
142        # Setup the ghost cell receive dictionary
[3926]143        if ghost_recv_dict is None:
144            self.ghost_recv_dict = {}
[3804]145        else:
[3926]146            self.ghost_recv_dict = ghost_recv_dict
[3804]147
148        self.processor = processor
[3926]149        self.numproc = numproc
[3804]150
[3926]151
[3804]152        # Setup Communication Buffers
153        if verbose: print 'Domain: Set up communication buffers (parallel)'
154        self.nsys = len(self.conserved_quantities)
155        for key in self.full_send_dict:
156            buffer_shape = self.full_send_dict[key][0].shape[0]
157            self.full_send_dict[key].append(zeros( (buffer_shape,self.nsys) ,Float))
158
159
160        for key in self.ghost_recv_dict:
161            buffer_shape = self.ghost_recv_dict[key][0].shape[0]
162            self.ghost_recv_dict[key].append(zeros( (buffer_shape,self.nsys) ,Float))
163
164
165        # Setup cell full flag
166        # =1 for full
167        # =0 for ghost
[3928]168        N = len(self) #number_of_elements
[5242]169        self.number_of_elements = N
[3804]170        self.tri_full_flag = ones(N, Int)
171        for i in self.ghost_recv_dict.keys():
172            for id in self.ghost_recv_dict[i][0]:
173                self.tri_full_flag[id] = 0
174
[3946]175        # Test the assumption that all full triangles are stored before
176        # the ghost triangles.
[5242]177        if not allclose(self.tri_full_flag[:self.number_of_full_nodes],1):
178            print 'WARNING: Not all full triangles are stored before ghost triangles'
[3946]179                       
[3804]180
[4701]181        # Defaults
[5442]182        from anuga.config import max_smallsteps, beta_w, epsilon
[4677]183        from anuga.config import CFL
[4712]184        from anuga.config import timestepping_method
[4677]185        from anuga.config import protect_against_isolated_degenerate_timesteps
[3804]186        self.beta_w = beta_w
187        self.epsilon = epsilon
[4677]188        self.protect_against_isolated_degenerate_timesteps = protect_against_isolated_degenerate_timesteps
189       
[4712]190       
[5442]191        # Maybe get rid of order altogether and use beta_w
[5421]192        # FIXME (Ole): In any case, this should appear in the config file - not here
[3804]193        self.set_default_order(1)
194
195        self.smallsteps = 0
196        self.max_smallsteps = max_smallsteps
197        self.number_of_steps = 0
198        self.number_of_first_order_steps = 0
199        self.CFL = CFL
[4712]200        self.set_timestepping_method(timestepping_method)
[5847]201        self.set_beta(beta_w)
[3804]202        self.boundary_map = None  # Will be populated by set_boundary       
203       
204
[4701]205        # Model time
[3804]206        self.time = 0.0
207        self.finaltime = None
208        self.min_timestep = self.max_timestep = 0.0
[4771]209        self.starttime = 0 # Physical starttime if any
210                           # (0 is 1 Jan 1970 00:00:00)
[4713]211        self.timestep = 0.0
212        self.flux_timestep = 0.0
[3804]213
[5421]214        self.last_walltime = walltime()
215       
[4701]216        # Monitoring
217        self.quantities_to_be_monitored = None
218        self.monitor_polygon = None
219        self.monitor_time_interval = None               
[4704]220        self.monitor_indices = None
[3804]221
[4701]222        # Checkpointing and storage
[3804]223        from anuga.config import default_datadir
224        self.datadir = default_datadir
[3846]225        self.simulation_name = 'domain'
[3804]226        self.checkpoint = False
227
[4771]228        # To avoid calculating the flux across each edge twice,
229        # keep an integer (boolean) array, to be used during the flux
230        # calculation
231        N = len(self) # Number_of_triangles
[3804]232        self.already_computed_flux = zeros((N, 3), Int)
233
[4771]234        # Storage for maximal speeds computed for each triangle by
235        # compute_fluxes
[4829]236        # This is used for diagnostics only (reset at every yieldstep)
[4200]237        self.max_speed = zeros(N, Float)
238
[3804]239        if mesh_filename is not None:
240            # If the mesh file passed any quantity values,
241            # initialise with these values.
242            if verbose: print 'Domain: Initialising quantity values'
243            self.set_quantity_vertices_dict(vertex_quantity_dict)
244
245
246        if verbose: print 'Domain: Done'
247
248
249
250
251
252
[4771]253    #---------------------------   
254    # Public interface to Domain
255    #---------------------------       
[3804]256    def get_conserved_quantities(self, vol_id, vertex=None, edge=None):
257        """Get conserved quantities at volume vol_id
258
259        If vertex is specified use it as index for vertex values
260        If edge is specified use it as index for edge values
261        If neither is specified use centroid values
262        If both are specified an exception is raised
263
264        Return value: Vector of length == number_of_conserved quantities
265
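        Example (a sketch; vol_id and edge index are illustrative only):

            q = domain.get_conserved_quantities(vol_id=0, edge=1)
            # q[i] corresponds to self.conserved_quantities[i]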
266        """
267
268        from Numeric import zeros, Float
269
270        if not (vertex is None or edge is None):
271            msg = 'Values for both vertex and edge were specified. '
272            msg += 'Only one (or none) is allowed.'
273            raise msg
274
275        q = zeros( len(self.conserved_quantities), Float)
276
277        for i, name in enumerate(self.conserved_quantities):
278            Q = self.quantities[name]
279            if vertex is not None:
280                q[i] = Q.vertex_values[vol_id, vertex]
281            elif edge is not None:
282                q[i] = Q.edge_values[vol_id, edge]
283            else:
284                q[i] = Q.centroid_values[vol_id]
285
286        return q
287
288    def set_time(self, time=0.0):
289        """Set the model time (seconds)"""
[5631]290        # FIXME: this is setting the relative time
291        # Note that get_time and set_time are now not symmetric
[3804]292
293        self.time = time
294
295    def get_time(self):
[5631]296        """Get the absolute model time (seconds)"""
[3804]297
[5631]298        return self.time + self.starttime
[3804]299
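    # Note (illustrative example): set_time() sets the *relative* model time,
    # whereas get_time() returns absolute time. E.g. with starttime = 3600.0,
    # calling set_time(10.0) makes get_time() return 3610.0.
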
[5847]300    def set_beta(self,beta):
301        """Set default beta for limiting
302        """
303
304        self.beta = beta
305        for name in self.quantities:
306            #print 'setting beta for quantity ',name
307            Q = self.quantities[name]
308            Q.set_beta(beta)
309
310    def get_beta(self):
311        """Get default beta for limiting
312        """
313
314        return self.beta
315
[4713]316    def set_default_order(self, n):
317        """Set default (spatial) order to either 1 or 2
318        """
319
320        msg = 'Default order must be either 1 or 2. I got %s' %n
321        assert n in [1,2], msg
322
323        self.default_order = n
324        self._order_ = self.default_order
[5162]325
326        if self.default_order == 1:
[5306]327            pass
328            #self.set_timestepping_method('euler')
[5162]329            #self.set_all_limiters(beta_euler)
330
331        if self.default_order == 2:
[5306]332            pass
333            #self.set_timestepping_method('rk2')
[5162]334            #self.set_all_limiters(beta_rk2)
[4713]335       
336
[3804]337    def set_quantity_vertices_dict(self, quantity_dict):
338        """Set values for named quantities at vertices.
339        The input is a dictionary keyed by quantity name.
340
341        quantity_dict: Dictionary of values. Each value is a compatible
342        list, Numeric array, constant or function (see Quantity.set_values)
343
344        The values will be stored in elements following their
345        internal ordering.
346
347        """
[4701]348       
349        # FIXME: Could we name this a bit more intuitively
350        # E.g. set_quantities_from_dictionary
[3804]351        for key in quantity_dict.keys():
352            self.set_quantity(key, quantity_dict[key], location='vertices')
353
354
355    def set_quantity(self, name, *args, **kwargs):
356        """Set values for named quantity
357
358
359        One keyword argument is documented here:
360        expression = None, # Arbitrary expression
361
362        expression:
363          Arbitrary expression involving quantity names
364
365        See Quantity.set_values for further documentation.
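
        Examples (sketches; quantity names and values are illustrative only):

            domain.set_quantity('elevation', -10.0)               # Constant value
            domain.set_quantity('stage', expression='elevation + 0.5')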
366        """
367
[4771]368        # Do the expression stuff
[3804]369        if kwargs.has_key('expression'):
370            expression = kwargs['expression']
371            del kwargs['expression']
372
373            Q = self.create_quantity_from_expression(expression)
374            kwargs['quantity'] = Q
375
[4771]376        # Assign values
[3804]377        self.quantities[name].set_values(*args, **kwargs)
378
379
[3963]380    def get_quantity_names(self):
381        """Get a list of all the quantity names that this domain is aware of.
382        Any value in the result should be a valid input to get_quantity.
383        """
384        return self.quantities.keys()
385
[3804]386    def get_quantity(self, name, location='vertices', indices = None):
[5729]387        """Get pointer to quantity object.
[3804]388
389        name: Name of quantity
390
391        See methods inside the quantity object for more options
392
393        FIXME: clean input args
394        """
395
396        return self.quantities[name] #.get_values( location, indices = indices)
397
398
399
400    def create_quantity_from_expression(self, expression):
401        """Create new quantity from other quantities using arbitrary expression
402
403        Combine existing quantities in domain using expression and return
404        result as a new quantity.
405
406        Note, the new quantity could e.g. be used in set_quantity
407
408        Valid expressions are limited to operators defined in class Quantity
409
[4020]410        Examples creating derived quantities:
[3804]411
[4020]412            Depth = domain.create_quantity_from_expression('stage-elevation')
[3804]413
[4897]414            exp = '(xmomentum*xmomentum + ymomentum*ymomentum)**0.5'       
[4020]415            Absolute_momentum = domain.create_quantity_from_expression(exp)
416
[3804]417        """
418
[3829]419        from anuga.abstract_2d_finite_volumes.util import\
420             apply_expression_to_dictionary
421       
[3804]422        return apply_expression_to_dictionary(expression, self.quantities)
423
424
425
426    def set_boundary(self, boundary_map):
427        """Associate boundary objects with tagged boundary segments.
428
429        Input boundary_map is a dictionary of boundary objects keyed
430        by symbolic tags to be matched against tags in the internal dictionary
431        self.boundary.
432
433        As a result one pointer to a boundary object is stored for each
434        boundary segment in the list self.boundary_objects.
435        Several entries may point to the same boundary object.
436
437        Schematically the mapping is from two dictionaries to one list
438        where the index is used as pointer to the boundary_values arrays
439        within each quantity.
440
441        self.boundary:          (vol_id, edge_id): tag
442        boundary_map (input):   tag: boundary_object
443        ----------------------------------------------
444        self.boundary_objects:  ((vol_id, edge_id), boundary_object)
445
446
447        Pre-condition:
448          self.boundary has been built.
449
450        Post-condition:
451          self.boundary_objects is built
452
453        If a tag from the domain doesn't appear in the input dictionary an
454        exception is raised.
455        However, if a tag is not used in the domain, no error is thrown.
456        FIXME: This would lead to implementation of a
457        default boundary condition
458
459        Note: If a segment is listed in the boundary dictionary and if it is
460        not None, it *will* become a boundary -
461        even if there is a neighbouring triangle.
462        This would be the case for internal boundaries
463
464        Boundary objects that are None will be skipped.
465
[3829]466        If a boundary_map has already been set
467        (i.e. set_boundary has been called before), the old boundary map
468        will be updated with new values. The new map need not define all
469        boundary tags, and can thus change only those that are needed.
470
[3804]471        FIXME: If set_boundary is called multiple times and if Boundary
472        object is changed into None, the neighbour structure will not be
473        restored!!!
474
475
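        Example (a sketch; the tag names are illustrative and must match tags
        present in self.boundary; the Dirichlet values assume three conserved
        quantities):

            Bd = Dirichlet_boundary([0.2, 0.0, 0.0])
            Bt = Transmissive_boundary(domain)
            domain.set_boundary({'left': Bd, 'right': Bt, 'top': None})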
476        """
477
[3829]478        if self.boundary_map is None:
479            # This the first call to set_boundary. Store
480            # map for later updates and for use with boundary_stats.
481            self.boundary_map = boundary_map
482        else:   
483            # This is a modification of an already existing map
484            # Update map and proceed normally
[3804]485
[3829]486            for key in boundary_map.keys():
487                self.boundary_map[key] = boundary_map[key]
488               
489       
[4771]490        # FIXME (Ole): Try to remove the sorting and fix test_mesh.py
[3804]491        x = self.boundary.keys()
492        x.sort()
493
[4771]494        # Loop through edges that lie on the boundary and associate them with
495        # callable boundary objects depending on their tags
[3829]496        self.boundary_objects = []       
[3804]497        for k, (vol_id, edge_id) in enumerate(x):
498            tag = self.boundary[ (vol_id, edge_id) ]
499
[3829]500            if self.boundary_map.has_key(tag):
[4771]501                B = self.boundary_map[tag]  # Get callable boundary object
[3804]502
503                if B is not None:
504                    self.boundary_objects.append( ((vol_id, edge_id), B) )
[3829]505                    self.neighbours[vol_id, edge_id] = \
506                                            -len(self.boundary_objects)
[3804]507                else:
508                    pass
509                    #FIXME: Check and perhaps fix neighbour structure
510
511
512            else:
513                msg = 'ERROR (domain.py): Tag "%s" has not been ' %tag
514                msg += 'bound to a boundary object.\n'
515                msg += 'All boundary tags defined in domain must appear '
[4679]516                msg += 'in set_boundary.\n'
[3804]517                msg += 'The tags are: %s' %self.get_boundary_tags()
518                raise msg
519
520
521    def set_region(self, *args, **kwargs):
522        """
523        This method is used to set quantities based on a regional tag.
524       
525        It is most often called with the following parameters:
526        (self, tag, quantity, X, location='vertices')
527        tag: the name of the regional tag used to specify the region
528        quantity: Name of quantity to change
529        X: const or function - how the quantity is changed
530        location: Where values are to be stored.
531            Permissible options are: vertices, centroid and unique vertices
532
533        A callable region class or a list of callable region classes
534        can also be passed into this function.
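
        Example (a sketch; the tag 'dam' and the value are illustrative only):

            domain.set_region('dam', 'elevation', 3.0, location='vertices')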
535        """
[4771]536
[3804]537        if len(args) == 1:
538            self._set_region(*args, **kwargs)
539        else:
[4771]540            # Assume it is arguments for the region.set_region function
[3804]541            func = region_set_region(*args, **kwargs)
542            self._set_region(func)
543           
544       
545    def _set_region(self, functions):
546        # The order of functions in the list is used.
547        if type(functions) not in [types.ListType,types.TupleType]:
548            functions = [functions]
549        for function in functions:
550            for tag in self.tagged_elements.keys():
551                function(tag, self.tagged_elements[tag], self)
552
553
[4701]554
555
556    def set_quantities_to_be_monitored(self, q,
557                                       polygon=None,
558                                       time_interval=None):
559        """Specify which quantities will be monitored for extrema.
560
561        q must be either:
[4735]562          - the name of a quantity or a derived quantity such as 'stage-elevation'
[4701]563          - a list of quantity names
564          - None
565
566        In the first two cases, the named quantities will be monitored at
567        each internal timestep
568       
569        If q is None, monitoring will be switched off altogether.
570
571        polygon (if specified) will restrict monitoring to triangles inside polygon.
572        If omitted all triangles will be included.
573
574        time_interval (if specified) will restrict monitoring to time steps in
575        that interval. If omitted all timesteps will be included.
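
        Example (a sketch; quantity names, polygon and times are illustrative):

            domain.set_quantities_to_be_monitored(
                ['stage', 'stage-elevation'],
                polygon=[[0,0], [10,0], [10,10], [0,10]],
                time_interval=[0.0, 3600.0])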
576        """
577
[4702]578        from anuga.abstract_2d_finite_volumes.util import\
579             apply_expression_to_dictionary       
580
[4701]581        if q is None:
582            self.quantities_to_be_monitored = None
583            self.monitor_polygon = None
[4704]584            self.monitor_time_interval = None
585            self.monitor_indices = None           
[4701]586            return
587
588        if isinstance(q, basestring):
589            q = [q] # Turn argument into a list
590
[4702]591        # Check correctness and initialise
592        self.quantities_to_be_monitored = {}
[4701]593        for quantity_name in q:
594            msg = 'Quantity %s is not a valid conserved quantity'\
595                  %quantity_name
596           
597
[4702]598            if not quantity_name in self.quantities:
599                # See if this expression is valid
600                apply_expression_to_dictionary(quantity_name, self.quantities)
601
[4704]602            # Initialise extrema information
603            info_block = {'min': None,          # Min value
604                          'max': None,          # Max value
605                          'min_location': None, # Argmin (x, y)
606                          'max_location': None, # Argmax (x, y)
607                          'min_time': None,     # Argmin (t)
608                          'max_time': None}     # Argmax (t)
[4702]609           
[4704]610            self.quantities_to_be_monitored[quantity_name] = info_block
[4702]611
[4704]612           
613
[4701]614        if polygon is not None:
[4704]615            # Check input
616            if isinstance(polygon, basestring):
[4701]617
[4704]618                # Check if multiple quantities were accidentally
619                # given as separate argument rather than a list.
620                msg = 'Multiple quantities must be specified in a list. '
621                msg += 'Not as multiple arguments. '
622                msg += 'I got "%s" as a second argument' %polygon
623               
624                if polygon in self.quantities:
625                    raise Exception, msg
626               
627                try:
628                    apply_expression_to_dictionary(polygon, self.quantities)
629                except:
630                    # At least polygon wasn't an expression involving quantities
631                    pass
632                else:
633                    raise Exception, msg
634
635                # In any case, we don't allow polygon to be a string
636                msg = 'argument "polygon" must not be a string: '
637                msg += 'I got polygon=\'%s\' ' %polygon
638                raise Exception, msg
639
640
641            # Get indices for centroids that are inside polygon
642            points = self.get_centroid_coordinates(absolute=True)
643            self.monitor_indices = inside_polygon(points, polygon)
644           
645
[4701]646        if time_interval is not None:
[4704]647            assert len(time_interval) == 2
[4701]648
[4702]649       
[4701]650        self.monitor_polygon = polygon
651        self.monitor_time_interval = time_interval       
652       
653
[4771]654    #--------------------------   
655    # Miscellaneous diagnostics
656    #--------------------------       
[3804]657    def check_integrity(self):
658        Mesh.check_integrity(self)
659
660        for quantity in self.conserved_quantities:
661            msg = 'Conserved quantities must be a subset of all quantities'
662            assert quantity in self.quantities, msg
663
664        ##assert hasattr(self, 'boundary_objects')
[4771]665           
[3804]666
[4204]667    def write_time(self, track_speeds=False):
668        print self.timestepping_statistics(track_speeds)
[3804]669
670
[4836]671    def timestepping_statistics(self,
672                                track_speeds=False,
673                                triangle_id=None):
[3804]674        """Return string with time stepping statistics for printing or logging
[4200]675
[4771]676        Optional boolean keyword track_speeds decides whether to report
677        location of smallest timestep as well as a histogram and percentile
[4836]678        report.
679
680        Optional keyword triangle_id can be used to specify a particular
681        triangle rather than the one with the largest speed.
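
        Example usage (a sketch; yieldstep and finaltime values are illustrative):

            for t in domain.evolve(yieldstep=60, finaltime=3600):
                print domain.timestepping_statistics(track_speeds=True)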
[3804]682        """
683
[4204]684        from anuga.utilities.numerical_tools import histogram, create_bins
685       
686
[4827]687        # qwidth determines the text field used for quantities
688        qwidth = self.qwidth = 12
689
690
[3804]691        msg = ''
[5421]692        #if self.min_timestep == self.max_timestep:
693        #    msg += 'Time = %.4f, delta t = %.8f, steps=%d (%d)'\
694        #           %(self.time, self.min_timestep, self.number_of_steps,
695        #             self.number_of_first_order_steps)
696        #elif self.min_timestep > self.max_timestep:
697        #    msg += 'Time = %.4f, steps=%d (%d)'\
698        #           %(self.time, self.number_of_steps,
699        #             self.number_of_first_order_steps)
700        #else:
701        #    msg += 'Time = %.4f, delta t in [%.8f, %.8f], steps=%d (%d)'\
702        #           %(self.time, self.min_timestep,
703        #             self.max_timestep, self.number_of_steps,
704        #             self.number_of_first_order_steps)
[5631]705
706        model_time = self.get_time()
[3804]707        if self.min_timestep == self.max_timestep:
[5421]708            msg += 'Time = %.4f, delta t = %.8f, steps=%d'\
[5631]709                   %(model_time, self.min_timestep, self.number_of_steps)
[3804]710        elif self.min_timestep > self.max_timestep:
[5421]711            msg += 'Time = %.4f, steps=%d'\
[5631]712                   %(model_time, self.number_of_steps)
[3804]713        else:
[5421]714            msg += 'Time = %.4f, delta t in [%.8f, %.8f], steps=%d'\
[5631]715                   %(model_time, self.min_timestep,
[5421]716                     self.max_timestep, self.number_of_steps)
717                                         
718        msg += ' (%ds)' %(walltime() - self.last_walltime)   
719        self.last_walltime = walltime()           
720       
[4204]721        if track_speeds is True:
[4200]722            msg += '\n'
[4204]723
724
[4771]725            # Setup 10 bins for speed histogram
[4204]726            bins = create_bins(self.max_speed, 10)
727            hist = histogram(self.max_speed, bins)
728
729            msg += '------------------------------------------------\n'
[4771]730            msg += '  Speeds in [%f, %f]\n' %(min(self.max_speed),
731                                              max(self.max_speed))
[4204]732            msg += '  Histogram:\n'
733
734            hi = bins[0]
735            for i, count in enumerate(hist):
736                lo = hi
737                if i+1 < len(bins):
[4771]738                    # Open upper interval               
[4204]739                    hi = bins[i+1]
740                    msg += '    [%f, %f[: %d\n' %(lo, hi, count)               
741                else:
[4771]742                    # Closed upper interval
[4204]743                    hi = max(self.max_speed)
744                    msg += '    [%f, %f]: %d\n' %(lo, hi, count)
745
746
747            N = len(self.max_speed)
748            if N > 10:
749                msg += '  Percentiles (10%):\n'
750                speed = self.max_speed.tolist()
751                speed.sort()
752
753                k = 0
754                lower = min(speed)
755                for i, a in enumerate(speed):       
[4771]756                    if i % (N/10) == 0 and i != 0:
757                        # For every 10% of the sorted speeds
[4204]758                        msg += '    %d speeds in [%f, %f]\n' %(i-k, lower, a)
759                        lower = a
760                        k = i
761                       
762                msg += '    %d speeds in [%f, %f]\n'\
763                       %(N-k, lower, max(speed))                   
764               
765                     
766
767               
768           
[4200]769            # Find index of largest computed flux speed
[4836]770            if triangle_id is None:
771                k = self.k = argmax(self.max_speed)
772            else:
773                errmsg = 'Triangle_id %d does not exist in mesh: %s' %(triangle_id,
774                                                                    str(self))
775                assert 0 <= triangle_id < len(self), errmsg
776                k = self.k = triangle_id
777           
[4200]778
[4201]779            x, y = self.get_centroid_coordinates()[k]
[4702]780            radius = self.get_radii()[k]
781            area = self.get_areas()[k]     
782            max_speed = self.max_speed[k]           
[4200]783
[4376]784            msg += '  Triangle #%d with centroid (%.4f, %.4f), ' %(k, x, y)
[4702]785            msg += 'area = %.4f and radius = %.4f ' %(area, radius)
[4836]786            if triangle_id is None:
787                msg += 'had the largest computed speed: %.6f m/s ' %(max_speed)
788            else:
789                msg += 'had computed speed: %.6f m/s ' %(max_speed)
790               
[4702]791            if max_speed > 0.0:
[4376]792                msg += '(timestep=%.6f)\n' %(radius/max_speed)
793            else:
[4702]794                msg += '(timestep=%.6f)\n' %(0)     
[4201]795           
[4827]796            # Report all quantity values at vertices, edges and centroid
797            msg += '    Quantity\n'
[4835]798            msg += '    ------------\n'
[4201]799            for name in self.quantities:
800                q = self.quantities[name]
[4677]801
802                V = q.get_values(location='vertices', indices=[k])[0]
[4827]803                E = q.get_values(location='edges', indices=[k])[0]
[4677]804                C = q.get_values(location='centroids', indices=[k])                 
[4201]805               
[4827]806                s  = '    %s: vertex_values =  %.4f,\t %.4f,\t %.4f\n'\
807                     %(name.ljust(qwidth), V[0], V[1], V[2])
[4200]808
[4827]809                s += '    %s: edge_values =    %.4f,\t %.4f,\t %.4f\n'\
810                     %(name.ljust(qwidth), E[0], E[1], E[2])
811
812                s += '    %s: centroid_value = %.4f\n'\
813                     %(name.ljust(qwidth), C[0])                               
814
[4201]815                msg += s
[4200]816
[3804]817        return msg
818
819
820    def write_boundary_statistics(self, quantities = None, tags = None):
821        print self.boundary_statistics(quantities, tags)
822
823    def boundary_statistics(self, quantities = None, tags = None):
824        """Output statistics about boundary forcing at each timestep
825
826
827        Input:
[4771]828          quantities: either None, a string or a list of strings naming the
829                      quantities to be reported
830          tags:       either None, a string or a list of strings naming the
831                      tags to be reported
[3804]832
833
834        Example output:
835        Tag 'wall':
836            stage in [2, 5.5]
837            xmomentum in []
838            ymomentum in []
839        Tag 'ocean'
840
841
[4771]842        If quantities are specified only report on those. Otherwise take all
843        conserved quantities.
[3804]844        If tags are specified only report on those, otherwise take all tags.
845
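        Example usage (a sketch; 'stage' and 'wall' are illustrative only):

            print domain.boundary_statistics(quantities='stage', tags=['wall'])
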
846        """
847
[4704]848        # Input checks
[3804]849        import types, string
850
851        if quantities is None:
852            quantities = self.conserved_quantities
853        elif type(quantities) == types.StringType:
854            quantities = [quantities] #Turn it into a list
855
856        msg = 'Keyword argument quantities must be either None, '
857        msg += 'string or list. I got %s' %str(quantities)
858        assert type(quantities) == types.ListType, msg
859
860
861        if tags is None:
862            tags = self.get_boundary_tags()
863        elif type(tags) == types.StringType:
864            tags = [tags] #Turn it into a list
865
866        msg = 'Keyword argument tags must be either None, '
867        msg += 'string or list. I got %s' %str(tags)
868        assert type(tags) == types.ListType, msg
869
[4704]870        # Determine width of longest quantity name (for cosmetic purposes)
[3804]871        maxwidth = 0
872        for name in quantities:
873            w = len(name)
874            if w > maxwidth:
875                maxwidth = w
876
[4771]877        # Output statistics
[3804]878        msg = 'Boundary values at time %.4f:\n' %self.time
879        for tag in tags:
880            msg += '    %s:\n' %tag
881
882            for name in quantities:
883                q = self.quantities[name]
884
[4704]885                # Find range of boundary values for tag and q
[3804]886                maxval = minval = None
887                for i, ((vol_id, edge_id), B) in\
888                        enumerate(self.boundary_objects):
889                    if self.boundary[(vol_id, edge_id)] == tag:
890                        v = q.boundary_values[i]
891                        if minval is None or v < minval: minval = v
892                        if maxval is None or v > maxval: maxval = v
893
894                if minval is None or maxval is None:
895                    msg += '        Sorry no information available about' +\
896                           ' tag %s and quantity %s\n' %(tag, name)
897                else:
898                    msg += '        %s in [%12.8f, %12.8f]\n'\
899                           %(string.ljust(name, maxwidth), minval, maxval)
900
901
902        return msg
903
904
[4704]905    def update_extrema(self):
906        """Update extrema if requested by set_quantities_to_be_monitored.
907        This data is used for reporting e.g. by running
908        print domain.quantity_statistics()
909        and may also be stored in output files (see data_manager in shallow_water)
910        """
[4701]911
[4711]912        # Define a tolerance for extremum computations
913        epsilon = 1.0e-6 # FIXME: Use single_precision from anuga.config instead
914       
[4704]915        if self.quantities_to_be_monitored is None:
916            return
917
918        # Observe time interval restriction if any
919        if self.monitor_time_interval is not None and\
920               (self.time < self.monitor_time_interval[0] or\
921               self.time > self.monitor_time_interval[1]):
922            return
[4711]923
[4704]924        # Update extrema for each specified quantity subject to
925        # polygon restriction (via monitor_indices).
926        for quantity_name in self.quantities_to_be_monitored:
927
928            if quantity_name in self.quantities:
929                Q = self.get_quantity(quantity_name)
930            else:
931                Q = self.create_quantity_from_expression(quantity_name)
932
933            info_block = self.quantities_to_be_monitored[quantity_name]
934
[4711]935            # Update maximum
[4771]936            # (n > None is always True, but we check explicitly because
937            # of the epsilon)
[4704]938            maxval = Q.get_maximum_value(self.monitor_indices)
[4711]939            if info_block['max'] is None or\
940                   maxval > info_block['max'] + epsilon:
[4704]941                info_block['max'] = maxval
942                maxloc = Q.get_maximum_location()
943                info_block['max_location'] = maxloc
944                info_block['max_time'] = self.time
945
946
[4711]947            # Update minimum
[4704]948            minval = Q.get_minimum_value(self.monitor_indices)
949            if info_block['min'] is None or\
[4711]950                   minval < info_block['min'] - epsilon:
[4704]951                info_block['min'] = minval               
952                minloc = Q.get_minimum_location()
953                info_block['min_location'] = minloc
954                info_block['min_time'] = self.time               
955       
956
957
958    def quantity_statistics(self, precision = '%.4f'):
[4771]959        """Return string with statistics about quantities for
960        printing or logging
[4701]961
962        Quantities reported are specified through method
963
964           set_quantities_to_be_monitored
965           
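        Example usage (a sketch; arguments are illustrative only):

            domain.set_quantities_to_be_monitored('stage')
            for t in domain.evolve(yieldstep=60, finaltime=3600):
                print domain.quantity_statistics(precision='%.2f')
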
966        """
967
[4704]968        maxlen = 128 # Max length of polygon string representation
[4701]969
[4771]970        # Output statistics
[4704]971        msg = 'Monitored quantities at time %.4f:\n' %self.time
972        if self.monitor_polygon is not None:
973            p_str = str(self.monitor_polygon)
974            msg += '- Restricted by polygon: %s' %p_str[:maxlen]
975            if len(p_str) >= maxlen:
976                msg += '...\n'
977            else:
978                msg += '\n'
[4701]979
980
[4704]981        if self.monitor_time_interval is not None:
[4771]982            msg += '- Restricted by time interval: %s\n'\
983                   %str(self.monitor_time_interval)
[4704]984            time_interval_start = self.monitor_time_interval[0]
985        else:
986            time_interval_start = 0.0
[4701]987
[4704]988           
989        for quantity_name, info in self.quantities_to_be_monitored.items():
990            msg += '    %s:\n' %quantity_name
991
992            msg += '      values since time = %.2f in [%s, %s]\n'\
993                   %(time_interval_start,
994                     get_textual_float(info['min'], precision),
[4771]995                     get_textual_float(info['max'], precision))       
[4704]996                     
997            msg += '      minimum attained at time = %s, location = %s\n'\
998                   %(get_textual_float(info['min_time'], precision),
[4771]999                     get_textual_float(info['min_location'], precision))
1000           
[4704]1001
1002            msg += '      maximum attained at time = %s, location = %s\n'\
1003                   %(get_textual_float(info['max_time'], precision),
[4771]1004                     get_textual_float(info['max_location'], precision))
[4704]1005
1006
1007        return msg
1008
[4712]1009    def get_timestepping_method(self):
1010        return self.timestepping_method
[4704]1011
[4712]1012    def set_timestepping_method(self,timestepping_method):
1013       
[4713]1014        if timestepping_method in ['euler', 'rk2', 'rk3']:
[4712]1015            self.timestepping_method = timestepping_method
1016            return
1017
1018        msg = '%s is not a valid timestepping method' % timestepping_method
1019        raise Exception, msg
1020
[3804]1021    def get_name(self):
[3846]1022        return self.simulation_name
[3804]1023
1024    def set_name(self, name):
[3846]1025        """Assign a name to this simulation.
1026        This will be used to identify the output sww file.
[3804]1027
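        Example (a sketch; the name is illustrative only):

            domain.set_name('flood_scenario')  # Results stored as flood_scenario.sww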
[3846]1028        """
1029        if name.endswith('.sww'):
[3850]1030            name = name[:-4]
[3846]1031           
1032        self.simulation_name = name
1033
[3804]1034    def get_datadir(self):
1035        return self.datadir
1036
1037    def set_datadir(self, name):
1038        self.datadir = name
1039
[4712]1040    def get_starttime(self):
1041        return self.starttime
[3804]1042
[4712]1043    def set_starttime(self, time):
1044        self.starttime = float(time)       
[3804]1045
[4712]1046
1047
[4771]1048    #--------------------------
1049    # Main components of evolve
1050    #--------------------------   
[3804]1051
1052    def evolve(self,
1053               yieldstep = None,
1054               finaltime = None,
1055               duration = None,
1056               skip_initial_step = False):
1057        """Evolve model through time starting from self.starttime.
1058
1059
1060        yieldstep: Interval between yields where results are stored,
1061                   statistics written and domain inspected or
1062                   possibly modified. If omitted the internal predefined
1063                   max timestep is used.
1064                   Internally, smaller timesteps may be taken.
1065
1066        duration: Duration of simulation
1067
[4565]1068        finaltime: Time where simulation should end. This is currently
1069        relative time.  So it's the same as duration.
[3804]1070
1071        If both duration and finaltime are given an exception is thrown.
1072
1073
1074        skip_initial_step: Boolean flag that decides whether the first
1075        yield step is skipped or not. This is useful for example to avoid
1076        duplicate steps when multiple evolve processes are dovetailed.
1077
1078
1079        Evolve is implemented as a generator and is to be called as such, e.g.
1080
1081        for t in domain.evolve(yieldstep, finaltime):
1082            <Do something with domain and t>
1083
1084
1085        All times are given in seconds
1086
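        A typical driver loop (a sketch; yieldstep and finaltime values are
        illustrative only):

            for t in domain.evolve(yieldstep=60, finaltime=3600):
                domain.write_time()                  # Print timestepping statistics
                domain.write_boundary_statistics()
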
1087        """
1088
1089        from anuga.config import min_timestep, max_timestep, epsilon
1090
[4704]1091        # FIXME: Maybe lump into a larger check prior to evolving
[3817]1092        msg = 'Boundary tags must be bound to boundary objects before '
1093        msg += 'evolving system, '
[3804]1094        msg += 'e.g. using the method set_boundary.\n'
1095        msg += 'This system has the boundary tags %s '\
1096               %self.get_boundary_tags()
1097        assert hasattr(self, 'boundary_objects'), msg
1098
1099
1100        if yieldstep is None:
1101            yieldstep = max_timestep
1102        else:
1103            yieldstep = float(yieldstep)
1104
1105        self._order_ = self.default_order
1106
1107
1108        if finaltime is not None and duration is not None:
[4704]1109            # print 'F', finaltime, duration
[3804]1110            msg = 'Only one of finaltime and duration may be specified'
1111            raise msg
1112        else:
1113            if finaltime is not None:
1114                self.finaltime = float(finaltime)
1115            if duration is not None:
1116                self.finaltime = self.starttime + float(duration)
1117
1118
1119
[4829]1120        N = len(self) # Number of triangles
[4704]1121        self.yieldtime = 0.0 # Track time between 'yields'
[3804]1122
[4704]1123        # Initialise interval of timestep sizes (for reporting only)
[3804]1124        self.min_timestep = max_timestep
1125        self.max_timestep = min_timestep
1126        self.number_of_steps = 0
1127        self.number_of_first_order_steps = 0
1128
[4829]1129
[4704]1130        # Update ghosts
[3804]1131        self.update_ghosts()
1132
[4704]1133        # Initial update of vertex and edge values
[3804]1134        self.distribute_to_vertices_and_edges()
1135
[4704]1136        # Update extrema if necessary (for reporting)
1137        self.update_extrema()
1138       
1139        # Initial update boundary values
[3804]1140        self.update_boundary()
1141
[4704]1142        # Or maybe restore from latest checkpoint
[3804]1143        if self.checkpoint is True:
1144            self.goto_latest_checkpoint()
1145
1146        if skip_initial_step is False:
[4704]1147            yield(self.time)  # Yield initial values
[3804]1148
1149        while True:
1150
[4712]1151            # Evolve One Step, using appropriate timestepping method
1152            if self.get_timestepping_method() == 'euler':
1153                self.evolve_one_euler_step(yieldstep,finaltime)
1154               
1155            elif self.get_timestepping_method() == 'rk2':
[4713]1156                self.evolve_one_rk2_step(yieldstep,finaltime)
[3804]1157
[4713]1158            elif self.get_timestepping_method() == 'rk3':
1159                self.evolve_one_rk3_step(yieldstep,finaltime)               
1160           
[4704]1161            # Update extrema if necessary (for reporting)
1162            self.update_extrema()           
1163
[3804]1164
1165            self.yieldtime += self.timestep
1166            self.number_of_steps += 1
1167            if self._order_ == 1:
1168                self.number_of_first_order_steps += 1
1169
[4704]1170            # Yield results
[3804]1171            if finaltime is not None and self.time >= finaltime-epsilon:
1172
1173                if self.time > finaltime:
[4704]1174                    # FIXME (Ole, 30 April 2006): Do we need this check?
[4771]1175                    # Probably not (Ole, 18 September 2008). Now changed to
1176                    # Exception
1177                    msg = 'WARNING (domain.py): time overshot finaltime. '
1178                    msg += 'Contact Ole.Nielsen@ga.gov.au'
[4735]1179                    raise Exception, msg
1180                   
[3804]1181
1182                # Yield final time and stop
1183                self.time = finaltime
1184                yield(self.time)
1185                break
1186
1187
1188            if self.yieldtime >= yieldstep:
1189                # Yield (intermediate) time and allow inspection of domain
1190
1191                if self.checkpoint is True:
1192                    self.store_checkpoint()
1193                    self.delete_old_checkpoints()
1194
[3817]1195                # Pass control on to outer loop for more specific actions
[3804]1196                yield(self.time)
1197
1198                # Reinitialise
1199                self.yieldtime = 0.0
1200                self.min_timestep = max_timestep
1201                self.max_timestep = min_timestep
1202                self.number_of_steps = 0
1203                self.number_of_first_order_steps = 0
[4829]1204                self.max_speed = zeros(N, Float)
[3804]1205
[4712]1206    def evolve_one_euler_step(self, yieldstep, finaltime):
[4721]1207        """
1208        One Euler time step, where E(h) denotes a single forward Euler
1209        update with step size h:  Q^{n+1} = E(h) Q^n
1210        """
[4712]1211
[5666]1212        # Compute fluxes across each element edge
[4712]1213        self.compute_fluxes()
1214
[5666]1215        # Update timestep to fit yieldstep and finaltime
[4712]1216        self.update_timestep(yieldstep, finaltime)
1217
[5666]1218        # Update conserved quantities
[4712]1219        self.update_conserved_quantities()
1220
[5666]1221        # Update ghosts
[4712]1222        self.update_ghosts()
1223
[5847]1224
1225        # Update time
1226        self.time += self.timestep
1227
[5666]1228        # Update vertex and edge values
[4712]1229        self.distribute_to_vertices_and_edges()
1230
[5666]1231        # Update boundary values
[4712]1232        self.update_boundary()
1233
1234
1235       
1236
1237
[4713]1238    def evolve_one_rk2_step(self, yieldstep, finaltime):
[4721]1239        """
1240        One 2nd order RK timestep
1241        Q^{n+1} = 0.5 Q^n + 0.5 E(h)^2 Q^n
1242        """
[4712]1243
[5666]1244        # Save initial conserved quantity values
[4712]1245        self.backup_conserved_quantities()           
1246
1247        #--------------------------------------
[5666]1248        # First euler step
[4712]1249        #--------------------------------------
1250
[5666]1251        # Compute fluxes across each element edge
[4712]1252        self.compute_fluxes()
1253
[5666]1254        # Update timestep to fit yieldstep and finaltime
[4712]1255        self.update_timestep(yieldstep, finaltime)
1256
[5666]1257        # Update conserved quantities
[4712]1258        self.update_conserved_quantities()
1259
[5666]1260        # Update ghosts
[4712]1261        self.update_ghosts()
1262
[5847]1263        # Update time
1264        self.time += self.timestep
1265
[5666]1266        # Update vertex and edge values
[4712]1267        self.distribute_to_vertices_and_edges()
1268
[5666]1269        # Update boundary values
[4712]1270        self.update_boundary()
1271
1272        #------------------------------------
[5666]1273        # Second Euler step
[4712]1274        #------------------------------------
1275           
[5666]1276        # Compute fluxes across each element edge
[4712]1277        self.compute_fluxes()
1278
[5666]1279        # Update conserved quantities
[4712]1280        self.update_conserved_quantities()
1281
1282        #------------------------------------
[5666]1283        # Combine initial and final values
1284        # of conserved quantities and cleanup
[4712]1285        #------------------------------------
[5666]1286       
1287        # Combine steps
[4712]1288        self.saxpy_conserved_quantities(0.5, 0.5)
[4713]1289 
[5666]1290        # Update ghosts
[4713]1291        self.update_ghosts()
[4712]1292
[5666]1293        # Update vertex and edge values
[4713]1294        self.distribute_to_vertices_and_edges()
1295
[5666]1296        # Update boundary values
[4713]1297        self.update_boundary()
1298
1299
1300
1301    def evolve_one_rk3_step(self, yieldstep, finaltime):
[4721]1302        """
1303        One 3rd order RK timestep
1304        Q^(1) = 3/4 Q^n + 1/4 E(h)^2 Q^n  (at time t^n + h/2)
1305        Q^{n+1} = 1/3 Q^n + 2/3 E(h) Q^(1) (at time t^{n+1})
1306        """
[4713]1307
[5666]1308        # Save initial conserved quantity values
[4713]1309        self.backup_conserved_quantities()           
1310
1311        initial_time = self.time
1312       
1313        #--------------------------------------
[5666]1314        # First euler step
[4713]1315        #--------------------------------------
1316
[5666]1317        # Compute fluxes across each element edge
[4713]1318        self.compute_fluxes()
1319
[5666]1320        # Update timestep to fit yieldstep and finaltime
[4713]1321        self.update_timestep(yieldstep, finaltime)
1322
[5666]1323        # Update conserved quantities
[4713]1324        self.update_conserved_quantities()
1325
[5666]1326        # Update ghosts
[4713]1327        self.update_ghosts()
1328
[5847]1329        # Update time
1330        self.time += self.timestep
1331
[5666]1332        # Update vertex and edge values
[4713]1333        self.distribute_to_vertices_and_edges()
1334
[5666]1335        # Update boundary values
[4713]1336        self.update_boundary()
1337
1338
[4712]1339        #------------------------------------
[5666]1340        # Second Euler step
[4712]1341        #------------------------------------
1342           
[5666]1343        # Compute fluxes across each element edge
[4713]1344        self.compute_fluxes()
1345
[5666]1346        # Update conserved quantities
[4713]1347        self.update_conserved_quantities()
1348
1349        #------------------------------------
[4721]1350        #Combine steps to obtain intermediate
1351        #solution at time t^n + 0.5 h
[4713]1352        #------------------------------------
[4721]1353
[5666]1354        # Combine steps
[4713]1355        self.saxpy_conserved_quantities(0.25, 0.75)
1356 
[5666]1357        # Update ghosts
[4712]1358        self.update_ghosts()
1359
[5847]1360
1361        # Set substep time
1362        self.time = initial_time + self.timestep*0.5
1363
[5666]1364        # Update vertex and edge values
[4712]1365        self.distribute_to_vertices_and_edges()
1366
[5666]1367        # Update boundary values
[4712]1368        self.update_boundary()
1369
1370
[4713]1371        #------------------------------------
[5666]1372        # Third Euler step
[4713]1373        #------------------------------------
1374           
[5666]1375        # Compute fluxes across each element edge
[4713]1376        self.compute_fluxes()
1377
[5666]1378        # Update conserved quantities
[4713]1379        self.update_conserved_quantities()
1380
1381        #------------------------------------
[5666]1382        # Combine final and initial values
1383        # and cleanup
[4713]1384        #------------------------------------
[5666]1385       
1386        # Combine steps
[4713]1387        self.saxpy_conserved_quantities(2.0/3.0, 1.0/3.0)
1388 
[5666]1389        # Update ghosts
[4713]1390        self.update_ghosts()
1391
[5847]1392        # Set new time
1393        self.time = initial_time + self.timestep       
1394
[5666]1395        # Update vertex and edge values
[4713]1396        self.distribute_to_vertices_and_edges()
1397
[5666]1398        # Update boundary values
[4713]1399        self.update_boundary()
1400
1401       
[3804]1402    def evolve_to_end(self, finaltime = 1.0):
[5847]1403        """Iterate evolve() all the way to finaltime."""
[3804]1404
1405        for _ in self.evolve(yieldstep=None, finaltime=finaltime):
1406            pass
1407
1408
[4712]1409    def backup_conserved_quantities(self):
[4771]1410        N = len(self) # Number_of_triangles
[3804]1411
[4771]1412        # Backup conserved_quantities centroid values
[4712]1413        for name in self.conserved_quantities:
1414            Q = self.quantities[name]
1415            Q.backup_centroid_values()       
1416
1417    def saxpy_conserved_quantities(self,a,b):
1418        N = len(self) #number_of_triangles
1419
[4771]1420        # Combine current and backed-up centroid values: Q <- a*Q + b*Q_backup
[4712]1421        for name in self.conserved_quantities:
1422            Q = self.quantities[name]
1423            Q.saxpy_centroid_values(a,b)       
1424   
1425
[3804]1426    def update_boundary(self):
1427        """Go through list of boundary objects and update boundary values
1428        for all conserved quantities on boundary.
[4702]1429        It is assumed that the ordering of conserved quantities is
1430        consistent between the domain and the boundary object, i.e.
1431        the jth element of vector q must correspond to the jth conserved
1432        quantity in domain.
[3804]1433        """
1434
[4771]1435        # FIXME: Update only those that change (if that can be worked out)
1436        # FIXME: Boundary objects should not include ghost nodes.
[3804]1437        for i, ((vol_id, edge_id), B) in enumerate(self.boundary_objects):
[4702]1438            if B is None:
1439                print 'WARNING: Ignored boundary segment %d (None)' %i
1440            else:
[3804]1441                q = B.evaluate(vol_id, edge_id)
1442
1443                for j, name in enumerate(self.conserved_quantities):
1444                    Q = self.quantities[name]
1445                    Q.boundary_values[i] = q[j]
1446
1447
1448    def compute_fluxes(self):
1449        msg = 'Method compute_fluxes must be overridden by Domain subclass'
1450        raise Exception, msg
1451
1452
1453    def update_timestep(self, yieldstep, finaltime):
1454
1455        from anuga.config import min_timestep, max_timestep
1456
[4677]1457       
1458       
1459        # Protect against degenerate timesteps arising from isolated
1460        # triangles
[5242]1461        # FIXME (Steve): This should be in shallow_water as it assumes x and y
1462        # momentum
[4677]1463        if self.protect_against_isolated_degenerate_timesteps is True and\
[4805]1464               max(self.max_speed) > 10.0: # FIXME (Ole): Make this configurable
[4677]1465
1466            # Setup 10 bins for speed histogram
1467            from anuga.utilities.numerical_tools import histogram, create_bins
1468       
1469            bins = create_bins(self.max_speed, 10)
1470            hist = histogram(self.max_speed, bins)
1471
1472            # Look for characteristic signature
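            # i.e. a non-empty top bin (hist[-1] > 0) separated from the
            # bulk of the speeds by empty middle bins (hist[4]..hist[8]),
            # suggesting a few isolated outliers rather than fast flow
            # throughout the mesh.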
1473            if len(hist) > 1 and\
1474                hist[-1] > 0 and\
1475                hist[4] == hist[5] == hist[6] == hist[7] == hist[8] == 0:
1476                    # Danger of isolated degenerate triangles
1477                    # print self.timestepping_statistics(track_speeds=True) 
1478                   
1479                    # Find triangles in last bin
1480                    # FIXME - speed up using Numeric
1481                    d = 0
1482                    for i in range(self.number_of_full_triangles):
1483                        if self.max_speed[i] > bins[-1]:
[4771]1484                            msg = 'Time=%f: Ignoring isolated high ' %self.time
1485                            msg += 'speed triangle ' 
[4677]1486                            msg += '#%d of %d with max speed=%f'\
[4771]1487                                  %(i, self.number_of_full_triangles,
1488                                    self.max_speed[i])
[4677]1489                   
[4771]1490                            # print 'Found offending triangle', i,
1491                            # self.max_speed[i]
[4677]1492                            self.get_quantity('xmomentum').set_values(0.0, indices=[i])
1493                            self.get_quantity('ymomentum').set_values(0.0, indices=[i])
1494                            self.max_speed[i]=0.0
1495                            d += 1
1496                   
1497                    #print 'Adjusted %d triangles' %d       
1498                    #print self.timestepping_statistics(track_speeds=True)     
1499               
1500
1501                       
[3804]1502        # self.timestep is calculated from speed of characteristics
1503        # Apply CFL condition here
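        # Note: self.flux_timestep is set by compute_fluxes in the subclass;
        # for the shallow water domain it is (roughly) the minimum over all
        # triangles of inscribed-circle radius divided by maximal wave speed,
        # so the line below enforces dt <= CFL * min(r_i/v_i), capped by
        # max_timestep.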
[4713]1504        timestep = min(self.CFL*self.flux_timestep, max_timestep)
[3804]1505
[4771]1506        # Record maximal and minimal values of timestep for reporting
[3804]1507        self.max_timestep = max(timestep, self.max_timestep)
1508        self.min_timestep = min(timestep, self.min_timestep)
1509
[4677]1510           
1511 
[4771]1512        # Protect against degenerate time steps
[3804]1513        if timestep < min_timestep:
1514
[4771]1515        # Number of consecutive small steps taken before taking action
[3804]1516            self.smallsteps += 1
1517
1518            if self.smallsteps > self.max_smallsteps:
[4771]1519                self.smallsteps = 0 # Reset
[3804]1520
1521                if self._order_ == 1:
1522                    msg = 'WARNING: Too small timestep %.16f reached '\
1523                          %timestep
1524                    msg += 'even after %d steps of first order scheme'\
1525                           %self.max_smallsteps
1526                    print msg
[4771]1527                    timestep = min_timestep  # Try enforcing min_timestep
[3804]1528
[5080]1529                    print self.timestepping_statistics(track_speeds=True)
[4437]1530
[4677]1531                    raise Exception, msg
[3804]1532                else:
[4771]1533                    # Try to overcome situation by switching to first order
[3804]1534                    self._order_ = 1
1535
1536        else:
1537            self.smallsteps = 0
1538            if self._order_ == 1 and self.default_order == 2:
1539                self._order_ = 2
1540
1541
[4771]1542        # Ensure that final time is not exceeded
[3804]1543        if finaltime is not None and self.time + timestep > finaltime :
1544            timestep = finaltime-self.time
1545
[4771]1546        # Ensure that model time is aligned with yieldsteps
[3804]1547        if self.yieldtime + timestep > yieldstep:
1548            timestep = yieldstep-self.yieldtime
1549
1550        self.timestep = timestep
1551
1552
1553
1554    def compute_forcing_terms(self):
1555        """If there are any forcing functions driving the system
1556        they should be defined in Domain subclass and appended to
1557        the list self.forcing_terms
1558        """
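        # Illustrative sketch only (names are hypothetical): a forcing term
        # is any callable taking the domain as its single argument, typically
        # adding to the explicit_update array of a conserved quantity, e.g.
        #
        #     def rainfall_forcing(domain):
        #         stage = domain.quantities['stage']  # assumes 'stage' exists
        #         stage.explicit_update += 1.0e-6     # constant source term
        #
        #     domain.forcing_terms.append(rainfall_forcing)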
1559
1560        for f in self.forcing_terms:
1561            f(self)
1562
1563
1564
1565    def update_conserved_quantities(self):
1566        """Update vectors of conserved quantities using previously
1567        computed fluxes and specified forcing functions.
1568        """
1569
1570        from Numeric import ones, sum, equal, Float
1571
[4771]1572        N = len(self) # Number_of_triangles
[3804]1573        d = len(self.conserved_quantities)
1574
1575        timestep = self.timestep
1576
[4771]1577        # Compute forcing terms
[3804]1578        self.compute_forcing_terms()
1579
[4771]1580        # Update conserved_quantities
[3804]1581        for name in self.conserved_quantities:
1582            Q = self.quantities[name]
1583            Q.update(timestep)
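            # Q.update advances the centroid values of Q over the timestep
            # using Q.explicit_update (fluxes and explicit forcing) together
            # with Q.semi_implicit_update (see quantity.py for the details)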
1584
[4771]1585            # Note that Q.explicit_update is reset by compute_fluxes
1586            # Where is Q.semi_implicit_update reset?
1587           
[3804]1588
1589    def update_ghosts(self):
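        # Overridden in the parallel domain to exchange ghost-cell values
        # between processors; the sequential domain has nothing to do here.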
1590        pass
1591
1592    def distribute_to_vertices_and_edges(self):
1593        """Extrapolate conserved quantities from centroid to
1594        vertices and edge-midpoints for each volume
1595
1596        Default implementation is straight first order,
1597        i.e. constant values throughout each element and
1598        no reference to non-conserved quantities.
1599        """
1600
1601        for name in self.conserved_quantities:
1602            Q = self.quantities[name]
1603            if self._order_ == 1:
1604                Q.extrapolate_first_order()
1605            elif self._order_ == 2:
1606                Q.extrapolate_second_order()
[5162]1607                #Q.limit()
[3804]1608            else:
1609                raise Exception, 'Unknown order'
[5162]1610            #Q.interpolate_from_vertices_to_edges()
[3804]1611
1612
1613    def centroid_norm(self, quantity, normfunc):
1614        """Calculate the norm of the centroid values
1615        of a specific quantity, using normfunc.
1616
1617        normfunc should map a list (or array) of values to a float.
1618
1619        Common normfuncs are provided in the module utilities.norms
1620        """
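        # Illustrative usage (assuming l2_norm is provided by that module):
        #     from anuga.utilities.norms import l2_norm
        #     e = domain.centroid_norm('stage', l2_norm)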
1621        return normfunc(self.quantities[quantity].centroid_values)
1622
1623
1624
[4771]1625#------------------
1626# Initialise module
1627#------------------
[3804]1628
[4771]1629# Optimisation with psyco
[3804]1630from anuga.config import use_psyco
1631if use_psyco:
1632    try:
1633        import psyco
1634    except ImportError:
1635        import os
1636        if os.name == 'posix' and os.uname()[4] == 'x86_64':
1637            pass
[4771]1638            # Psyco isn't supported on 64 bit systems, but it doesn't matter
[3804]1639        else:
1640            msg = 'WARNING: psyco (speedup) could not import'+\
1641                  ', you may want to consider installing it'
1642            print msg
1643    else:
1644        psyco.bind(Domain.update_boundary)
[4771]1645        #psyco.bind(Domain.update_timestep) # Not worth it
[3804]1646        psyco.bind(Domain.update_conserved_quantities)
1647        psyco.bind(Domain.distribute_to_vertices_and_edges)
1648
1649
1650if __name__ == "__main__":
1651    pass