source: trunk/anuga_work/development/2010-projects/anuga_1d/avalanche-sudi/domain.py @ 8427

Last change on this file since 8427 was 8427, checked in by davies, 13 years ago

Adding the trapezoidal channel validation test, and editing the ANUGA manual

File size: 69.2 KB
1"""Class Domain - 1D domains for finite-volume computations of
2   the shallow water wave equation
3
4
5   Copyright 2004
6   Ole Nielsen, Stephen Roberts, Duncan Gray, Christopher Zoppou
7   Geoscience Australia
8
9   Sudi Mungkasi, ANU 2010
10"""
11
12from generic_boundary_conditions import *
13
14
15class Domain:
16
17    def __init__(self,
18                 coordinates,
19                 boundary = None,
20                 conserved_quantities = None,
21                 evolved_quantities = None,
22                 other_quantities = None,
23                 tagged_elements = None):
24        """
25        Build 1D elements from x coordinates
26        """
27
28        from Numeric import array, zeros, Float, Int       
29
30        from config import timestepping_method
31        from config import CFL
32       
33        #Store Points
34        self.coordinates = array(coordinates)
35
36
37        #Register number of Elements
38        self.number_of_elements = N = len(self.coordinates)-1
39
40        self.beta = 1.0
41        self.set_limiter("minmod_kurganov")
42        self.set_CFL(CFL)
43        self.set_timestepping_method(timestepping_method)
44
45        self.wet_nodes = zeros((N,2), Int) # should this be here
46
47        #Allocate space for neighbour and boundary structures
48        self.neighbours = zeros((N, 2), Int)
49        self.neighbour_vertices = zeros((N, 2), Int)
50        self.number_of_boundaries = zeros(N, Int)
51        self.surrogate_neighbours = zeros((N, 2), Int)
52       
53        #Allocate space for geometric quantities
54        self.vertices  = zeros((N, 2), Float)
55        self.centroids = zeros(N, Float)
56        self.areas     = zeros(N, Float)
57        self.max_speed_array = zeros(N, Float)
58        self.normals = zeros((N, 2), Float)
59       
60        for i in range(N):
61            xl = self.coordinates[i]
62            xr = self.coordinates[i+1]
63            self.vertices[i,0] = xl
64            self.vertices[i,1] = xr
65
66            centroid = (xl+xr)/2.0
67            self.centroids[i] = centroid
68
69            msg = 'Coordinates should be ordered, smallest to largest'
70            assert xr>xl, msg
71           
72            #The normal vectors
73            # - point outward from each edge
74            # - are orthogonal to the edge
75            # - have unit length
76            # - Are enumerated by left vertex then right vertex normals
77
78            nl = -1.0
79            nr =  1.0
80            self.normals[i,:] = [nl, nr]
81            self.areas[i] = (xr-xl)
82     
83            #Initialise Neighbours (-1 means that it is a boundary neighbour)
84            self.neighbours[i, :] = [-1, -1]
85            #Initialise vertex ids of neighbours
86            #In case of boundaries this slot is not used
87            self.neighbour_vertices[i, :] = [-1, -1]
88
89        self.build_vertexlist()
90
91        #Build neighbour structure
92        self.build_neighbour_structure()
93
94        #Build surrogate neighbour structure
95        self.build_surrogate_neighbour_structure()         
96
97        #Build boundary dictionary mapping (id, vertex) to symbolic tags
98        self.build_boundary_dictionary(boundary)
99
100        #Build tagged element  dictionary mapping (tag) to array of elements
101        self.build_tagged_elements_dictionary(tagged_elements)
102       
103        from quantity import Quantity, Conserved_quantity
104       
105        #List of quantity names entering the conservation equations
106        #(Must be a subset of quantities)
107        if conserved_quantities is None:
108            self.conserved_quantities = []
109        else:
110            self.conserved_quantities = conserved_quantities
111
112        if evolved_quantities is None:
113            self.evolved_quantities = self.conserved_quantities
114        else:
115            self.evolved_quantities = evolved_quantities
116
117        if other_quantities is None:
118            self.other_quantities = []
119        else:
120            self.other_quantities = other_quantities
121
122
123        #Build dictionary of Quantity instances keyed by quantity names
124        self.quantities = {}
125
126        #FIXME: remove later - maybe OK, though....
127        for name in self.evolved_quantities:
128            self.quantities[name] = Quantity(self)
129        for name in self.other_quantities:
130            self.quantities[name] = Quantity(self)
131
132        #Create an empty list for explicit forcing terms
133        self.forcing_terms = []
134
135        #Defaults
136        from config import max_smallsteps, beta_w, beta_h, epsilon, CFL
137        self.beta_w = beta_w
138        self.beta_h = beta_h
139        self.epsilon = epsilon
140
141        #FIXME: Maybe have separate orders for h-limiter and w-limiter?
142        #Or maybe get rid of order altogether and use beta_w and beta_h
143        self.default_order = 1
144        self.order = self.default_order
145
146        self.default_time_order = 1
147        self.time_order = self.default_time_order
148       
149        self.smallsteps = 0
150        self.max_smallsteps = max_smallsteps
151        self.number_of_steps = 0
152        self.number_of_first_order_steps = 0
153
154        #Model time
155        self.time = 0.0
156        self.finaltime = None
157        self.min_timestep = self.max_timestep = 0.0
158        self.starttime = 0 #Physical starttime if any (0 is 1 Jan 1970 00:00:00)
159        #Checkpointing and storage
160        from config import default_datadir
161        self.set_datadir(default_datadir)
162        self.filename = 'domain_avalanche'
163        self.checkpoint = False
164       
165        self.local_truncation_error_CK_h = zeros(N+1, Float)
166        self.local_truncation_error_CK_uh = zeros(N+1, Float)       
167        self.local_truncation_error_KKP_h = zeros(N, Float)
168        self.local_truncation_error_KKP_uh = zeros(N, Float)       
169       
170        self.entropy_production = zeros(N, Float)
171        self.entropy_explicit_update = zeros(N, Float)       
172        self.entropy_centroid_values = zeros(N, Float)
173        self.entropy_centroid_backup_values = zeros(N, Float)       
174
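    # Editor's sketch (not part of the original module): for a small mesh the
    # constructor above produces geometry arrays like the following.  The
    # coordinate values are hypothetical and the results are written as plain
    # Python lists rather than Numeric arrays.
    #
    #   >>> domain = Domain([0.0, 0.5, 1.5, 3.0])   # three elements
    #   >>> list(domain.centroids)                  # midpoint of each cell
    #   [0.25, 1.0, 2.25]
    #   >>> list(domain.areas)                      # cell widths xr - xl
    #   [0.5, 1.0, 1.5]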
175    def __len__(self):
176        return self.number_of_elements
177
178    def build_vertexlist(self):
179        """Build vertexlist index by vertex ids and for each entry (point id)
180        build a list of (triangles, vertex_id) pairs that use the point
181        as vertex.
182
183        Preconditions:
184          self.coordinates and self.number_of_elements are defined
185
186        Postcondition:
187          self.vertexlist is built
188        """
189        from Numeric import array, Float
190
191        vertexlist = [None]*len(self.coordinates)
192        for i in range(self.number_of_elements):
193            a = i
194            b = i + 1
195
196            #Register the vertices v as lists of
197            #(triangle_id, vertex_id) tuples associated with them
198            #This is used for smoothing
199            #for vertex_id, v in enumerate([a,b,c]):
200            for vertex_id, v in enumerate([a,b]):
201                if vertexlist[v] is None:
202                    vertexlist[v] = []
203
204                vertexlist[v].append( (i, vertex_id) )
205
206        self.vertexlist = vertexlist
207
208       
209    def build_neighbour_structure(self):
210        """Update all registered triangles to point to their neighbours.
211
212        Also, keep a tally of the number of boundaries for each triangle
213
214        Postconditions:
215          neighbours is populated
216          neighbour_vertices is populated
217          number_of_boundaries integer array is defined.
218        """
219
220        #Step 1:
221        #Build dictionary mapping from segments (2-tuple of points)
222        #to left hand side edge (facing neighbouring triangle)
223
224        N = self.number_of_elements
225        neighbourdict = {}
226        l_vertex = 0
227        r_vertex = 1
228        for i in range(N):
229
230            #Register all segments as keys mapping to current triangle
231            #and segment id
232            a = self.vertices[i,0]
233            b = self.vertices[i,1]
234           
235            neighbourdict[a,l_vertex] = (i, 0) #(id, vertex)
236            neighbourdict[b,r_vertex] = (i, 1) #(id, vertex)
237
238
239        #Step 2:
240        #Go through triangles again, but this time
241        #reverse direction of segments and lookup neighbours.
242        for i in range(N):
243            a = self.vertices[i,0]
244            b = self.vertices[i,1]
245           
246            self.number_of_boundaries[i] = 2
247            if neighbourdict.has_key((b,l_vertex)):
248                self.neighbours[i, 1] = neighbourdict[b,l_vertex][0]
249                self.neighbour_vertices[i, 1] = neighbourdict[b,l_vertex][1]
250                self.number_of_boundaries[i] -= 1
251
252            if neighbourdict.has_key((a,r_vertex)):
253                self.neighbours[i, 0] = neighbourdict[a,r_vertex][0]
254                self.neighbour_vertices[i, 0] = neighbourdict[a,r_vertex][1]
255                self.number_of_boundaries[i] -= 1
256
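    # Editor's sketch: for the hypothetical three-element mesh sketched after
    # __init__, this loop leaves (-1 marks a boundary side):
    #
    #   neighbours           = [[-1, 1], [0, 2], [1, -1]]
    #   neighbour_vertices   = [[-1, 0], [1, 0], [1, -1]]
    #   number_of_boundaries = [1, 0, 1]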
257    def build_surrogate_neighbour_structure(self):
258        """Build structure where each triangle edge points to its neighbours
259        if they exist. Otherwise point to the triangle itself.
260
261        The surrogate neighbour structure is useful for computing gradients
262        based on centroid values of neighbours.
263
264        Precondition: Neighbour structure is defined
265        Postcondition:
266          Surrogate neighbour structure is defined:
267          surrogate_neighbours: i0, i1 where all i_k >= 0 point to
268          elements.
269
270        """
271
272        N = self.number_of_elements
273        for i in range(N):
274            #Find all neighbouring volumes that are not boundaries
275            #for k in range(3):
276            for k in range(2):   
277                if self.neighbours[i, k] < 0:
278                    self.surrogate_neighbours[i, k] = i #Point this triangle
279                else:
280                    self.surrogate_neighbours[i, k] = self.neighbours[i, k]
281
282    def build_boundary_dictionary(self, boundary = None):
283        """Build or check the dictionary of boundary tags.
284         self.boundary is a dictionary of tags,
285         keyed by volume id and edge:
286         { (id, edge): tag, ... }
287
288         Postconditions:
289            self.boundary is defined.
290        """
291
292        from config import default_boundary_tag
293
294        if boundary is None:
295            boundary = {}
296            for vol_id in range(self.number_of_elements):
297                for vertex_id in range(0, 2):   
298                    if self.neighbours[vol_id, vertex_id] < 0:   
299                        boundary[(vol_id, vertex_id)] = default_boundary_tag
300        else:
301            #Check that all keys in given boundary exist
302            #for vol_id, edge_id in boundary.keys():
303            for vol_id, vertex_id in boundary.keys():   
304                msg = 'Segment (%d, %d) does not exist' %(vol_id, vertex_id)
305                a, b = self.neighbours.shape
306                assert vol_id < a and vertex_id < b, msg
307
308                #FIXME: This assert violates internal boundaries (delete it)
309                #msg = 'Segment (%d, %d) is not a boundary' %(vol_id, edge_id)
310                #assert self.neighbours[vol_id, edge_id] < 0, msg
311
312            #Check that all boundary segments are assigned a tag
313            for vol_id in range(self.number_of_elements):
314                for vertex_id in range(0, 2):   
315                    if self.neighbours[vol_id, vertex_id] < 0:   
316                        if not boundary.has_key( (vol_id, vertex_id) ):   
317                            msg = 'WARNING: Given boundary does not contain '
318                            msg += 'tags for vertex (%d, %d). '\
319                                   %(vol_id, vertex_id)
320                            msg += 'Assigning default tag (%s).'\
321                                   %default_boundary_tag
322                            boundary[ (vol_id, vertex_id) ] =\
323                                      default_boundary_tag
324
325        self.boundary = boundary
326
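    # Editor's sketch: with boundary = None, only the two exterior sides of
    # the hypothetical three-element mesh receive the default tag, i.e.
    #
    #   self.boundary == {(0, 0): default_boundary_tag,
    #                     (2, 1): default_boundary_tag}
    #
    # where default_boundary_tag is whatever config defines (its value is not
    # shown in this file).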
327    def build_tagged_elements_dictionary(self, tagged_elements = None):
328        """Build the dictionary of element tags.
329         self.tagged_elements is a dictionary of element arrays,
330         keyed by tag:
331         { (tag): [e1, e2, e3..] }
332
333         Postconditions:
334            self.tagged_elements is defined
335        """
336        from Numeric import array, Int
337
338        if tagged_elements is None:
339            tagged_elements = {}
340        else:
341            #Check that all keys in given boundary exist
342            for tag in tagged_elements.keys():
343                tagged_elements[tag] = array(tagged_elements[tag]).astype(Int)
344
345                msg = 'Not all elements exist. '
346                assert max(tagged_elements[tag]) < self.number_of_elements, msg
347        self.tagged_elements = tagged_elements
348
349
350    def set_quantities_to_be_stored(self, q):
351        """Specify which quantities will be stored in the sww file.
352
353        q must be either:
354          - the name of a quantity
355          - a list of quantity names
356          - None
357
358        In the first two cases, the named quantities will be stored at each
359        yieldstep
360        (This is in addition to the quantities elevation and friction) 
361        If q is None, storage will be switched off altogether.
362        """
363
364
365        if q is None:
366            self.quantities_to_be_stored = []           
367            self.store = False
368            return
369
370        if isinstance(q, basestring):
371            q = [q] # Turn argument into a list
372
373        #Check correctness
374        for quantity_name in q:
375            msg = 'Quantity %s is not a valid conserved quantity' %quantity_name
376            assert quantity_name in self.conserved_quantities, msg
377       
378        self.quantities_to_be_stored = q
379       
380
381
382
383
384    def get_boundary_tags(self):
385        """Return list of available boundary tags
386        """
387
388        tags = {}
389        for v in self.boundary.values():
390            tags[v] = 1
391
392        return tags.keys()
393       
394    def get_vertex_coordinates(self, obj = False):
395        """Return all vertex coordinates.
396        Return all vertex coordinates for all elements as an Nx2 array
397        (ordered as x0, x1 for each element)
398
399        if obj is True, the coordinates are returned via an explicit reshape to Nx2.
400        FIXME, we might make that the default.
401        FIXME Maybe use keyword: continuous = False for this condition?
402
403       
404        """
405
406        if obj is True:
407            from numpy import concatenate, reshape
408            V = self.vertices
409            N = V.shape[0]
410            return reshape(V, (N, 2))
411        else:   
412            return self.vertices
413   
414    def get_conserved_quantities(self, vol_id, vertex=None):#, edge=None):
415        """Get conserved quantities at volume vol_id
416
417        If vertex is specified use it as index for vertex values
418        If edge is specified use it as index for edge values
419        If neither are specified use centroid values
420        If both are specified an exception is raised
421
422        Return value: Vector of length equal to the number of conserved quantities
423
424        """
425
426        from Numeric import zeros, Float       
427        q = zeros( len(self.conserved_quantities), Float)
428        for i, name in enumerate(self.conserved_quantities):
429            Q = self.quantities[name]
430            if vertex is not None:
431                q[i] = Q.vertex_values[vol_id, vertex]
432            else:
433                q[i] = Q.centroid_values[vol_id]
434
435        return q
436
437
438    def get_evolved_quantities(self, vol_id, vertex=None):#, edge=None):
439        """Get evolved quantities at volume vol_id
440
441        If vertex is specified use it as index for vertex values
442        If edge is specified use it as index for edge values
443        If neither are specified use centroid values
444        If both are specified an exception is raised
445
446        Return value: Vector of length equal to the number of evolved quantities
447
448        """
449
450        from Numeric import zeros, Float       
451        q = zeros( len(self.evolved_quantities), Float)
452
453        for i, name in enumerate(self.evolved_quantities):
454            Q = self.quantities[name]
455            if vertex is not None:
456                q[i] = Q.vertex_values[vol_id, vertex]
457            else:
458                q[i] = Q.centroid_values[vol_id]
459
460        return q
461       
462
463    def get_centroids(self):
464        """Return all coordinates of centroids
465        Return x coordinate of centroid for each element as an N array
466        """
467
468        return self.centroids
469
470    def get_vertices(self):
471        """Return all coordinates of centroids
472        Return x coordinate of centroid for each element as a N array
473        """
474
475        return self.vertices
476
477    def get_coordinate(self, elem_id, vertex=None):
478        """Return coordinate of centroid,
479        or left or right vertex.
480        Left vertex (vertex=0). Right vertex (vertex=1)
481        """
482
483        if vertex is None:
484            return self.centroids[elem_id]
485        else:
486            return self.vertices[elem_id,vertex]
487
488    def get_area(self, elem_id):
489        """Return area of element id
490        """
491
492        return self.areas[elem_id]
493
494    def get_quantity(self, name, location='vertices', indices = None):
495        """Get values for named quantity
496
497        name: Name of quantity
498
499        In case of location == 'centroids' the returned values form
500        a list or a Numeric array of length N, N being the number
501        of elements. Otherwise they are of dimension Nx2.
502
503        Indices is the set of element ids that the operation applies to.
504
505        The values will be stored in elements following their
506        internal ordering.
507        """
508
509        return self.quantities[name].get_values( location, indices = indices)
510
511    def get_centroid_coordinates(self):
512        """Return all centroid coordinates.
513        Return all centroid coordinates for all elements as an N array
514        (one x coordinate per element)
515        """
516        return self.centroids
517
518
519    def get_timestepping_method(self):
520        return self.timestepping_method
521
522    def set_timestepping_method(self,timestepping_method):
523       
524        if timestepping_method in ['euler', 'rk2', 'rk3']:
525            self.timestepping_method = timestepping_method
526            return
527
528        msg = '%s is an incorrect timestepping type'% timestepping_method
529        raise Exception, msg
530
531
532    def set_quantity(self, name, *args, **kwargs):
533        """Set values for named quantity
534
535
536        One keyword argument is documented here:
537        expression = None, # Arbitrary expression
538
539        expression:
540          Arbitrary expression involving quantity names
541
542        See Quantity.set_values for further documentation.
543        """
544        #Do the expression stuff
545        if kwargs.has_key('expression'):
546            expression = kwargs['expression']
547            del kwargs['expression']
548
549            Q = self.create_quantity_from_expression(expression)
550            kwargs['quantity'] = Q
551       
552        #Assign values
553        self.quantities[name].set_values(*args, **kwargs)
554
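    # Editor's sketch of typical calls (the quantity names are hypothetical
    # and must already exist in self.quantities; see Quantity.set_values for
    # the full set of accepted arguments):
    #
    #   domain.set_quantity('elevation', 0.0)
    #   domain.set_quantity('stage', expression = 'elevation + height')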
555    def set_boundary(self, boundary_map):
556        """Associate boundary objects with tagged boundary segments.
557
558        Input boundary_map is a dictionary of boundary objects keyed
559        by symbolic tags, to be matched against tags in the internal dictionary
560        self.boundary.
561
562        As a result, one pointer to a boundary object is stored for each vertex
563        in the list self.boundary_objects.
564        More entries may point to the same boundary object
565
566        Schematically the mapping is from two dictionaries to one list
567        where the index is used as pointer to the boundary_values arrays
568        within each quantity.
569
570        self.boundary:          (vol_id, edge_id): tag
571        boundary_map (input):   tag: boundary_object
572        ----------------------------------------------
573        self.boundary_objects:  ((vol_id, edge_id), boundary_object)
574
575
576        Pre-condition:
577          self.boundary has been built.
578
579        Post-condition:
580          self.boundary_objects is built
581
582        If a tag from the domain doesn't appear in the input dictionary an
583        exception is raised.
584        However, if a tag is not used in the domain, no error is thrown.
585        FIXME: This would lead to implementation of a
586        default boundary condition
587
588        Note: If a segment is listed in the boundary dictionary and if it is
589        not None, it *will* become a boundary -
590        even if there is a neighbouring triangle.
591        This would be the case for internal boundaries
592
593        Boundary objects that are None will be skipped.
594
595        FIXME: If set_boundary is called multiple times and if Boundary
596        object is changed into None, the neighbour structure will not be
597        restored!!!
598        """
599
600        self.boundary_objects = []
601        self.boundary_map = boundary_map  #Store for use with eg. boundary_stats.
602
603        #FIXME: Try to remove the sorting and fix test_mesh.py
604        x = self.boundary.keys()
605        x.sort()
606
607        #Loop through edges that lie on the boundary and associate them with
608        #callable boundary objects depending on their tags
609        for k, (vol_id, vertex_id) in enumerate(x):
610            tag = self.boundary[ (vol_id, vertex_id) ]
611            if boundary_map.has_key(tag):
612                B = boundary_map[tag]  #Get callable boundary object
613                if B is not None:
614                    #self.boundary_objects.append( ((vol_id, edge_id), B) )
615                    #self.neighbours[vol_id, edge_id] = -len(self.boundary_objects)
616                    self.boundary_objects.append( ((vol_id, vertex_id), B) )
617                    self.neighbours[vol_id, vertex_id] = -len(self.boundary_objects)
618                else:
619                    pass
620                    #FIXME: Check and perhaps fix neighbour structure
621
622            else:
623                msg = 'ERROR (domain.py): Tag "%s" has not been ' %tag
624                msg += 'bound to a boundary object.\n'
625                msg += 'All boundary tags defined in domain must appear '
626                msg += 'in the supplied dictionary.\n'
627                msg += 'The tags are: %s' %self.get_boundary_tags()
628                raise Exception, msg
629
630
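    # Editor's sketch: boundary tags reported by get_boundary_tags() are
    # mapped to callable boundary objects.  The class name and tag names
    # below are hypothetical (the objects are assumed to come from
    # generic_boundary_conditions, imported at the top of this file):
    #
    #   domain.set_boundary({'left' : Dirichlet_boundary([1.0, 0.0]),
    #                        'right': None})   # None entries are skipped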
631
632    def check_integrity(self):
633        #Mesh.check_integrity(self)   
634        for quantity in self.conserved_quantities:
635            msg = 'Conserved quantities must be a subset of all quantities'
636            assert quantity in self.quantities, msg
637
638        for quantity in self.evolved_quantities:
639            msg = 'Evolved quantities must be a subset of all quantities'
640            assert quantity in self.quantities, msg           
641
642    def write_time(self):
643        print self.timestepping_statistics()
644
645    def get_time(self):
646        return self.time
647       
648
649    def timestepping_statistics(self):
650        """Return string with time stepping statistics for printing or logging
651        """
652
653        msg = ''
654        if self.min_timestep == self.max_timestep:
655            msg += 'Time = %.4f, delta t = %.8f, steps=%d (%d)'\
656                   %(self.time, self.min_timestep, self.number_of_steps,
657                     self.number_of_first_order_steps)
658        elif self.min_timestep > self.max_timestep:
659            msg += 'Time = %.4f, steps=%d (%d)'\
660                   %(self.time, self.number_of_steps,
661                     self.number_of_first_order_steps)
662        else:
663            msg += 'Time = %.4f, delta t in [%.8f, %.8f], steps=%d (%d)'\
664                   %(self.time, self.min_timestep,
665                     self.max_timestep, self.number_of_steps,
666                     self.number_of_first_order_steps)
667
668        return msg
669
670    def get_name(self):
671        return self.filename
672
673    def set_name(self, name):
674        self.filename = name
675
676    def get_datadir(self):
677        return self.datadir
678
679    def set_datadir(self, name):
680        self.datadir = name
681
682    def set_CFL(self, cfl):
683        if cfl > 1.0:
684            print 'WARNING: Setting CFL condition to %f which is greater than 1' % cfl
685        self.CFL = cfl
686
687    def get_CFL(self):
688        return self.CFL
689   
690    def set_filename(self, name):
691        self.filename = name
692
693    def get_filename(self):
694        return self.filename
695
696    def get_limiter(self):
697        return self.limiter
698
699    def set_limiter(self,limiter):
700
701        possible_limiters = \
702        ['pyvolution', 'minmod_steve', 'minmod', 'minmod_kurganov', 'superbee', 'vanleer', 'vanalbada']
703
704        if limiter in possible_limiters:
705            self.limiter = limiter
706            return
707
708        msg = '%s is an incorrect limiter type.\n'% limiter
709        msg += 'Possible types are: '+ ", ".join(["%s" % el for el in possible_limiters])
710        raise Exception, msg
711
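    # Editor's note: the limiter name chosen here is presumably consulted when
    # vertex values are extrapolated and limited (see the quantity module);
    # a typical call is simply
    #
    #   domain.set_limiter('vanleer')
    #
    # and any name outside possible_limiters raises an exception.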
712       
713    #--------------------------
714    # Main components of evolve
715    #--------------------------   
716
717    def evolve(self, yieldstep = None,
718               finaltime = None,
719               duration = None,
720               skip_initial_step = False):
721        """Evolve model through time starting from self.starttime.
722
723
724        yieldstep: Interval between yields where results are stored,
725                   statistics written and domain inspected or
726                   possibly modified. If omitted the internal predefined
727                   max timestep is used.
728                   Internally, smaller timesteps may be taken.
729
730        duration: Duration of simulation
731
732        finaltime: Time where simulation should end. This is currently
733        relative time.  So it's the same as duration.
734
735        If both duration and finaltime are given an exception is thrown.
736
737
738        skip_initial_step: Boolean flag that decides whether the first
739        yield step is skipped or not. This is useful for example to avoid
740        duplicate steps when multiple evolve processes are dovetailed.
741
742
743        Evolve is implemented as a generator and is to be called as such, e.g.
744
745        for t in domain.evolve(yieldstep, finaltime):
746            <Do something with domain and t>
747
748
749        All times are given in seconds
750
751        """
752
753        from config import min_timestep, max_timestep, epsilon
754
755        # FIXME: Maybe lump into a larger check prior to evolving
756        msg = 'Boundary tags must be bound to boundary objects before '
757        msg += 'evolving system, '
758        msg += 'e.g. using the method set_boundary.\n'
759        msg += 'This system has the boundary tags %s '\
760               %self.get_boundary_tags()
761        assert hasattr(self, 'boundary_objects'), msg
762
763        if yieldstep is None:
764            yieldstep = max_timestep
765        else:
766            yieldstep = float(yieldstep)
767
768        self._order_ = self.default_order
769
770        if finaltime is not None and duration is not None:
771            # print 'F', finaltime, duration
772            msg = 'Only one of finaltime and duration may be specified'
773            raise Exception, msg
774        else:
775            if finaltime is not None:
776                self.finaltime = float(finaltime)
777            if duration is not None:
778                self.finaltime = self.starttime + float(duration)
779
780        N = len(self) # Number of triangles
781        self.yieldtime = 0.0 # Track time between 'yields'
782
783        # Initialise interval of timestep sizes (for reporting only)
784        self.min_timestep = max_timestep
785        self.max_timestep = min_timestep
786        self.number_of_steps = 0
787        self.number_of_first_order_steps = 0
788
789
790        # Update ghosts
791        self.update_ghosts()
792
793        # Initial update of vertex and edge values
794        self.distribute_to_vertices_and_edges()
795
796        # Update extrema if necessary (for reporting)
797        self.update_extrema()
798       
799        # Initial update boundary values
800        self.update_boundary() 
801
802        # Or maybe restore from latest checkpoint
803        if self.checkpoint is True:
804            self.goto_latest_checkpoint()
805
806        if skip_initial_step is False:
807            yield(self.time)  # Yield initial values
808
809        while True:
810
811            #Compute entropy before evolving the quantity and the entropy
812            #Sudi, 16 Aug 2010
813            h_C = self.quantities['height'].centroid_values
814            u_C = self.quantities['velocity'].centroid_values
815            z_C = self.quantities['elevation'].centroid_values
816            from config import g
817            for i in range(N):
818                self.entropy_centroid_values[i] = 0.5*h_C[i]*u_C[i]**2.0 + 0.5*g*h_C[i]**2.0 + g*h_C[i]*z_C[i]                   
819
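            # Editor's note: the quantity stored above is the entropy
            #   eta = 0.5*h*u**2 + 0.5*g*h**2 + g*h*z
            # at time t^n, so that produce_entropy() can difference it against
            # the post-step value.  Worked example with hypothetical values
            # h = 2.0, u = 1.0, z = 0.0 and g = 9.81:
            #   eta = 0.5*2*1 + 0.5*9.81*4 + 0 = 1.0 + 19.62 = 20.62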
820            # Evolve One Step, using appropriate timestepping method
821            if self.get_timestepping_method() == 'euler':
822                self.evolve_one_euler_step(yieldstep,finaltime)
823               
824            elif self.get_timestepping_method() == 'rk2':
825                self.evolve_one_rk2_step(yieldstep,finaltime)
826
827            elif self.get_timestepping_method() == 'rk3':
828                self.evolve_one_rk3_step(yieldstep,finaltime)               
829           
830            # Update extrema if necessary (for reporting)
831            self.update_extrema()
832
833            #print "self.timestep=",self.timestep
834            self.yieldtime += self.timestep
835            self.number_of_steps += 1
836            if self._order_ == 1:
837                self.number_of_first_order_steps += 1
838
839
840            # Yield results
841            if finaltime is not None and self.time >= finaltime-epsilon:
842
843                if self.time > finaltime:
844                    msg = 'WARNING (domain.py): time overshot finaltime. '
845                    msg += 'Contact Ole.Nielsen@ga.gov.au'
846                    raise Exception, msg
847                   
848
849                # Yield final time and stop
850                self.time = finaltime
851                yield(self.time)
852                break
853
854            if self.yieldtime >= yieldstep-0.5*min_timestep:
855                # Yield (intermediate) time and allow inspection of domain
856
857                if self.checkpoint is True:
858                    self.store_checkpoint()
859                    self.delete_old_checkpoints()
860
861                # Pass control on to outer loop for more specific actions
862                # print "Test if passing here.....,Sudi."
863                yield(self.time)
864
865                # Reinitialise
866                self.yieldtime = 0.0
867                self.min_timestep = max_timestep
868                self.max_timestep = min_timestep
869                self.number_of_steps = 0
870                self.number_of_first_order_steps = 0
871
872                if finaltime is not None and self.time >= finaltime-0.5*min_timestep:
873                    break
874
875
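    # Editor's sketch of driving the generator above (yieldstep and finaltime
    # values are hypothetical; all times are in seconds):
    #
    #   for t in domain.evolve(yieldstep = 0.1, finaltime = 10.0):
    #       domain.write_time()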
876    def evolve_one_euler_step(self, yieldstep, finaltime):
877        """
878        One Euler Time Step
879        Q^{n+1} = E(h) Q^n
880        """
881
882        # Back up evolved (ALL) quantities, Sudi 13 July 2010
883        self.backup_evolved_quantities() # Actually, it is not used in this evolve_one_euler_step
884     
885        # Compute fluxes across each element edge
886        self.compute_fluxes() # Added entropy computation in this function!
887
888        # Update timestep to fit yieldstep and finaltime
889        self.update_timestep(yieldstep, finaltime)
890
891        # Update conserved quantities
892        self.update_conserved_quantities() # Added entropy computation in this function!
893
894        # Update ghosts
895        self.update_ghosts()
896
897        # Update vertex and edge values
898        self.distribute_to_vertices_and_edges()
899
900        # Update boundary values
901        self.update_boundary()
902
903        # Update time
904        self.time += self.timestep
905
906        # Compute numerical entropy production, Sudi 13 Aug 2010
907        self.produce_entropy()# Added entropy computation in this function!
908
909        # Detect shock by local truncation error of Constantine-Kurganov, Sudi 13 July 2010
910        # See "Adaptive Central-Upwind Schemes for Hyperbolic Systems of Conservation Laws"
911        self.compute_local_truncation_error_CK_h()
912        self.compute_local_truncation_error_CK_uh()       
913
914        # Detect shock by local truncation error of Karni-Kurganov-Petrova, Sudi 31 August 2010
915        # See "A Smoothness Indicator for Adaptive Algorithms for Hyperbolic Systems"
916        self.compute_local_truncation_error_KKP_h()
917        self.compute_local_truncation_error_KKP_uh()       
918       
919
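    # Editor's note: one Euler step above amounts to
    #   q_c^{n+1} = q_c^n + dt * explicit_update_c
    # for each conserved quantity, with explicit_update assembled from the
    # fluxes in compute_fluxes() and applied in update_conserved_quantities().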
920
921    def evolve_one_rk2_step(self, yieldstep, finaltime):
922        """
923        One 2nd order RK timestep
924        Q^{n+1} = 0.5 Q^n + 0.5 E(h)^2 Q^n
925        """
926       
927        # Save initial conserved quantities values
928        self.backup_conserved_quantities()# Added entropy computation in this function!
929       
930        # Back up non_conserved (ehv) quantities, Sudi 13 July 2010
931        self.backup_ehv_quantities()   
932
933        #--------------------------------------
934        # First euler step
935        #--------------------------------------
936
937        # Compute fluxes across each element edge
938        self.compute_fluxes()# Added entropy computation in this function!
939
940        # Update timestep to fit yieldstep and finaltime
941        self.update_timestep(yieldstep, finaltime)
942
943        # Update conserved quantities
944        self.update_conserved_quantities()# Added entropy computation in this function!
945
946        # Update ghosts
947        self.update_ghosts()
948
949        # Update vertex and edge values
950        self.distribute_to_vertices_and_edges()
951
952        # Update boundary values
953        self.update_boundary()
954
955        # Update time
956        self.time += self.timestep
957
958        #------------------------------------
959        # Second Euler step                     
960        #------------------------------------
961           
962        # Compute fluxes across each element edge
963        self.compute_fluxes()# Added entropy computation in this function!
964
965        # Update conserved quantities
966        self.update_conserved_quantities()# Added entropy computation in this function!
967
968        #------------------------------------
969        # Combine initial and final values
970        # of conserved quantities and cleanup
971        #------------------------------------
972       
973        # Combine steps
974        self.saxpy_conserved_quantities(0.5, 0.5)# Added entropy computation in this function!
975
976        #-----------------------------------
977        # clean up vertex values
978        #-----------------------------------
979 
980        # Update ghosts
981        self.update_ghosts()
982
983        # Update vertex and edge values
984        self.distribute_to_vertices_and_edges()
985
986        # Update boundary values
987        self.update_boundary()
988
989        # Compute numerical entropy production, Sudi 13 Aug 2010
990        self.produce_entropy()# Added entropy computation in this function!
991
992        # Detect shock by local truncation error of Constantine-Kurganov, Sudi 13 July 2010
993        # See "Adaptive Central-Upwind Schemes for Hyperbolic Systems of Conservation Laws"
994        self.compute_local_truncation_error_CK_h()   
995        self.compute_local_truncation_error_CK_uh()         
996
997        # Detect shock by local truncation error of Karni-Kurganov-Petrova, Sudi 31 August 2010
998        # See "A Smoothness Indicator for Adaptive Algorithms for Hyperbolic Systems"
999        self.compute_local_truncation_error_KKP_h() 
1000        self.compute_local_truncation_error_KKP_uh()         
1001
1002
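    # Editor's worked example of the 0.5/0.5 averaging above, on the scalar
    # model problem dq/dt = -q (not shallow water), with q^n = 1.0, dt = 0.1:
    #   first Euler step : q*      = 1.0 - 0.1*1.0  = 0.9
    #   second Euler step: q**     = 0.9 - 0.1*0.9  = 0.81
    #   combination      : q^{n+1} = 0.5*1.0 + 0.5*0.81 = 0.905
    # which agrees with the exact exp(-0.1) = 0.9048... to second order.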
1003
1004    def evolve_one_rk3_step(self, yieldstep, finaltime):
1005        """
1006        One 3rd order RK timestep
1007        Q^(1) = 3/4 Q^n + 1/4 E(h)^2 Q^n  (at time t^n + h/2)
1008        Q^{n+1} = 1/3 Q^n + 2/3 E(h) Q^(1) (at time t^{n+1})
1009        """
1010
1011        # Save initial conserved quantities values
1012        self.backup_conserved_quantities()# Add entropy computation in this function!
1013
1014        # Back up non_conserved (ehv) quantities, Sudi 13 July 2010
1015        self.backup_ehv_quantities()
1016
1017        initial_time = self.time
1018       
1019        #--------------------------------------
1020        # First euler step
1021        #--------------------------------------
1022
1023        # Compute fluxes across each element edge
1024        self.compute_fluxes()# Add entropy computation in this function!
1025
1026        # Update timestep to fit yieldstep and finaltime
1027        self.update_timestep(yieldstep, finaltime)
1028
1029        # Update conserved quantities
1030        self.update_conserved_quantities()# Add entropy computation in this function!
1031
1032        # Update ghosts
1033        self.update_ghosts()
1034
1035        # Update vertex and edge values
1036        self.distribute_to_vertices_and_edges()
1037
1038        # Update boundary values
1039        self.update_boundary()
1040
1041        # Update time
1042        self.time += self.timestep
1043
1044        #------------------------------------
1045        # Second Euler step
1046        #------------------------------------
1047           
1048        # Compute fluxes across each element edge
1049        self.compute_fluxes()# Add entropy computation in this function!
1050
1051        # Update conserved quantities
1052        self.update_conserved_quantities()# Add entropy computation in this function!
1053
1054        #------------------------------------
1055        #Combine steps to obtain intermediate
1056        #solution at time t^n + 0.5 h
1057        #------------------------------------
1058
1059        # Combine steps
1060        self.saxpy_conserved_quantities(0.25, 0.75)# Add entropy computation in this function!
1061 
1062        # Update ghosts
1063        self.update_ghosts()
1064
1065        # Update vertex and edge values
1066        self.distribute_to_vertices_and_edges()
1067
1068        # Update boundary values
1069        self.update_boundary()
1070
1071        # Set substep time
1072        self.time = initial_time + self.timestep*0.5
1073
1074        #------------------------------------
1075        # Third Euler step
1076        #------------------------------------
1077           
1078        # Compute fluxes across each element edge
1079        self.compute_fluxes()# Add entropy computation in this function!
1080
1081        # Update conserved quantities
1082        self.update_conserved_quantities()# Add entropy computation in this function!
1083
1084        #------------------------------------
1085        # Combine final and initial values
1086        # and cleanup
1087        #------------------------------------
1088       
1089        # Combine steps
1090        self.saxpy_conserved_quantities(2.0/3.0, 1.0/3.0)# Add entropy computation in this function!
1091 
1092        # Update ghosts
1093        self.update_ghosts()
1094
1095        # Update vertex and edge values
1096        self.distribute_to_vertices_and_edges()
1097
1098        # Update boundary values
1099        self.update_boundary()
1100
1101        # Set new time
1102        self.time = initial_time + self.timestep
1103
1104        # Compute numerical entropy production, Sudi 13 Aug 2010
1105        self.produce_entropy()           
1106
1107        # Detect shock by local truncation error of Constantine-Kurganov, Sudi 13 July 2010
1108        # See "Adaptive Central-Upwind Schemes for Hyperbolic Systems of Conservation Laws"
1109        self.compute_local_truncation_error_CK_h()   
1110        self.compute_local_truncation_error_CK_uh()         
1111
1112        # Detect shock by local truncation error of Karni-Kurganov-Petrova, Sudi 31 August 2010
1113        # See "A Smoothness Indicator for Adaptive Algorithms for Hyperbolic Systems"
1114        self.compute_local_truncation_error_KKP_h()     
1115        self.compute_local_truncation_error_KKP_uh()         
1116       
1117
1118    def backup_conserved_quantities(self):
1119        N = len(self) # Number_of_triangles
1120        # Backup the backup of conserved quantities centroid values
1121        for name in self.conserved_quantities:
1122            Q = self.quantities[name]
1123            Q.backup_the_backup_centroid_values()
1124            Q.backup_the_backup_vertex_values()           
1125        # Back up entropy centroid values
1126        #self.centroid_backup_values[:] = (self.centroid_values).astype('f')
1127        self.entropy_centroid_backup_values[:] = (self.entropy_centroid_values).astype('f')       
1128        # Backup conserved_quantities centroid values
1129        for name in self.conserved_quantities:
1130            Q = self.quantities[name]
1131            Q.backup_centroid_values()
1132            Q.backup_vertex_values()           
1133
1134    def backup_evolved_quantities(self):
1135        N = len(self) # Number_of_triangles
1136        # Backup the backup of evolved quantities centroid values
1137        for name in self.evolved_quantities:
1138            Q = self.quantities[name]
1139            Q.backup_the_backup_centroid_values()
1140            Q.backup_the_backup_vertex_values()           
1141        # Backup evolved_quantities centroid values
1142        for name in self.evolved_quantities:
1143            Q = self.quantities[name]
1144            Q.backup_centroid_values()
1145            Q.backup_vertex_values()             
1146
1147    def backup_ehv_quantities(self):
1148        N = len(self) # Number_of_triangles
1149        # Backup the backup of ehv quantities centroid values
1150        for name in ['elevation', 'height', 'velocity']:
1151            Q = self.quantities[name]
1152            Q.backup_the_backup_centroid_values()
1153            Q.backup_the_backup_vertex_values()           
1154        # Backup NON-conserved_quantities centroid values
1155        for name in ['elevation', 'height', 'velocity']:
1156            Q = self.quantities[name]
1157            Q.backup_centroid_values()
1158            Q.backup_vertex_values()           
1159
1160    def compute_local_truncation_error_KKP_h(self):
1161        h_2past= self.quantities['height'].backup_of_centroid_backup_values
1162        h_1past= self.quantities['height'].centroid_backup_values
1163        h_now  = self.quantities['height'].centroid_values
1164        u_2past= self.quantities['velocity'].backup_of_centroid_backup_values
1165        u_1past= self.quantities['velocity'].centroid_backup_values
1166        u_now  = self.quantities['velocity'].centroid_values
1167        Ver = self.vertices
1168        N = self.number_of_elements
1169        E = self.local_truncation_error_KKP_h
1170        E[0] = E[N-1] = 0.0
1171        #from parameters import points
1172        for i in range(N-2):
1173            k = i+1
1174            interval = Ver[k]
1175            dx = interval[1]-interval[0]
1176            dt = self.timestep
1177            E[k] = (dx/12.0) * (h_now[k+1]-h_2past[k+1] + 4.0*h_now[k]-4.0*h_2past[k] + h_now[k-1]-h_2past[k-1])
1178            E[k] += (dt/12.0) * (h_now[k+1]*u_now[k+1] - h_now[k-1]*u_now[k-1]
1179                                 + 4.0*h_1past[k+1]*u_1past[k+1] - 4.0*h_1past[k-1]*u_1past[k-1]
1180                                 + h_2past[k+1]*u_2past[k+1] - h_2past[k-1]*u_2past[k-1])
1181        E[0] = E[N-1] = E[N-2] = 0.0
1182
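    # Editor's note: E[k] above approximates the residual of the mass
    # equation h_t + (h*u)_x = 0 integrated over cell k and over the three
    # stored time levels (2past, 1past, now), using the 1-4-1 weights visible
    # in the code.  In smooth regions this is of the size of the local
    # truncation error and it grows near discontinuities, which is what makes
    # it usable as the shock detector referred to in the comments above.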
1183    def compute_local_truncation_error_KKP_uh(self):
1184        h_2past= self.quantities['height'].backup_of_centroid_backup_values
1185        h_1past= self.quantities['height'].centroid_backup_values
1186        h_now  = self.quantities['height'].centroid_values
1187        u_2past= self.quantities['velocity'].backup_of_centroid_backup_values
1188        u_1past= self.quantities['velocity'].centroid_backup_values
1189        u_now  = self.quantities['velocity'].centroid_values
1190        #z_2past= self.quantities['elevation'].backup_of_centroid_backup_values
1191        #z_1past= self.quantities['elevation'].centroid_backup_values
1192        #z_now  = self.quantities['elevation'].centroid_values
1193
1194        z_2past= self.quantities['elevation'].backup_of_vertex_backup_values
1195        z_1past= self.quantities['elevation'].vertex_backup_values
1196        z_now  = self.quantities['elevation'].vertex_values       
1197        Ver = self.vertices
1198        N = self.number_of_elements
1199        E = self.local_truncation_error_KKP_uh
1200        E[0] = E[1] = E[N-1] = 0.0
1201        #from parameters import points
1202        from config import g
1203        for i in range(N-4):
1204            #print "i=",i
1205            k = i+2
1206            #print "k=",k
1207            interval = Ver[k]
1208            dx = interval[1]-interval[0]
1209            dt = self.timestep
1210            E[k] = (dx/12.0) * (h_now[k+1]*u_now[k+1] -h_2past[k+1]*u_2past[k+1]
1211                                + 4.0*h_now[k]*u_now[k] -4.0*h_2past[k]*u_2past[k]
1212                                +h_now[k-1]*u_now[k-1] -h_2past[k-1]*u_2past[k-1])
1213            E[k] += (dt/12.0) * ( (h_now[k+1]*u_now[k+1]**2 +0.5*g*h_now[k+1]**2)
1214                                  -(h_now[k-1]*u_now[k-1]**2 +0.5*g*h_now[k-1]**2)
1215                                 + (4.0*h_1past[k+1]*u_1past[k+1]**2 + 4.0*0.5*g*h_1past[k+1]**2)
1216                                  - (4.0*h_1past[k-1]*u_1past[k-1]**2 + 4.0*0.5*g*h_1past[k-1]**2)
1217                                 + (h_2past[k+1]*u_2past[k+1]**2 +0.5*g*h_2past[k+1]**2)
1218                                  - (h_2past[k-1]*u_2past[k-1]**2 +0.5*g*h_2past[k-1]**2) )
1219            #The following is used if the vertex values of the elevation are taken.
1220            E[k] +=(g*dt/36.0)*(h_2past[k-1]*(z_2past[k-1,1]-z_2past[k-1,0])
1221                                +4.0*h_1past[k-1]*(z_1past[k-1,1]-z_1past[k-1,0])
1222                                +h_now[k-1]*(z_now[k-1,1]-z_now[k-1,0])
1223                                + 4.0*h_2past[k]*(z_2past[k,1]-z_2past[k,0])
1224                                + 16.0*h_1past[k]*(z_1past[k,1]-z_1past[k,0])
1225                                + 4.0*h_now[k]*(z_now[k,1]-z_now[k,0])
1226                                + h_2past[k+1]*(z_2past[k+1,1]-z_2past[k+1,0])
1227                                + 4.0*h_1past[k+1]*(z_1past[k+1,1]-z_1past[k+1,0])
1228                                + h_now[k+1]*(z_now[k+1,1]-z_now[k+1,0]))           
1229
1230            #The following is used if the centroid values of the elevation are taken.
1231            #E[k] +=(g*dt/36.0)*(h_2past[k-1]*0.5*(z_2past[k]-z_2past[k-2])
1232            #                    +4.0*h_1past[k-1]*0.5*(z_1past[k]-z_1past[k-2])
1233            #                    +h_now[k-1]*0.5*(z_now[k]-z_now[k-2])
1234            #                    + 4.0*h_2past[k]*0.5*(z_2past[k+1]-z_2past[k-1])
1235            #                    + 16.0*h_1past[k]*0.5*(z_1past[k+1]-z_1past[k-1])
1236            #                    + 4.0*h_now[k]*0.5*(z_now[k+1]-z_now[k-1])
1237            #                    + h_2past[k+1]*0.5*(z_2past[k+2]-z_2past[k])
1238            #                    + 4.0*h_1past[k+1]*0.5*(z_1past[k+2]-z_1past[k])
1239            #                    + h_now[k+1]*0.5*(z_now[k+2]-z_now[k]))
1240            #The following is for the averaging of the height.
1241            #E[k] +=(g*dt/36.0)*(0.5*(h_2past[k]+h_2past[k-2])*0.5*(z_2past[k]-z_2past[k-2])
1242            #                    +4.0*0.5*(h_1past[k]+h_1past[k-2])*0.5*(z_1past[k]-z_1past[k-2])
1243            #                    +0.5*(h_now[k]+h_now[k-2])*0.5*(z_now[k]-z_now[k-2])
1244            #                    + 4.0*0.5*(h_2past[k+1]+h_2past[k-1])*0.5*(z_2past[k+1]-z_2past[k-1])
1245            #                    + 16.0*0.5*(h_1past[k+1]+h_1past[k-1])*0.5*(z_1past[k+1]-z_1past[k-1])
1246            #                    + 4.0*0.5*(h_now[k+1]+h_now[k-1])*0.5*(z_now[k+1]-z_now[k-1])
1247            #                    + 0.5*(h_2past[k+2]+h_2past[k])*0.5*(z_2past[k+2]-z_2past[k])
1248            #                    + 4.0*0.5*(h_1past[k+2]+h_1past[k])*0.5*(z_1past[k+2]-z_1past[k])
1249            #                    + 0.5*(h_now[k+2]+h_now[k])*0.5*(z_now[k+2]-z_now[k]))           
1250        E[0] = E[1] = E[N-1] = E[N-2] = 0.0       
1251
1252    def compute_local_truncation_error_CK_h(self):
1253        h_prev = self.quantities['height'].centroid_backup_values
1254        h_now  = self.quantities['height'].centroid_values
1255        u_prev = self.quantities['velocity'].centroid_backup_values
1256        u_now  = self.quantities['velocity'].centroid_values
1257        Ver = self.vertices
1258        N = self.number_of_elements
1259        E = self.local_truncation_error_CK_h
1260        E[0] = E[N] = E[N-1] = 0.0
1261        #from parameters import points
1262        for i in range(N-2):
1263            k = i+1
1264            interval = Ver[k]
1265            dx = interval[1]-interval[0]
1266            dt = self.timestep
1267            E[k+1] = 0.5 * dx *(h_now[k] - h_prev[k] + h_now[k+1] - h_prev[k+1])
1268            E[k+1] += 0.5 * dt *(h_prev[k+1]*u_prev[k+1] - h_prev[k]*u_prev[k]
1269                               + h_now[k+1]*u_now[k+1] - h_now[k]*u_now[k])
1270        E[0] = E[1] = E[N] = E[N-1] = E[N-2] = 0.0
1271
1272        ##    def compute_local_truncation_error_CK_uh(self):
1273        ##        h_prev = self.quantities['height'].centroid_backup_values
1274        ##        h_now  = self.quantities['height'].centroid_values
1275        ##        u_prev = self.quantities['velocity'].centroid_backup_values
1276        ##        u_now  = self.quantities['velocity'].centroid_values
1277        ##        z_prev = self.quantities['elevation'].centroid_backup_values
1278        ##        z_now  = self.quantities['elevation'].centroid_values
1279        ##        #z_prev = self.quantities['elevation'].vertex_backup_values
1280        ##        #z_now  = self.quantities['elevation'].vertex_values       
1281        ##        Ver = self.vertices
1282        ##        N = self.number_of_elements
1283        ##        E = self.local_truncation_error_CK_uh
1284        ##        E[0] = E[1] = E[N] = E[N-1] = E[N-2]= E[N-3] = 0.0
1285        ##        #from parameters import points
1286        ##        from config import g
1287        ##        for i in range(N-4):
1288        ##            #print "i=",i
1289        ##            k = i+2
1290        ##            #print "k-1=",k-1
1291        ##            #print "k=",k
1292        ##            #print "k+1=",k+1
1293        ##            #print "z_prev[k+1]=",z_prev[k+1]
1294        ##            #print "==============================="
1295        ##            interval = Ver[k]
1296        ##            dx = interval[1]-interval[0]
1297        ##            dt = self.timestep
1298        ##            E[k] = 0.5 * dx *(h_now[k]*u_now[k] - h_prev[k]*u_prev[k] + h_now[k+1]*u_now[k+1] - h_prev[k+1]*u_prev[k+1])
1299        ##            E[k] += 0.5 * dt *( (h_prev[k+1]*u_prev[k+1]**2 +0.5*g*h_prev[k+1]**2) - (h_prev[k]*u_prev[k]**2 +0.5*g*h_prev[k]**2)
1300        ##                               + (h_now[k+1]*u_now[k+1]**2 +0.5*g*h_now[k+1]**2) - (h_now[k]*u_now[k]**2 +0.5*g*h_now[k]**2) )
1301        ##            #The following is used if the vertex values of the elevation are taken.           
1302        ##            #E[k] +=2.0*(g*dt/9.0) *(h_prev[k]*(z_prev[k,1]-z_prev[k,0]) + h_now[k]*(z_now[k,1]-z_now[k,0])
1303        ##            #                    + h_prev[k+1]*(z_prev[k+1,1]-z_prev[k+1,0]) + h_now[k+1]*(z_now[k+1,1]-z_now[k+1,0]))     
1304        ##            #The following is used if the centroid values of the elevation are taken.           
1305        ##            E[k] +=1.125*2.0*(g*dt/9.0) *(h_prev[k]*0.5*(z_prev[k+1]-z_prev[k-1]) + h_now[k]*0.5*(z_now[k+1]-z_now[k-1])
1306        ##                                + h_prev[k+1]*0.5*(z_prev[k+2]-z_prev[k]) + h_now[k+1]*0.5*(z_now[k+2]-z_now[k]))           
1307        ##        E[0] = E[1] = E[N] = E[N-1] = E[N-2]= E[N-3] = 0.0
1308
1309    def compute_local_truncation_error_CK_uh(self):
1310        h_prev = self.quantities['height'].centroid_backup_values
1311        h_now  = self.quantities['height'].centroid_values
1312        u_prev = self.quantities['velocity'].centroid_backup_values
1313        u_now  = self.quantities['velocity'].centroid_values
1314        z_prev = self.quantities['elevation'].centroid_backup_values
1315        z_now  = self.quantities['elevation'].centroid_values
1316        #z_prev = self.quantities['elevation'].vertex_backup_values
1317        #z_now  = self.quantities['elevation'].vertex_values           
1318        Ver = self.vertices
1319        N = self.number_of_elements
1320        E = self.local_truncation_error_CK_uh
1321        E[0] = E[1] = E[N] = E[N-1] = E[N-2]= E[N-3] = 0.0 
1322        #from parameters import points
1323        from config import g
1324        for i in range(N-4):
1325            k = i+2
1326            interval = Ver[k]
1327            dx = interval[1]-interval[0]
1328            dt = self.timestep
1329            E[k]  = 0.25 * dx *(h_now[k]*u_now[k] - h_prev[k]*u_prev[k] + h_now[k+1]*u_now[k+1] - h_prev[k+1]*u_prev[k+1])
1330            E[k] += 0.25 * dt *( (h_prev[k+1]*u_prev[k+1]**2 +0.5*g*h_prev[k+1]**2) - (h_prev[k]*u_prev[k]**2 +0.5*g*h_prev[k]**2)
1331                               + (h_now[k+1]*u_now[k+1]**2 +0.5*g*h_now[k+1]**2) - (h_now[k]*u_now[k]**2 +0.5*g*h_now[k]**2) )           
1332            #The following is used if the vertex values of the elevation are taken.           
1333            #E[k] += 0.25*g*dt *(h_prev[k]*(z_prev[k,1]-z_prev[k,0]) + h_now[k]*(z_now[k,1]-z_now[k,0])
1334            #                    + h_prev[k+1]*(z_prev[k+1,1]-z_prev[k+1,0]) + h_now[k+1]*(z_now[k+1,1]-z_now[k+1,0]))     
1335            #The following is used if the centroid values of the elevation are taken.           
1336            E[k] +=0.125*g*dt *(h_prev[k]*0.5*(z_prev[k+1]-z_prev[k-1]) + h_now[k]*0.5*(z_now[k+1]-z_now[k-1])
1337                                + h_prev[k+1]*0.5*(z_prev[k+2]-z_prev[k]) + h_now[k+1]*0.5*(z_now[k+2]-z_now[k]))         
1338           
1339        E[0] = E[1] = E[N] = E[N-1] = E[N-2]= E[N-3] = 0.0         
1340       
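    # The indicator E[k] computed above is, up to a constant factor, a trapezoidal
    # (Crank-Nicolson style, hence "CK") approximation in both space and time of
    # the momentum-equation residual
    #
    #     R = (h*u)_t + (h*u**2 + 0.5*g*h**2)_x + g*h*z_x
    #
    # integrated over one timestep and the region between the centroids of cells
    # k and k+1, with z_x taken from centred differences of the centroid
    # elevations.  Cells near the boundaries are excluded and their entries zeroed.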
1341
1342    def produce_entropy(self):
1343        from config import g       
1344        h_C  = self.quantities['height'].centroid_values
1345        u_C  = self.quantities['velocity'].centroid_values
1346        z_C  = self.quantities['elevation'].centroid_values
1347        #Cen= self.centroids
1348        #Ver= self.vertices
1349        N = self.number_of_elements
1350        EP = self.entropy_production
1351        EP[0] = EP[N-1] = 0.0
1352        dt = self.timestep
1353        for i in range(N-2):
1354            k = i+1
1355            #interval = Ver[k]
1356            #dx = interval[1]-interval[0]
1357            entropy_now = 0.5*h_C[k]*u_C[k]**2.0 + 0.5*g*h_C[k]**2.0 + g*h_C[k]*z_C[k]
1358            EP[k]   = (1.0/dt)*(entropy_now - self.entropy_centroid_values[k])
1359        EP[0] = EP[N-1] = EP[N-2] = 0.0
1360
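    # produce_entropy compares the entropy of the evolved state,
    #
    #     eta(h, u, z) = 0.5*h*u**2 + 0.5*g*h**2 + g*h*z ,
    #
    # with self.entropy_centroid_values, which is advanced separately by its own
    # flux update (see update_conserved_quantities).  The difference per unit
    # time, EP[k], serves as a numerical entropy-production indicator; cells
    # next to the boundaries are zeroed.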
1361
1362    def saxpy_conserved_quantities(self,a,b):
1363        N = len(self)   # number of elements (not used below)
1364        #self.centroid_values[:] = (a*self.centroid_values + b*self.centroid_backup_values).astype('f')
1365        self.entropy_centroid_values[:] = (a*self.entropy_centroid_values + b*self.entropy_centroid_backup_values).astype('f')
1366        # Backup conserved_quantities centroid values
1367        for name in self.conserved_quantities:
1368            Q = self.quantities[name]
1369            Q.saxpy_centroid_values(a,b) 
1370   
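    # saxpy_conserved_quantities forms, for every conserved quantity,
    #
    #     centroid_values <- a*centroid_values + b*centroid_backup_values
    #
    # (and likewise for the entropy).  For example, with a = b = 0.5 the current
    # values are averaged with the backed-up values, as in the final stage of an
    # SSP Runge-Kutta update.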
1371    def solve_inhomogenous_second_order(self,yieldstep, finaltime):
1372
1373        #Update timestep to fit yieldstep and finaltime
1374        self.update_timestep(yieldstep, finaltime)
1375        #Compute forcing terms
1376        self.compute_forcing_terms()
1377        #Update conserved quantities
1378        self.update_conserved_quantities(0.5*self.timestep)
1379        #Update vertex and edge values
1380        self.distribute_to_vertices_and_edges()
1381        #Update boundary values
1382        self.update_boundary()
1383
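    # solve_inhomogenous_second_order advances the forcing (source) terms over
    # half a timestep.  One natural way to combine it with the homogeneous
    # solvers below is a Strang splitting (a sketch of a calling pattern only,
    # not a method defined in this class):
    #
    #     domain.solve_inhomogenous_second_order(yieldstep, finaltime)   # S(dt/2)
    #     domain.solve_homogenous_second_order(yieldstep, finaltime)     # H(dt)
    #     domain.solve_inhomogenous_second_order(yieldstep, finaltime)   # S(dt/2)
    #
    # which is second-order accurate when each sub-step is at least second order.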
1384    def solve_homogenous_second_order(self, yieldstep, finaltime):
1385        """Use Shu's (SSP Runge-Kutta) second-order timestepping to update
1386        the conserved quantities:
1387
1388        q^{*}   = q^{n} + dt*F^{n}
1389        q^{n+1} = q^{n} + 0.5*dt*(F^{n} + F^{*})
1390        """
1391        import copy
1392        from Numeric import zeros, Float       
1393
1394        N = self.number_of_elements
1395
1396        self.compute_fluxes()
1397        #Update timestep to fit yieldstep and finaltime
1398        self.update_timestep(yieldstep, finaltime)
1399        #Compute forcing terms
1400        #NOT NEEDED FOR 2ND ORDER STRANG SPLITTING
1401        #ADDING THIS WOULD REQUIRE REMOVING THE ZEROING IN COMPUTE_FORCING_TERMS
1402        #self.compute_forcing_terms()
1403
1404        QC = zeros((N,len(self.conserved_quantities)),Float)
1405        QF = zeros((N,len(self.conserved_quantities)),Float)
1406
1407        i = 0
1408        for name in self.conserved_quantities:
1409            Q = self.quantities[name]
1410            #Store the centroid values at time t^n
1411            QC[:,i] = copy.copy(Q.centroid_values)
1412            QF[:,i] = copy.copy(Q.explicit_update)
1413            #Update conserved quantities
1414            Q.update(self.timestep)
1415            i+=1
1416           
1417        #Update vertex and edge values
1418        self.distribute_to_vertices_and_edges()
1419        #Update boundary values
1420        self.update_boundary()
1421       
1422        self.compute_fluxes()
1423        self.update_timestep(yieldstep, finaltime)
1424        #Compute forcing terms
1425        #NOT NEEDED FOR 2ND ORDER STRANG SPLITTING
1426        #self.compute_forcing_terms()
1427
1428        i = 0
1429        for name in self.conserved_quantities:
1430            Q = self.quantities[name]
1431            Q.centroid_values = QC[:,i]
1432            Q.explicit_update = 0.5*(Q.explicit_update+QF[:,i]) 
1433            #Update conserved quantities
1434            Q.update(self.timestep)
1435            i+=1
1436       
1437        #Update vertex and edge values
1438        self.distribute_to_vertices_and_edges()
1439        #Update boundary values
1440        self.update_boundary()
1441
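    # A minimal scalar sketch of the two-stage update performed above (the names
    # F, q and dt are illustrative only, not part of the Domain API):
    #
    #     F      = lambda q: -q                    # stand-in for the flux operator
    #     q, dt  = 1.0, 0.1
    #     F_n    = F(q)
    #     q_star = q + dt*F_n                      # first stage: forward Euler
    #     q      = q + 0.5*dt*(F_n + F(q_star))    # second stage: average the updates
    #
    # In the method above QC stores q^{n} and QF stores F^{n}, so the averaged
    # update can be applied from the original centroid values.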
1442    def solve_homogenous_second_order_harten(self, yieldstep, finaltime):
1443        """Use Harten's (midpoint) second-order timestepping to update
1444        the conserved quantities:
1445
1446        q^{n+1/2} = q^{n} + 0.5*dt*F^{n}
1447        q^{n+1}   = q^{n} + dt*F^{n+1/2}
1448        """
1449        import copy
1450        from Numeric import zeros, Float       
1451
1452        N = self.number_of_elements
1453
1454        self.compute_fluxes()
1455        #Update timestep to fit yieldstep and finaltime
1456        self.update_timestep(yieldstep, finaltime)
1457        #Compute forcing terms
1458        #NOT NEEDED FOR 2ND ORDER STRANG SPLITTING
1459        #ADDING THIS WOULD REQUIRE REMOVING THE ZEROING IN COMPUTE_FORCING_TERMS
1460        #self.compute_forcing_terms()
1461
1462        QC = zeros((N,len(self.conserved_quantities)),Float)
1463
1464        i = 0
1465        for name in self.conserved_quantities:
1466            Q = self.quantities[name]
1467            #Store the centroid values at time t^n
1468            QC[:,i] = copy.copy(Q.centroid_values)
1469            #Update conserved quantities
1470            Q.update(0.5*self.timestep)
1471            i+=1
1472           
1473        #Update vertex and edge values
1474        self.distribute_to_vertices_and_edges()
1475        #Update boundary values
1476        self.update_boundary()
1477       
1478        self.compute_fluxes()
1479        self.update_timestep(yieldstep, finaltime)
1480        #Compute forcing terms
1481        #NOT NEEDED FOR 2ND ORDER STRANG SPLITTING
1482        #self.compute_forcing_terms()
1483
1484        i = 0
1485        for name in self.conserved_quantities:
1486            Q = self.quantities[name]
1487            Q.centroid_values = QC[:,i] 
1488            #Update conserved quantities
1489            Q.update(self.timestep)
1490            i+=1
1491       
1492        #Update vertex and edge values
1493        self.distribute_to_vertices_and_edges()
1494        #Update boundary values
1495        self.update_boundary()
1496       
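    # A minimal scalar sketch of the midpoint update performed above (the names
    # F, q and dt are illustrative only):
    #
    #     F      = lambda q: -q              # stand-in for the flux operator
    #     q, dt  = 1.0, 0.1
    #     q_half = q + 0.5*dt*F(q)           # half step to t^{n+1/2}
    #     q      = q + dt*F(q_half)          # full step using the midpoint flux
    #
    # Here QC stores q^{n}, so the centroid values can be restored before the
    # final full step.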
1497    def distribute_to_vertices_and_edges(self):
1498        """Extrapolate conserved quantities from centroid to
1499        vertices and edge-midpoints for each volume
1500
1501        Default implementation is straight first order,
1502        i.e. constant values throughout each element and
1503        no reference to non-conserved quantities.
1504        """
1505
1506        for name in self.conserved_quantities:
1507            Q = self.quantities[name]
1508            if self.order == 1:
1509                Q.extrapolate_first_order()
1510            elif self.order == 2:
1511                Q.extrapolate_second_order()
1512                #Q.limit()
1513            else:
1514                raise Exception('Unknown order: %s (expected 1 or 2)' % self.order)
1515
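    # For a cell with centroid value q_c, width dx and (limited) gradient dq/dx,
    # the two reconstructions used above amount to, roughly:
    #
    #     first order :  q_left = q_right = q_c
    #     second order:  q_left = q_c - 0.5*dx*dq/dx,  q_right = q_c + 0.5*dx*dq/dx
    #
    # with the gradient limited (e.g. minmod) inside extrapolate_second_order so
    # that no new extrema are created.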
1516
1517##    def update_ghosts(self):
1518##        pass
1519##        """
1520##        n = self.number_of_elements
1521##        from sf_parameters import uh_upstream, h_downstream, cell_len       
1522##        w_C = self.quantities['stage'].centroid_values
1523##        p_C = self.quantities['xmomentum'].centroid_values
1524##        z_C = self.quantities['elevation'].centroid_values
1525##        h_C = self.quantities['height'].centroid_values
1526##        u_C = self.quantities['velocity'].centroid_values
1527##       
1528##        w_C[n-1] = w_C[n-2] = h_downstream
1529##        p_C[n-1] = p_C[n-2] = p_C[n-3]
1530##        z_C[n-1] = z_C[n-2] = 0.0
1531##        h_C[n-1] = h_C[n-2] = h_downstream
1532##        u_C[n-1] = u_C[n-2] = u_C[n-3]
1533##
1534##        #w_C[1] = w_C[0] = h_downstream + uh_upstream/cell_len*0.001#self.timestep
1535##        #p_C[1] = p_C[0] = p_C[2]
1536##        #z_C[1] = z_C[0] = 0.0
1537##        #h_C[1] = h_C[0] = h_downstream + uh_upstream/cell_len*0.001#self.timestep
1538##        #u_C[1] = u_C[0] = u_C[2]
1539##        """
1540##       
1541##   
1542##    def update_boundary(self):
1543##        #Go through list of boundary objects and update boundary values for all conserved quantities on boundary.
1544##       
1545##        for i, ((vol_id, vertex_id), B) in enumerate(self.boundary_objects):
1546##            q = B.evaluate(vol_id, vertex_id)
1547##            for j, name in enumerate(self.evolved_quantities):
1548##                #print 'name %s j = %f \n'%(name,j)
1549##                Q = self.quantities[name]
1550##                Q.boundary_values[i] = q[j]
1551##       
1552##        """
1553##        ##!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!       
1554##        for j, name in enumerate(self.evolved_quantities):
1555##            BV = self.quantities[name].boundary_values
1556##            VV = self.quantities[name].vertex_values
1557##            n = len(VV)
1558##            VV[0,0] = BV[0]
1559##            VV[n-1,1] = BV[1]   
1560##       
1561##        #Below is for fixing the ghost cell.
1562##        from sf_parameters import uh_upstream, h_downstream, cell_len
1563##        n = self.number_of_elements
1564##        w_C = self.quantities['stage'].centroid_values
1565##        p_C = self.quantities['xmomentum'].centroid_values
1566##        z_C = self.quantities['elevation'].centroid_values
1567##        h_C = self.quantities['height'].centroid_values
1568##        u_C = self.quantities['velocity'].centroid_values
1569##
1570##        w_V = self.quantities['stage'].vertex_values
1571##        p_V = self.quantities['xmomentum'].vertex_values
1572##        z_V = self.quantities['elevation'].vertex_values
1573##        h_V = self.quantities['height'].vertex_values
1574##        u_V = self.quantities['velocity'].vertex_values
1575##        w_V[n-1,0] = w_V[n-1,1] = w_V[n-2,0] = w_V[n-2,1] = h_downstream
1576##        p_V[n-1,0] = p_V[n-1,1] = p_V[n-2,0] = p_V[n-2,1] = p_C[n-3]       
1577##        z_V[n-1,0] = z_V[n-1,1] = z_V[n-2,0] = z_V[n-2,1] = 0.0
1578##        h_V[n-1,0] = h_V[n-1,1] = h_V[n-2,0] = h_V[n-2,1] = h_downstream
1579##        u_V[n-1,0] = u_V[n-1,1] = u_V[n-2,0] = u_V[n-2,1] = u_C[n-3]
1580##
1581##        #w_V[1,0] = w_V[1,1] = w_V[0,0] = w_V[0,1] = h_downstream + uh_upstream/cell_len*self.timestep
1582##        #p_V[1,0] = p_V[1,1] = p_V[0,0] = p_V[0,1] = p_C[2]       
1583##        #z_V[1,0] = z_V[1,1] = z_V[0,0] = z_V[0,1] = 0.0
1584##        #h_V[1,0] = h_V[1,1] = h_V[0,0] = h_V[0,1] = h_downstream + uh_upstream/cell_len*self.timestep
1585##        #u_V[1,0] = u_V[1,1] = u_V[0,0] = u_V[0,1] = u_C[2]
1586##        """
1587
1588    def update_ghosts(self):
1589        #pass
1590       
1591        n = len(self.quantities['stage'].vertex_values)
1592        from parameters import bed_slope, cell_len       
1593
1594        self.quantities['stage'].centroid_values[n-1]     = self.quantities['stage'].boundary_values[1] - 0.5*bed_slope*cell_len
1595        self.quantities['xmomentum'].centroid_values[n-1] = self.quantities['xmomentum'].boundary_values[1]
1596        self.quantities['elevation'].centroid_values[n-1] = self.quantities['elevation'].boundary_values[1] - 0.5*bed_slope*cell_len
1597        self.quantities['height'].centroid_values[n-1]    = self.quantities['height'].boundary_values[1]
1598        self.quantities['velocity'].centroid_values[n-1]  = self.quantities['velocity'].boundary_values[1]
1599        #Additional condition (following a discussion with Steve) to keep the dry bed smooth at the upstream end.
1600        self.quantities['stage'].centroid_values[0]     = self.quantities['stage'].boundary_values[0] + 0.5*bed_slope*cell_len
1601        self.quantities['xmomentum'].centroid_values[0] = self.quantities['xmomentum'].boundary_values[0]
1602        self.quantities['elevation'].centroid_values[0] = self.quantities['elevation'].boundary_values[0] + 0.5*bed_slope*cell_len
1603        self.quantities['height'].centroid_values[0]    = self.quantities['height'].boundary_values[0]
1604        self.quantities['velocity'].centroid_values[0]  = self.quantities['velocity'].boundary_values[0]       
1605       
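    # The ghost update above overwrites the first and last centroid values with
    # the corresponding boundary values; stage and elevation are shifted by
    # 0.5*bed_slope*cell_len so that a bed of constant slope stays linear through
    # the ghost cells, e.g. for the last cell:
    #
    #     z_c[n-1] = z_boundary - 0.5*bed_slope*cell_len
    #     w_c[n-1] = w_boundary - 0.5*bed_slope*cell_len   # stage follows the bed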
1606   
1607    def update_boundary(self):
1608        """Go through list of boundary objects and update boundary values
1609        for all conserved quantities on boundary.
1610        """       
1611        for i, ((vol_id, vertex_id), B) in enumerate(self.boundary_objects):
1612            q = B.evaluate(vol_id, vertex_id)
1613            for j, name in enumerate(self.evolved_quantities):
1614                #print 'name %s j = %f \n'%(name,j)
1615                Q = self.quantities[name]
1616                Q.boundary_values[i] = q[j]
1617       
1618        #Impose the boundary values directly on the outermost vertex values.
1619        for j, name in enumerate(self.evolved_quantities):
1620            BV = self.quantities[name].boundary_values
1621            VV = self.quantities[name].vertex_values
1622            n = len(VV)
1623            VV[0,0] = BV[0]
1624            VV[n-1,1] = BV[1]
1625
1626       
1627        #Below is for fixing the ghost cell.
1628        from parameters import bed_slope, cell_len
1629        n = len(self.quantities['stage'].vertex_values)
1630        self.quantities['stage'].vertex_values[n-1,0]     = self.quantities['stage'].boundary_values[1] - bed_slope*cell_len
1631        self.quantities['xmomentum'].vertex_values[n-1,0] = self.quantities['xmomentum'].boundary_values[1]
1632        self.quantities['elevation'].vertex_values[n-1,0] = self.quantities['elevation'].boundary_values[1] - bed_slope*cell_len
1633        self.quantities['height'].vertex_values[n-1,0]    = self.quantities['height'].boundary_values[1]
1634        self.quantities['velocity'].vertex_values[n-1,0]  = self.quantities['velocity'].boundary_values[1]
1635        #Additional condition (following a discussion with Steve) to keep the dry bed smooth at the upstream end.
1636        self.quantities['stage'].vertex_values[0,1]     = self.quantities['stage'].boundary_values[0] + bed_slope*cell_len
1637        self.quantities['xmomentum'].vertex_values[0,1] = self.quantities['xmomentum'].boundary_values[0]
1638        self.quantities['elevation'].vertex_values[0,1] = self.quantities['elevation'].boundary_values[0] + bed_slope*cell_len
1639        self.quantities['height'].vertex_values[0,1]    = self.quantities['height'].boundary_values[0]
1640        self.quantities['velocity'].vertex_values[0,1]  = self.quantities['velocity'].boundary_values[0]
1641       
1642        """
1643        ################
1644        n = len(self.quantities['stage'].vertex_values)
1645        self.quantities['stage'].vertex_values[n-1,0]     = self.quantities['stage'].centroid_values[n-1]
1646        self.quantities['xmomentum'].vertex_values[n-1,0] = self.quantities['xmomentum'].centroid_values[n-1]
1647        self.quantities['elevation'].vertex_values[n-1,0] = self.quantities['elevation'].centroid_values[n-1]
1648        self.quantities['height'].vertex_values[n-1,0]    = self.quantities['height'].centroid_values[n-1]
1649        self.quantities['velocity'].vertex_values[n-1,0]  = self.quantities['velocity'].centroid_values[n-1]
1650
1651        self.quantities['stage'].vertex_values[n-1,1]     = self.quantities['stage'].centroid_values[n-1]
1652        self.quantities['xmomentum'].vertex_values[n-1,1] = self.quantities['xmomentum'].centroid_values[n-1]
1653        self.quantities['elevation'].vertex_values[n-1,1] = self.quantities['elevation'].centroid_values[n-1]
1654        self.quantities['height'].vertex_values[n-1,1]    = self.quantities['height'].centroid_values[n-1]
1655        self.quantities['velocity'].vertex_values[n-1,1]  = self.quantities['velocity'].centroid_values[n-1]
1656        """
1657       
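    # Typical use (a sketch only; it assumes a Dirichlet_boundary class from
    # generic_boundary_conditions and a set_boundary method taking a
    # tag -> boundary mapping):
    #
    #     Br = Dirichlet_boundary([w, uh, z, h, u])   # one value per evolved quantity
    #     domain.set_boundary({'left': Br, 'right': Br})
    #
    # update_boundary then calls B.evaluate(vol_id, vertex_id) for each tagged
    # boundary edge and copies the returned vector into Q.boundary_values for
    # every evolved quantity.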
1658    def update_timestep(self, yieldstep, finaltime):
1659
1660        from config import min_timestep, max_timestep
1661
1662        # self.timestep is calculated from speed of characteristics
1663        # Apply CFL condition here
1664        timestep = min(self.CFL*self.flux_timestep, max_timestep)
1665
1666        #Record maximal and minimal values of timestep for reporting
1667        self.max_timestep = max(timestep, self.max_timestep)
1668        self.min_timestep = min(timestep, self.min_timestep)
1669
1670        #Protect against degenerate time steps
1671        if timestep < min_timestep:
1672
1673            #Number of consecutive small steps taken before taking action
1674            self.smallsteps += 1
1675
1676            if self.smallsteps > self.max_smallsteps:
1677                self.smallsteps = 0 #Reset
1678
1679                if self.order == 1:
1680                    msg = 'WARNING: Too small timestep %.16f reached '\
1681                          %timestep
1682                    msg += 'even after %d steps of the first-order scheme'\
1683                           %self.max_smallsteps
1684                    print msg
1685                    timestep = min_timestep  #Try enforcing min_timestep
1686
1687                    #raise msg
1688                else:
1689                    #Try to overcome the situation by switching to first order
1690                    print "WARNING: changing to first-order scheme"
1691                    self.order = 1
1692
1693        else:
1694            self.smallsteps = 0
1695            if self.order == 1 and self.default_order == 2:
1696                self.order = 2
1697
1698
1699        #Ensure that final time is not exceeded
1700        if finaltime is not None and self.time + timestep > finaltime:
1701            timestep = finaltime-self.time
1702
1703        #Ensure that model time is aligned with yieldsteps
1704        if self.yieldtime + timestep > yieldstep:
1705            timestep = yieldstep-self.yieldtime
1706
1707        self.timestep = timestep
1708
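    # A small worked example of the clipping above (illustrative numbers only):
    # with CFL = 1.0, flux_timestep = 0.04, max_timestep = 1.0, yieldtime = 0.18
    # and yieldstep = 0.2, the CFL step would be 0.04, but the yieldstep
    # alignment reduces it to 0.2 - 0.18 = 0.02, so self.timestep = 0.02.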
1709    def update_extrema(self):
1710        pass
1711
1712    def compute_forcing_terms(self):
1713        """If there are any forcing functions driving the system
1714        they should be defined in Domain subclass and appended to
1715        the list self.forcing_terms
1716        """
1717        #Clears explicit_update needed for second order method
1718        if self.time_order == 2:
1719            for name in self.conserved_quantities:
1720                Q = self.quantities[name]
1721                Q.explicit_update[:] = 0.0
1722
1723        for f in self.forcing_terms:
1724            f(self)
1725           
1726
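    # A forcing term is any callable taking the domain as its only argument and
    # adding its contribution to Q.explicit_update.  A hypothetical sketch
    # ('linear_friction' and 'tau' are illustrative names, not part of this module):
    #
    #     def linear_friction(domain):
    #         tau = 0.01                                   # assumed friction coefficient
    #         uh  = domain.quantities['xmomentum']
    #         uh.explicit_update -= tau*uh.centroid_values
    #
    #     domain.forcing_terms.append(linear_friction)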
1727    def update_derived_quantites(self):
1728        pass
1729   
1730    def update_conserved_quantities(self, timestep=None):
1731        """Update vectors of conserved quantities using previously
1732        computed fluxes and specified forcing functions.
1733
1734        If timestep is not given, self.timestep is used.
1735        """
1736
1737        N = self.number_of_elements
1738        d = len(self.conserved_quantities)
1739
1740        if timestep is None:
1741            timestep = self.timestep
1742
1743        #Compute forcing terms
1744        self.compute_forcing_terms()
1745
1746        #Compute the evolved entropy after one timestep, Sudi 16 Aug 2010
1747        #Note that entropy_centroid_values is INITIALLY defined in
1748        #compute_fluxes or compute_fluxes_quantity_and_entropy function
1749        self.entropy_centroid_values += timestep*self.entropy_explicit_update
1750
1751        #Update conserved_quantities
1752        for name in self.conserved_quantities:
1753            Q = self.quantities[name]
1754            Q.update(timestep)
1755
1756
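    # Each Q.update(timestep) call above essentially performs an explicit Euler
    # update of the centroid values from the accumulated flux and forcing
    # contributions, i.e. roughly
    #
    #     Q.centroid_values += timestep * Q.explicit_update
    #
    # (plus any semi-implicit contribution handled inside Quantity.update).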
1757
1758if __name__ == "__main__":
1759
1760    points1 = [0.0, 1.0, 2.0, 3.0]
1761    D1 = Domain(points1)
1762
1763    print D1.get_coordinate(0)
1764    print D1.get_coordinate(0,1)
1765    print 'Number of Elements = ',D1.number_of_elements
1766
1767    try:
1768        print D1.get_coordinate(3)
1769    except:
1770        pass
1771    else:
1772        msg = 'Should have raised an out-of-bounds exception'
1773        raise Exception(msg)