source: inundation/parallel/documentation/code/RunParallelSwMerimbulaMetis.py @ 3315

Last change on this file since 3315 was 3185, checked in by linda, 19 years ago

Updated shallow water equation to broadcast timestep after flux
calculation

#!/usr/bin/env python
###
#########################################################
#
#  Main file for parallel mesh testing. Runs a shallow
# water simulation using the merimbula mesh
#
#
#
# *) The (new) files that have been added to manage the
# grid partitioning are
#    +) pmesh_divide_metis.py: subdivide a pmesh
#    +) build_submesh.py: build the submeshes on the host
# processor.
#    +) build_local.py: build the GA mesh datastructure
# on each processor.
#    +) build_commun.py: handle the communication between
# the host and processors
#
#  Authors: Linda Stals, Steve Roberts and Matthew Hardy,
# June 2005
#
#
#
#########################################################
import sys
from pypar_dist import pypar   # The Python-MPI interface
import time

from os import sep

# ADD directory ..../anuga/inundation to PYTHONPATH instead
#sys.path.append('..'+sep+'pyvolution')
#sys.path.append('..'+sep+'parallel')

# Numeric arrays

from Numeric import array, zeros, Float

# pmesh

from pyvolution.shallow_water import Domain
from parallel.parallel_shallow_water import Parallel_Domain
from pyvolution.pmesh2domain import pmesh_to_domain_instance

# Mesh partition routines

from parallel.pmesh_divide import pmesh_divide_metis
from parallel.build_submesh import build_submesh
from parallel.build_local   import build_local_mesh
from parallel.build_commun  import send_submesh, rec_submesh, extract_hostmesh

###############################
# Read in processor information
###############################

numprocs = pypar.size()
myid = pypar.rank()
processor_name = pypar.Get_processor_name()

############################
# Set the initial conditions
############################

rect = zeros( 4, Float) # Buffer for the full domain extent (filled on processor 0)

class Set_Stage:
    """Set an initial condition with constant water height h, for x0 < x < x1
    """

    def __init__(self, x0=0.25, x1=0.5, h=1.0):
        self.x0 = x0
        self.x1 = x1
        self.h  = h

    def __call__(self, x, y):
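        # (x > x0) & (x < x1) gives a 0/1 mask, so the stage is h inside the band and 0 elsewhere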
        return self.h*((x>self.x0)&(x<self.x1))

#######################
# Partition the mesh
#######################

if myid == 0:

    # Read in the test files

    filename = 'parallel/merimbula_10785_1.tsh'

    # Build the whole mesh

    mesh_full = pmesh_to_domain_instance(filename, Domain)

    # Define the domain boundaries for visualisation

    rect = array(mesh_full.xy_extent, Float)

    # Initialise the wave

    mesh_full.set_quantity('stage', Set_Stage(756000.0,756500.0,2.0))

    # Subdivide the mesh

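    # triangles_per_proc[p] is the number of triangles assigned to processor p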
    nodes, triangles, boundary, triangles_per_proc, quantities = \
         pmesh_divide_metis(mesh_full, numprocs)

    # Build the mesh that should be assigned to each processor,
    # this includes ghost nodes and the communication pattern

    submesh = build_submesh(nodes, triangles, boundary,\
                            quantities, triangles_per_proc)

    # Send the mesh partition to the appropriate processor

    for p in range(1, numprocs):
      send_submesh(submesh, triangles_per_proc, p)

    # Build the local mesh for processor 0

    points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
              extract_hostmesh(submesh, triangles_per_proc)

else:

    # Read in the mesh partition that belongs to this
    # processor (note that the information is in the
    # correct form for the GA data structure)

    points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict \
            = rec_submesh(0)


###########################################
# Start the computations on each subpartition
###########################################

# The visualiser needs to know the size of the whole domain

pypar.broadcast(rect,0)

# Build the domain for this processor

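# full_send_dict and ghost_recv_dict describe the communication pattern
# (which triangles are exchanged with each neighbouring processor)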
domain = Parallel_Domain(points, vertices, boundary,
                         full_send_dict  = full_send_dict,
                         ghost_recv_dict = ghost_recv_dict)

# Visualise the domain

try:
    domain.initialise_visualiser(rect=rect)
    domain.visualiser.scale_z['stage'] = 0.2
    domain.visualiser.scale_z['elevation'] = 0.05
except:
    print 'No visualiser'


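# Use a first order scheme for the evolution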
domain.default_order = 1

# Define the boundaries, including the ghost boundary

from parallel.parallel_shallow_water import Transmissive_boundary, Reflective_boundary

T = Transmissive_boundary(domain)
R = Reflective_boundary(domain)
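# Ghost segments are updated by parallel communication, so no boundary
# condition is attached to the 'ghost' tag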
domain.set_boundary( {'outflow': R, 'inflow': R, 'inner':R, 'exterior': R, \
                      'open':R, 'ghost':None} )


# Set the initial quantities

domain.set_quantity('stage', quantities['stage'])
domain.set_quantity('elevation', quantities['elevation'])

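# Do not store the results to disk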
domain.store = False

# Set the yield step and the final time, and record the wall-clock start time

t0 = time.time()
yieldstep = 1
finaltime = 90


# Start the evolve calculations

for t in domain.evolve(yieldstep = yieldstep, finaltime = finaltime):
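    # Only processor 0 reports the simulation time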
    if myid == 0:
        domain.write_time()

# Print some timing statistics

if myid == 0:
    print 'That took %.2f seconds' %(time.time()-t0)
    print 'Communication time %.2f seconds'%domain.communication_time
    print 'Reduction Communication time %.2f seconds'\
          %domain.communication_reduce_time
    print 'Broadcast time %.2f seconds'\
          %domain.communication_broadcast_time