source: inundation/parallel/documentation/code/RunParallelSwMerimbulaMetis.py @ 2950

Last change on this file since 2950 was 2909, checked in by linda, 19 years ago

The parallel code listed in the documentation is stored in the documentation/code directory

  • Property svn:executable set to *
File size: 5.0 KB
Line 
1#!/usr/bin/env python
2###
3#########################################################
4#
5#  Main file for parallel mesh testing. Runs a shallow
6# water simulation using the merimbula mesh
7#
8#
9#
10# *) The (new) files that have been added to manage the
11# grid partitioning are
12#    +) pmesh_divide_metis.py: subdivide a pmesh
13#    +) build_submesh.py: build the submeshes on the host
14# processor.
15#    +) build_local.py: build the GA mesh datastructure
16# on each processor.
17#    +) build_commun.py: handle the communication between
18# the host and processors
19#
20#  Authors: Linda Stals, Steve Roberts and Matthew Hardy,
21# June 2005
22#
23#
24#
25#########################################################
26import sys
27import pypar    # The Python-MPI interface
28import time
29
30from os import sep
31sys.path.append('..'+sep+'pyvolution')
32sys.path.append('..'+sep+'parallel')
33
34# Numeric arrays
35
36from Numeric import array, zeros, Float
37
38# pmesh
39
40from shallow_water import Domain
41from parallel_shallow_water import Parallel_Domain
42from pmesh2domain import pmesh_to_domain_instance
43
44# Mesh partition routines
45
46from pmesh_divide import pmesh_divide_metis
47from build_submesh import build_submesh
48from build_local   import build_local_mesh
49from build_commun  import send_submesh, rec_submesh, extract_hostmesh
50
###############################
# Read in processor information
###############################

# Identify this process within the MPI job.
myid = pypar.rank()
numprocs = pypar.size()
processor_name = pypar.Get_processor_name()

############################
# Set the initial conditions
############################

# Buffer that will receive the domain extent: filled on processor 0,
# then broadcast to every processor further down.
rect = zeros(4, Float)
64
class Set_Stage:
    """Initial condition: constant stage (water height) inside a band.

    Calling an instance with coordinates (x, y) returns h wherever
    x0 < x < x1 and 0 elsewhere; y is ignored.  The comparisons and
    '&' act componentwise, so this works elementwise on Numeric
    arrays as well as on scalars.
    """

    def __init__(self, x0=0.25, x1=0.5, h=1.0):
        self.x0 = x0
        self.x1 = x1
        # Bug fix: the original line read 'self.= h' (a syntax error,
        # presumably a corrupted 'self.h = h'); __call__ below requires
        # the attribute to be named 'h'.
        self.h = h

    def __call__(self, x, y):
        # (x>x0)&(x<x1) evaluates to 1 inside the band, 0 outside.
        return self.h*((x>self.x0)&(x<self.x1))
76
#######################
# Partition the mesh
#######################
#
# Processor 0 reads and subdivides the full mesh, then sends each
# partition to its owning processor; every other processor simply
# receives its own partition.  The send/receive ordering here must
# match: ranks 1..numprocs-1 block in rec_submesh(0) until rank 0
# reaches the send loop.

if myid == 0:

    # Read in the test file (mesh in pmesh .tsh format)

    filename = 'merimbula_10785_1.tsh'

    # Build the whole mesh as a (sequential) Domain instance

    mesh_full = pmesh_to_domain_instance(filename, Domain)

    # Record the domain extent for visualisation; this overwrites the
    # zero buffer created above and is broadcast to all ranks later.

    rect = array(mesh_full.xy_extent, Float)

    # Initialise the wave: stage 2.0 for 756000 < x < 756500
    # (coordinates are in the mesh's own units — presumably metres
    # eastings; TODO confirm against the .tsh file)

    mesh_full.set_quantity('stage', Set_Stage(756000.0,756500.0,2.0))

    # Subdivide the mesh into numprocs pieces using metis

    nodes, triangles, boundary, triangles_per_proc, quantities = \
         pmesh_divide_metis(mesh_full, numprocs)

    # Build the mesh that should be assigned to each processor;
    # this includes ghost nodes and the communication pattern

    submesh = build_submesh(nodes, triangles, boundary,\
                            quantities, triangles_per_proc)

    # Send each mesh partition to the appropriate processor

    for p in range(1, numprocs):
      send_submesh(submesh, triangles_per_proc, p)

    # Build the local mesh for processor 0 (no send needed)

    points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
              extract_hostmesh(submesh, triangles_per_proc)

else:

    # Read in the mesh partition that belongs to this
    # processor (note that the information is in the
    # correct form for the GA data structure)

    points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict \
            = rec_submesh(0)
128
129
130###########################################
131# Start the computations on each subpartion
132###########################################
133
134# The visualiser needs to know the size of the whole domain
135
136pypar.broadcast(rect,0)
137
138# Build the domain for this processor
139
140domain = Parallel_Domain(points, vertices, boundary,
141                         full_send_dict  = full_send_dict,
142                         ghost_recv_dict = ghost_recv_dict)
143
144# Visualise the domain
145
146try:
147    domain.initialise_visualiser(rect=rect)
148    domain.visualiser.scale_z['stage'] = 0.2
149    domain.visualiser.scale_z['elevation'] = 0.05
150except:
151    print 'No visualiser'
152
153
154domain.default_order = 1
155
# Define the boundaries, including the ghost boundary

from parallel_shallow_water import Transmissive_boundary, Reflective_boundary

# NOTE(review): T is constructed but never used below — every tag is
# mapped to the reflective boundary R.  Confirm the constructor has no
# side effects on `domain` before removing it.
T = Transmissive_boundary(domain)
R = Reflective_boundary(domain)

# 'ghost' maps to None: ghost triangles are updated via the parallel
# communication pattern, not by a boundary condition.
domain.set_boundary( {'outflow': R, 'inflow': R, 'inner':R, 'exterior': R, \
                      'open':R, 'ghost':None} )
164
165
# Set the initial quantities from the partitioned quantity arrays
# received/extracted above

domain.set_quantity('stage', quantities['stage'])
domain.set_quantity('elevation', quantities['elevation'])

# Disable storing results to file for this test run
domain.store = False

# Set the number of time steps, as well as the start and end time

t0 = time.time()   # wall-clock start, for the timing report below
yieldstep = 1      # time between yields (presumably model seconds — verify)
finaltime = 90     # total simulation time (same units as yieldstep)


# Start the evolve calculations; every rank iterates the evolve loop,
# but only processor 0 reports progress

for t in domain.evolve(yieldstep = yieldstep, finaltime = finaltime):
    if myid == 0:
        domain.write_time()

# Print some timing statistics (processor 0 only)

if myid == 0:
    print 'That took %.2f seconds' %(time.time()-t0)
    print 'Communication time %.2f seconds'%domain.communication_time
    print 'Reduction Communication time %.2f seconds'\
          %domain.communication_reduce_time
    print 'Broadcast time %.2f seconds'\
          %domain.communication_broadcast_time
Note: See TracBrowser for help on using the repository browser.