source: inundation/parallel/run_parallel_sw_merimbula_metis.py @ 2131

Last change on this file since 2131 was 2130, checked in by linda, 19 years ago

Modified the parallel code to agree with the python style files

  • Property svn:executable set to *
File size: 6.1 KB
#!/usr/bin/env python
###
# Same as run_parallel_sw_merimbula.py, but uses pmesh_divide_metis
# to partition the mesh.
#########################################################
#
#  Main file for parallel mesh testing.
#
#  This is a modification of the run_parallel_advection.py
# file.
#
#  *) The test files currently available are of the form
# test*.out, e.g. test_5l_4c.out. The term in front of the l
# corresponds to the number of levels of refinement
# required to build the grid, i.e. a higher number
# corresponds to a finer grid. The term in front of the c
# corresponds to the number of processors.
#
# *) The (new) files that have been added to manage the
# grid partitioning are
#    +) pmesh_divide.py: subdivide a pmesh
#    +) build_submesh.py: build the submeshes on the host
# processor.
#    +) build_local.py: build the GA mesh datastructure
# on each processor.
#    +) build_commun.py: handle the communication between
# the host and processors
#
# *) Things still to do:
#    +) Overlap the communication and computation: The
# communication routines in build_commun.py should be
# interspersed in the build_submesh.py and build_local.py
# files. This will overlap the communication and
# computation and will be far more efficient. This should
# be done after more testing and when there is more
# confidence in the subpartitioning.
#    +) Much more testing, especially with large numbers of
# processors.
#
#  Authors: Linda Stals, Steve Roberts and Matthew Hardy,
# June 2005
#
#########################################################
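# A sketch of the overall flow (all of these names are used below):
#   pmesh_to_domain_instance -> pmesh_divide_metis -> build_submesh
#   -> send_submesh / rec_submesh -> build_local_mesh -> Parallel_Domain
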
import sys
import pypar    # The Python-MPI interface
import time

from os import sep
sys.path.append('..' + sep + 'pyvolution')

from Numeric import array, zeros, Float

from shallow_water import Domain
from parallel_shallow_water import Parallel_Domain

# mesh partition routines

from pmesh_divide import pmesh_divide_metis
from build_submesh import *
from build_local import *
from build_commun import *
from pmesh2domain import pmesh_to_domain_instance

# read in the processor information

numprocs = pypar.size()
myid = pypar.rank()
processor_name = pypar.Get_processor_name()

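# With pypar the script is typically launched through an MPI runner, for
# example (the exact launcher depends on the local MPI installation):
#
#     mpirun -np 4 python run_parallel_sw_merimbula_metis.py
#
# numprocs and myid above then reflect the requested process count and rank.
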
#-------
# Domain

rect = zeros(4, Float)    # Buffer for results

class Set_Stage:
    """Set an initial condition with constant water height h, for x0 < x < x1
    """

    def __init__(self, x0=0.25, x1=0.5, h=1.0):
        self.x0 = x0
        self.x1 = x1
        self.h = h

    def __call__(self, x, y):
        return self.h*((x > self.x0) & (x < self.x1))

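# For example, Set_Stage(0.25, 0.5, 1.0)(x, y) evaluates to 1.0 wherever
# 0.25 < x < 0.5 and to 0.0 elsewhere: the boolean mask is multiplied by h,
# giving a rectangular column of water as the initial stage.
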
if myid == 0:

    # read in the test files

#    filename = 'test-100.tsh'
    filename = 'merimbula_10785_1.tsh'

    domain_full = pmesh_to_domain_instance(filename, Domain)

#    domain_full.set_quantity('stage', Set_Stage(200.0,300.0,1.0))
    domain_full.set_quantity('stage', Set_Stage(756000.0, 756500.0, 2.0))

    # Note the different arguments compared with pmesh_divide,
    # pmesh_divide_steve etc.

    nodes, triangles, boundary, triangles_per_proc, quantities = \
           pmesh_divide_metis(domain_full, numprocs)
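
    # triangles_per_proc records how many triangles have been assigned to
    # each processor by the METIS partition.
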
    rect = array(domain_full.xy_extent, Float)

    submesh = build_submesh(nodes, triangles, boundary,
                            quantities, triangles_per_proc)

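    # Each submesh contains a processor's own triangles plus a surrounding
    # layer of ghost triangles (with the matching nodes, boundary and
    # quantity values), so each processor can later update its own region
    # and exchange only the ghost values.
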
    # send the mesh partition to the appropriate processor

    for p in range(1, numprocs):
        send_submesh(submesh, triangles_per_proc, p)

    hostmesh = extract_hostmesh(submesh)
    points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
            build_local_mesh(hostmesh, 0, triangles_per_proc[0], numprocs)

else:

    # read in the mesh partition that belongs to this
    # processor (note that the information is in the
    # correct form for the GA data structure)

    points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict \
            = rec_submesh(0)

#if myid == 0:
#    print 'ghost'
#    print ghost_recv_dict
#if myid == 0:
#    print 'full'
#    print full_send_dict

pypar.broadcast(rect, 0)
#print rect

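# rect was only filled in on processor 0 (from domain_full.xy_extent); the
# broadcast gives every processor the extent of the full domain, which the
# (currently commented out) visualiser below would use.
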
domain = Parallel_Domain(points, vertices, boundary,
                         full_send_dict  = full_send_dict,
                         ghost_recv_dict = ghost_recv_dict)
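
# full_send_dict and ghost_recv_dict describe, for each neighbouring
# processor, which of this processor's own (full) triangles are sent out
# and which incoming values are written into the local ghost triangles on
# each communication step.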

#try:
#    domain.initialise_visualiser(rect=rect)
#    #domain.visualiser.coloring['stage'] = True
#    domain.visualiser.scale_z['stage'] = 0.2
#    domain.visualiser.scale_z['elevation'] = 0.05
#except:
#    print 'No visualiser'


domain.default_order = 1    # use the first order spatial scheme

# Boundaries
from parallel_shallow_water import Transmissive_boundary, Reflective_boundary

T = Transmissive_boundary(domain)
R = Reflective_boundary(domain)

# All boundary tags are mapped to the reflective boundary here; the
# transmissive boundary T is constructed but not currently used.
domain.set_boundary({'outflow': R, 'inflow': R, 'inner': R, 'exterior': R, 'open': R})


domain.set_quantity('stage', quantities['stage'])
domain.set_quantity('elevation', quantities['elevation'])

#domain.store = True
#domain.filename = 'merimbula-%d' %domain.processor

#---------
# Evolution
t0 = time.time()

print 'Processor %d on %s: No of elements %d' % \
      (domain.processor, processor_name, domain.number_of_elements)

# long run
#yieldstep = 0.05
#finaltime = 500.0

# short test run
yieldstep = 1
finaltime = 30

#yieldstep = 1
#finaltime = 1

#for t in domain.evolve(yieldstep = yieldstep, finaltime = finaltime):
#    if myid == 0:
#        domain.write_time()
        #print 'Processor %d, Integral of stage %d'%\
        #       (domain.processor,domain.quantities['stage'].get_integral())

# Profiling
import profile
profiler = profile.Profile()
s = '''for t in domain.evolve(yieldstep = yieldstep, finaltime = finaltime):
  if myid == 0:
    domain.write_time()
'''
result = profiler.runctx(s, globals(), locals())
result.dump_stats("profile." + str(numprocs) + "." + str(myid) + ".dat")
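
# The dumped stats can be read back with the standard pstats module, e.g.
# (for processor 0 of a 4 processor run):
#
#     import pstats
#     pstats.Stats('profile.4.0.dat').sort_stats('cumulative').print_stats(10)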

#print 'P%d: That took %.2f seconds' %(myid, time.time()-t0)
#print 'P%d: Communication time %.2f seconds' %(myid, domain.communication_time)
#print 'P%d: Reduction Communication time %.2f seconds' %(myid, domain.communication_reduce_time)
#print 'P%d: Broadcast time %.2f seconds' %(myid, domain.communication_broadcast_time)


if myid == 0:
    print 'That took %.2f seconds' % (time.time()-t0)
    print 'Communication time %.2f seconds' % domain.communication_time
    print 'Reduction Communication time %.2f seconds' % domain.communication_reduce_time
    print 'Broadcast time %.2f seconds' % domain.communication_broadcast_time