source: inundation/parallel/run_parallel_sw_merimbula.py @ 2130

Last change on this file was revision 2130, checked in by linda, 19 years ago

Modified the parallel code to agree with the python style files

#!/usr/bin/env python
#########################################################
#
#  Main file for parallel mesh testing.
#
#  This is a modification of the run_parallel_advection.py
# file.
#
#  *) The test files currently available are of the form
# test*.out, e.g. test_5l_4c.out. The number in front of the l
# gives the number of levels of refinement used to build the
# grid, i.e. a higher number corresponds to a finer grid. The
# number in front of the c gives the number of processors.
#
# *) The (new) files that have been added to manage the
# grid partitioning are
#    +) pmesh_divide.py: subdivide a pmesh
#    +) build_submesh.py: build the submeshes on the host
# processor.
#    +) build_local.py: build the GA mesh data structure
# on each processor.
#    +) build_commun.py: handle the communication between
# the host and the other processors
#
# *) Things still to do:
#    +) Overlap the communication and computation: the
# communication routines in build_commun.py should be
# interspersed in the build_submesh.py and build_local.py
# files. This will overlap the communication and
# computation and will be far more efficient. This should
# be done after more testing, once there is more confidence
# in the subpartitioning.
#    +) Much more testing, especially with large numbers of
# processors.
#
#  Authors: Linda Stals, Steve Roberts and Matthew Hardy,
# June 2005
#
#
#########################################################
import sys
import pypar    # The Python-MPI interface
import time


from os import sep
sys.path.append('..'+sep+'pyvolution')

from Numeric import array, zeros, Float

# pmesh

from shallow_water import Domain
from parallel_shallow_water import Parallel_Domain

# mesh partition routines

from pmesh_divide import pmesh_divide, pmesh_divide_steve
from build_submesh import *
from build_local import *
from build_commun import *
from pmesh2domain import pmesh_to_domain_instance

# read in the processor information

numprocs = pypar.size()
myid = pypar.rank()
processor_name = pypar.Get_processor_name()

#-------
# Domain
rect = zeros(4, Float)    # Buffer for the full domain extent (set on processor 0, then broadcast)

class Set_Stage:
    """Set an initial condition with constant water height, for x0 < x < x1
    """

    def __init__(self, x0=0.25, x1=0.5, h=1.0):
        self.x0 = x0
        self.x1 = x1
        self.h  = h

    def __call__(self, x, y):
        return self.h*((x>self.x0)&(x<self.x1))


if myid == 0:

    # read in the test files

    filename = 'test-100.tsh'
#    filename = 'merimbula_10785_1.tsh'
    nx = numprocs
    ny = 1
    if nx*ny != numprocs:
        print "WARNING: number of subboxes is not equal to the number of processors"

    domain_full = pmesh_to_domain_instance(filename, Domain)

    domain_full.set_quantity('stage', Set_Stage(200.0, 300.0, 1.0))
#    domain_full.set_quantity('stage', Set_Stage(756000.0, 756500.0, 2.0))

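    # Subdivide the full mesh into nx*ny submeshes; triangles_per_proc
    # records how many triangles are assigned to each processor.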
    nodes, triangles, boundary, triangles_per_proc, quantities = \
           pmesh_divide_steve(domain_full, nx, ny)

    rect = array(domain_full.xy_extent, Float)

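    # Build the submeshes for all processors on the host processor.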
    submesh = build_submesh(nodes, triangles, boundary, \
                            quantities, triangles_per_proc)

    # send the mesh partition to the appropriate processor

    for p in range(1, numprocs):
        send_submesh(submesh, triangles_per_proc, p)

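    # Processor 0 keeps its own piece of the submesh and converts it into
    # the local mesh data structure (points, vertices, boundary, quantities,
    # plus the ghost/full communication dictionaries).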
    hostmesh = extract_hostmesh(submesh)

    points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
            build_local_mesh(hostmesh, 0, triangles_per_proc[0], numprocs)

# read in the mesh partition that belongs to this
# processor (note that the information is in the
# correct form for the GA data structure)

else:
    points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict \
            = rec_submesh(0)


##    ########### start profile testing
##    import profile, pstats

##    # Create a profiler instance.
##    pobject = profile.Profile()


##    # Profile the test function and print the statistics.
##    s = 'points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict \
##            = rec_submesh(0)'

##    presult = pobject.runctx(s, globals(), locals())
##    presult.dump_stats("/tmp/profile.dat")

##    stats = pstats.Stats("/tmp/profile.dat")
##    print stats.sort_stats('time').print_stats(40)

##    ########### end profile testing


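# Broadcast the full domain extent from processor 0 so that every processor
# can pass the same bounding box to its visualiser.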
pypar.broadcast(rect, 0)
#print rect

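# Create the parallel domain; full_send_dict and ghost_recv_dict describe
# which triangles must be exchanged with neighbouring processors.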
domain = Parallel_Domain(points, vertices, boundary,
                         full_send_dict  = full_send_dict,
                         ghost_recv_dict = ghost_recv_dict)


try:
    domain.initialise_visualiser(rect=rect)
    #domain.visualiser.coloring['stage'] = True
    domain.visualiser.scale_z['stage'] = 0.2
    domain.visualiser.scale_z['elevation'] = 0.05
except:
    print 'No visualiser'


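# use a first order spatial scheme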
domain.default_order = 1

#Boundaries
from parallel_shallow_water import Transmissive_boundary, Reflective_boundary

T = Transmissive_boundary(domain)
R = Reflective_boundary(domain)
domain.set_boundary( {'outflow': R, 'inflow': R, 'inner': R, 'exterior': R, 'open': R} )


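# Set the initial quantities from the values distributed with this
# processor's submesh.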
domain.set_quantity('stage', quantities['stage'])
domain.set_quantity('elevation', quantities['elevation'])

#domain.store = True
#domain.filename = 'merimbula-%d' %domain.processor

#---------
# Evolution
t0 = time.time()

print 'Processor %d on %s: No of elements %d' % (domain.processor, processor_name, domain.number_of_elements)
yieldstep = 0.05
finaltime = 500.0
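# Note: the yieldstep and finaltime above are overridden by the shorter
# test settings just below.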

yieldstep = 1
finaltime = 30
#processor_name
for t in domain.evolve(yieldstep = yieldstep, finaltime = finaltime):
    if myid == 0:
        domain.write_time()
        #print 'Processor %d, Integral of stage %d'%\
        #       (domain.processor,domain.quantities['stage'].get_integral())


#print 'P%d: That took %.2f seconds' %(myid, time.time()-t0)
#print 'P%d: Communication time %.2f seconds' %(myid, domain.communication_time)
#print 'P%d: Reduction Communication time %.2f seconds' %(myid, domain.communication_reduce_time)
#print 'P%d: Broadcast time %.2f seconds' %(myid, domain.communication_broadcast_time)



if myid == 0:
    print 'That took %.2f seconds' %(time.time()-t0)
    print 'Communication time %.2f seconds' %domain.communication_time
    print 'Reduction Communication time %.2f seconds' %domain.communication_reduce_time
    print 'Broadcast time %.2f seconds' %domain.communication_broadcast_time
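
A typical way to launch this test (assuming pypar is installed on top of a working MPI installation; the launcher name and the process count depend on the local setup) is something like:

    mpirun -np 4 python run_parallel_sw_merimbula.py

pypar.size() then reports the number of MPI processes (numprocs) and pypar.rank() the id of the calling process (myid).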