source: inundation/parallel/run_parallel_sw_merimbula_metis.py @ 2094

Last change on this file since 2094 was 2090, checked in by linda, 19 years ago

Removed pypar test files and commented pmesh_divide

  • Property svn:executable set to *
File size: 5.9 KB
#!/usr/bin/env python
###
# Same as run_parallel_sw_merimbula.py, but uses pmesh_divide_metis
# to partition the mesh.
#########################################################
#
#  Main file for parallel mesh testing.
#
#  This is a modification of the run_parallel_advection.py
#  file.
#
#  *) The test files currently available are of the form
#  test*.out, e.g. test_5l_4c.out. The number in front of the l
#  corresponds to the number of levels of refinement
#  required to build the grid, i.e. a higher number
#  corresponds to a finer grid. The number in front of the c
#  corresponds to the number of processors.
#
# *) The (new) files that have been added to manage the
# grid partitioning are (a minimal call sequence is sketched
# after this header):
#    +) mg2ga.py: read in the test files.
#    +) pmesh_divide.py: subdivide a pmesh.
#    +) build_submesh.py: build the submeshes on the host
# processor.
#    +) build_local.py: build the GA mesh datastructure
# on each processor.
#    +) build_commun.py: handle the communication between
# the host and the processors.
#
# *) Things still to do:
#    +) Overlap the communication and computation: The
# communication routines in build_commun.py should be
# interspersed in the build_submesh.py and build_local.py
# files. This will overlap the communication and
# computation and will be far more efficient. This should
# be done after more testing, and once there is more
# confidence in the subpartitioning.
#    +) Much more testing, especially with large numbers of
# processors.
#  Authors: Linda Stals, Steve Roberts and Matthew Hardy,
# June 2005
#
#########################################################
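
# The block below is a minimal sketch (kept as comments, not executed) of
# the master/worker call sequence the rest of this script follows. All
# names are the real functions imported further down; quantities,
# boundaries and timing are omitted here.
#
#   if myid == 0:
#       # host: read, partition and distribute the mesh
#       domain_full = pmesh_to_domain_instance(filename, Domain)
#       nodes, triangles, boundary, triangles_per_proc, quantities = \
#           pmesh_divide_metis(domain_full, numprocs)
#       submesh = build_submesh(nodes, triangles, boundary,
#                               quantities, triangles_per_proc)
#       for p in range(1, numprocs):
#           send_submesh(submesh, triangles_per_proc, p)
#       hostmesh = extract_hostmesh(submesh)
#       local = build_local_mesh(hostmesh, 0, triangles_per_proc[0], numprocs)
#   else:
#       # worker: receive this processor's partition from the host
#       local = rec_submesh(0)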
import sys
import pypar    # The Python-MPI interface
import time


from os import sep
sys.path.append('..'+sep+'pyvolution')

# zeros and Float are needed below for the rect buffer
from Numeric import array, zeros, Float

# pmesh

from shallow_water import Domain
from parallel_shallow_water import Parallel_Domain

# mesh partition routines

from pmesh_divide import pmesh_divide_metis
from build_submesh import *
from build_local import *
from build_commun import *
from pmesh2domain import pmesh_to_domain_instance

# read in the processor information

numprocs = pypar.size()
myid = pypar.rank()
processor_name = pypar.Get_processor_name()

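# A pypar script is normally launched under MPI rather than run directly;
# the exact launcher depends on the local installation. A typical (assumed)
# invocation with four processors would be:
#
#   mpirun -np 4 python run_parallel_sw_merimbula_metis.py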
#-------
# Domain
#-------

rect = zeros(4, Float)    # Buffer for the full domain extent (broadcast below)

class Set_Stage:
    """Set an initial condition with constant water height h, for x0 < x < x1
    """

    def __init__(self, x0=0.25, x1=0.5, h=1.0):
        self.x0 = x0
        self.x1 = x1
        self.h = h

    def __call__(self, x, y):
        return self.h*((x>self.x0)&(x<self.x1))
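
# Example (kept as a comment): Set_Stage returns h inside the band
# x0 < x < x1 and 0 outside, so with the values used below the initial
# stage is 2.0 in the band 756000.0 < x < 756500.0:
#
#   f = Set_Stage(756000.0, 756500.0, 2.0)
#   f(756250.0, 0.0)    # -> 2.0
#   f(755000.0, 0.0)    # -> 0.0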


if myid == 0:

    # read in the test files

#    filename = 'test-100.tsh'
    filename = 'merimbula_10785_1.tsh'

    domain_full = pmesh_to_domain_instance(filename, Domain)

#    domain_full.set_quantity('stage', Set_Stage(200.0,300.0,1.0))
    domain_full.set_quantity('stage', Set_Stage(756000.0,756500.0,2.0))

    # Note the different arguments compared with pmesh_divide,
    # pmesh_divide_steve etc.

    nodes, triangles, boundary, triangles_per_proc, quantities = \
         pmesh_divide_metis(domain_full, numprocs)

    rect = array(domain_full.xy_extent, Float)

    submesh = build_submesh(nodes, triangles, boundary,
                            quantities, triangles_per_proc)

    # send the mesh partition to the appropriate processor

    for p in range(1, numprocs):
        send_submesh(submesh, triangles_per_proc, p)

    hostmesh = extract_hostmesh(submesh)
    points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
             build_local_mesh(hostmesh, 0, triangles_per_proc[0], numprocs)
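
    # Processor 0 keeps the first partition for itself (extract_hostmesh
    # plus build_local_mesh above), so the host takes part in the
    # computation as well as coordinating the distribution.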

# read in the mesh partition that belongs to this
# processor (note that the information is in the
# correct form for the GA data structure)

else:
    points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict \
            = rec_submesh(0)

#if myid == 0:
#    print 'ghost'
#    print ghost_recv_dict
#if myid == 0:
#    print 'full'
#    print full_send_dict


pypar.broadcast(rect, 0)
#print rect

domain = Parallel_Domain(points, vertices, boundary,
                         full_send_dict  = full_send_dict,
                         ghost_recv_dict = ghost_recv_dict)
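
# full_send_dict describes the triangles this processor owns and must send
# to its neighbours; ghost_recv_dict describes the ghost triangles it
# receives from them. Parallel_Domain presumably uses these tables to
# update the ghost cells during each evolve step.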


try:
    domain.initialise_visualiser(rect=rect)
    #domain.visualiser.coloring['stage'] = True
    domain.visualiser.scale_z['stage'] = 0.2
    domain.visualiser.scale_z['elevation'] = 0.05
except:
    print 'No visualiser'


domain.default_order = 1

#Boundaries
from parallel_shallow_water import Transmissive_boundary, Reflective_boundary

T = Transmissive_boundary(domain)
R = Reflective_boundary(domain)
domain.set_boundary( {'outflow': R, 'inflow': R, 'inner': R, 'exterior': R, 'open': R} )
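
# Every boundary tag is mapped to the reflective boundary R, so for this
# test the domain behaves as a closed basin; the transmissive boundary T
# is constructed but not used.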


domain.set_quantity('stage', quantities['stage'])
domain.set_quantity('elevation', quantities['elevation'])

#domain.store = True
#domain.filename = 'merimbula-%d' %domain.processor

#---------
# Evolution
#---------

t0 = time.time()

print 'Processor %d on %s: No of elements %d'%\
      (domain.processor, processor_name, domain.number_of_elements)

# Short test run; for a full simulation use a smaller yieldstep and a
# longer finaltime, e.g. yieldstep = 0.05, finaltime = 500.0
yieldstep = 1
finaltime = 30

for t in domain.evolve(yieldstep = yieldstep, finaltime = finaltime):
    if myid == 0:
        domain.write_time()
        #print 'Processor %d, Integral of stage %d'%\
        #       (domain.processor, domain.quantities['stage'].get_integral())


#print 'P%d: That took %.2f seconds' %(myid, time.time()-t0)
#print 'P%d: Communication time %.2f seconds' %(myid, domain.communication_time)
#print 'P%d: Reduction Communication time %.2f seconds' %(myid, domain.communication_reduce_time)
#print 'P%d: Broadcast time %.2f seconds' %(myid, domain.communication_broadcast_time)


if myid == 0:
    print 'That took %.2f seconds' %(time.time()-t0)
    print 'Communication time %.2f seconds' %domain.communication_time
    print 'Reduction Communication time %.2f seconds' %domain.communication_reduce_time
    print 'Broadcast time %.2f seconds' %domain.communication_broadcast_time