source: inundation/parallel/run_parallel_sw_merimbula.py @ 1740

Last change on this file since 1740 was 1629, checked in by steve, 20 years ago
File size: 6.0 KB
Line 
#!/usr/bin/env python
2#########################################################
3#
4#  Main file for parallel mesh testing.
5#
6#  This is a modification of the run_parallel_advection.py
7# file.
8#
#  *) The test files currently available are of the form
# test*.out, eg test_5l_4c.out. The term in front of the l
# corresponds to the number of levels of refinement
# required to build the grid, i.e. a higher number
# corresponds to a finer grid. The term in front of the c
# corresponds to the number of processors.
15#
16# *) The (new) files that have been added to manage the
17# grid partitioning are
18#    +) mg2ga.py: read in the test files.
19#    +) pmesh_divide.py: subdivide a pmesh
20#    +) build_submesh.py: build the submeshes on the host
21# processor.
22#    +) build_local.py: build the GA mesh datastructure
23# on each processor.
24#    +) build_commun.py: handle the communication between
25# the host and processors
26#
27# *) Things still to do:
28#    +) Overlap the communication and computation: The
29# communication routines in build_commun.py should be
30# interdispersed in the build_submesh.py and build_local.py
31# files. This will overlap the communication and
32# computation and will be far more efficient. This should
# be done after more testing and there is more confidence in
# the subpartitioning.
35#    +) Much more testing especially with large numbers of
36# processors.
37#  Authors: Linda Stals, Steve Roberts and Matthew Hardy,
38# June 2005
39#
40#
41#
42#########################################################
import sys
import pypar    # The Python-MPI interface
import time


from os import sep
sys.path.append('..'+sep+'pyvolution')

from Numeric import array, zeros, Float
# pmesh

#from shallow_water import Domain

from shallow_water import Domain
from parallel_shallow_water import Parallel_Domain

# mesh partition routines

from pmesh_divide import pmesh_divide, pmesh_divide_steve
from build_submesh import *
from build_local import *
from build_commun import *
from pmesh2domain import pmesh_to_domain_instance
66
# Read in the MPI processor information for this run.

numprocs = pypar.size()                      # total number of MPI processes
myid = pypar.rank()                          # rank of this process (0 = host)
processor_name = pypar.Get_processor_name()  # node name, used in reports below

#-------
# Domain
# Buffer for the full-domain bounding rectangle; it is filled with real
# values on the host (rank 0) and then sent to everyone via
# pypar.broadcast further down, so every process can size its visualiser.
# NOTE(review): zeros/Float are Numeric names, but only `array` is
# explicitly imported above -- presumably one of the wildcard imports
# (build_submesh/build_local/build_commun) re-exports them; confirm.
rect = zeros( 4, Float) # Buffer for results
76
class Set_Stage:
    """Set an initial condition with constant water height, for x0 < x < x1.

    Instances are callable with coordinate arrays (or scalars) x, y and
    return h inside the band x0 < x < x1 and 0 outside it, so they can be
    passed directly to domain.set_quantity('stage', ...).
    """

    def __init__(self, x0=0.25, x1=0.5, h=1.0):
        self.x0 = x0
        self.x1 = x1
        # Fixed: the original line was the garbled 'self.= h', so the h
        # attribute read by __call__ was never assigned.
        self.h = h

    def __call__(self, x, y):
        # y is accepted to satisfy the quantity-setter protocol but is
        # unused; & is used (not 'and') so this works elementwise on
        # Numeric arrays as well as on scalars.
        return self.h*((x>self.x0)&(x<self.x1))
88
89
if myid == 0:

    # Host processor: read the full mesh, partition it, and send each
    # partition to the processor that will own it.

    filename = 'test-100.tsh'
#    filename = 'merimbula_10785_1.tsh'
    # Partition into an nx-by-ny grid of subboxes, one per processor.
    nx = numprocs
    ny = 1
    if nx*ny != numprocs:
        print "WARNING: number of subboxes is not equal to the number of proc"

    # Build the full sequential domain from the .tsh mesh file.
    domain_full = pmesh_to_domain_instance(filename, Domain)

    # Initial condition: a band of water of height 1.0 for 200 < x < 300.
    domain_full.set_quantity('stage', Set_Stage(200.0,300.0,1.0))
#    domain_full.set_quantity('stage', Set_Stage(756000.0,756500.0,2.0))

    # Subdivide the mesh; triangles_per_proc[p] is the number of
    # triangles assigned to processor p.
    nodes, triangles, boundary, triangles_per_proc, quantities = \
         pmesh_divide_steve(domain_full, nx, ny)

    # Full-domain extent, broadcast later for the visualisers.
    rect = array(domain_full.xy_extent, Float)

    # Build one submesh (with ghost layers) per processor.
    submesh = build_submesh(nodes, triangles, boundary,\
                            quantities, triangles_per_proc)

    # send the mesh partition to the appropriate processor

    for p in range(1, numprocs):
      send_submesh(submesh, triangles_per_proc, p)

    # The host keeps partition 0 for itself and converts it to the
    # local (GA) form directly, without any communication.
    hostmesh = extract_hostmesh(submesh)
    points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
             build_local_mesh(hostmesh, 0, triangles_per_proc[0], numprocs)

# read in the mesh partition that belongs to this
# processor (note that the information is in the
# correct form for the GA data structure)

else:
    # Worker processors: block until the host (rank 0) sends our partition.
    points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict \
            = rec_submesh(0)
130
#if myid == 0:
#    print 'ghost'
#    print ghost_recv_dict
#processor_name
#if myid == 0:
#    print 'full'
#    print full_send_dict


# Share the full-domain extent computed on rank 0 with every process.
pypar.broadcast(rect,0)
#print rect

# Build the distributed domain for this processor's partition.
# ghost_recv_dict / full_send_dict describe which triangles are received
# from / sent to each neighbouring processor during an update.
domain = Parallel_Domain(points, vertices, boundary,
                         full_send_dict  = full_send_dict,
                         ghost_recv_dict = ghost_recv_dict)


# Best-effort visualiser setup: silently fall back to headless operation
# when no display/visualiser is available.
# NOTE(review): the bare 'except:' swallows every error, including typos
# in the scale_z keys -- consider narrowing once the failure mode is known.
try:
    domain.initialise_visualiser(rect=rect)
    #domain.visualiser.coloring['stage'] = True
    domain.visualiser.scale_z['stage'] = 0.2
    domain.visualiser.scale_z['elevation'] = 0.05
except:
    print 'No visualiser'
155
156
# First-order accurate spatial scheme.
domain.default_order = 1

#Boundaries
from parallel_shallow_water import Transmissive_boundary, Reflective_boundary

# T is constructed but unused below: every boundary tag is currently
# mapped to the reflective condition R.
T = Transmissive_boundary(domain)
R = Reflective_boundary(domain)
domain.set_boundary( {'outflow': R, 'inflow': R, 'inner':R, 'exterior': R, 'open':R} )


# Transfer the partitioned quantity values (built/received above) onto
# this processor's local domain.
domain.set_quantity('stage', quantities['stage'])
domain.set_quantity('elevation', quantities['elevation'])

#domain.store = True
#domain.filename = 'merimbula-%d' %domain.processor
172
#---------
# Evolution
t0 = time.time()   # wall-clock start, for the timing report below

print 'Processor %d on %s: No of elements %d'%(domain.processor,processor_name,domain.number_of_elements)
yieldstep = 0.05   # simulated time between yields (and screen updates)
finaltime = 500.0  # total simulated time

#yieldstep = 1
#finaltime = 1
#processor_name
# Main timestepping loop; every process evolves its own partition, and
# only the host (rank 0) reports progress.
for t in domain.evolve(yieldstep = yieldstep, finaltime = finaltime):
    if myid == 0:
        domain.write_time()
        #print 'Processor %d, Integral of stage %d'%\
        #       (domain.processor,domain.quantities['stage'].get_integral())


#print 'P%d: That took %.2f seconds' %(myid, time.time()-t0)
#print 'P%d: Communication time %.2f seconds' %(myid, domain.communication_time)
#print 'P%d: Reduction Communication time %.2f seconds' %(myid, domain.communication_reduce_time)
#print 'P%d: Broadcast time %.2f seconds' %(myid, domain.communication_broadcast_time)



# Final timing summary, printed on the host only.  The communication
# counters are accumulated by Parallel_Domain during evolve().
if myid == 0:
    print 'That took %.2f seconds' %(time.time()-t0)
    print 'Communication time %.2f seconds'%domain.communication_time
    print 'Reduction Communication time %.2f seconds'%domain.communication_reduce_time
    print 'Broadcast time %.2f seconds'%domain.communication_broadcast_time
Note: See TracBrowser for help on using the repository browser.