source: inundation/parallel/run_parallel_mesh.py @ 1718

Last change on this file since 1718 was 1580, checked in by linda, 20 years ago

communicate quantities in parallel

  • Property svn:executable set to *
File size: 4.4 KB
Line 
1#!/usr/bin/env python
2#########################################################
3#   
4#  Main file for parallel mesh testing.
5#
6#  This is a modification of the run_parallel_advection.py
7# file.
8#
9#  *) The test files currently available are of the form
10# test*.out, e.g. test_5l_4c.out. The term in front of the l
11# corresponds to the number of levels of refinement
12# required to build the grid, i.e. a higher number
13# corresponds to a finer grid. The term in front of the c
14# corresponds to the number of processors.
15#
16# *) The (new) files that have been added to manage the
17# grid partitioning are
18#    +) mg2ga.py: read in the test files.
19#    +) build_submesh.py: build the submeshes on the host
20# processor.
21#    +) build_local.py: build the GA mesh datastructure
22# on each processor.
23#    +) build_commun.py: handle the communication between
24# the host and processors
25#
26# *) Things still to do:
27#    +) Overlap the communication and computation: The
28# communication routines in build_commun.py should be
29# interdispersed in the build_submesh.py and build_local.py
30# files. This will overlap the communication and
31# computation and will be far more efficient. This should
32# be done after more testing and there is more confidence in
33# the subpartitioning.
34#    +) Much more testing especially with large numbers of
35# processors.
36#  Authors: Linda Stals, Steve Roberts and Matthew Hardy,
37# June 2005
38#
39#
40#
41#########################################################
42
43import pypar    # The Python-MPI interface
44import sys
45from os import sep
46sys.path.append('..'+sep+'pyvolution')
47
48# advection routines
49
50from config import g, epsilon
51from Numeric import allclose, array, zeros, ones, Float
52from parallel_advection import *
53from Numeric import array
54
55# mesh partition routines
56
57from mg2ga import *
58from build_submesh import *
59from build_local import *
60from build_commun import *
61import pdb
62
63# define the initial time step
64
class Set_Stage:
    """Set an initial condition with constant water height h for x0 < x < x1.

    The stage is h inside the open interval (x0, x1) and 0 elsewhere.
    Works elementwise on Numeric arrays as well as on scalars, since the
    mask (x > x0) & (x < x1) is evaluated with array-compatible operators.
    """

    def __init__(self, x0=0.25, x1=0.5, h=1.0):
        self.x0 = x0
        self.x1 = x1
        self.h = h    # fixed: original line was garbled ('self.= h')

    def __call__(self, x, y):
        # y is accepted for the quantity-setting interface but unused:
        # the strip of raised water depends on x only.
        return self.h*((x>self.x0)&(x<self.x1))
76
77# read in the processor information
78
79numprocs = pypar.size()
80myid = pypar.rank()
81processor_name = pypar.Get_processor_name()
82
83print "PROCESSOR ", myid, " HAS STARTED"
84
85# if this is the host processor
86
87print 'trace'
88pdb.set_trace()
89print 'after trace'
90
91
# Mesh distribution: the host (processor 0) reads the global mesh,
# partitions it, and sends each partition to its processor; every
# other processor receives its partition from the host.

if myid == 0:

    # read in the test file containing the global mesh
    f = open('test_3l_2c.out', 'r')
    [nodes, triangles, boundary, triangles_per_proc] = mg2ga(f)
    f.close()    # fixed: file handle was never closed in the original
    quantities = {}

    # subdivide the mesh into one submesh per processor
    submesh = build_submesh(nodes, triangles, boundary, quantities, \
                            triangles_per_proc)

    # send each mesh partition to the appropriate processor
    # (rec_submesh on the other ranks pairs with these sends)
    for p in range(1, numprocs):
      send_submesh(submesh, triangles_per_proc, p)

    # keep the host's own partition and build the local GA mesh
    # data structure for processor 0
    hostmesh = extract_hostmesh(submesh)
    [points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict] = \
             build_local_mesh(hostmesh, 0, triangles_per_proc[0], \
                              numprocs)

else:
    # receive the mesh partition that belongs to this processor
    # (the information arrives already in the correct form for the
    # GA data structure)
    [points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict] = rec_submesh(0)
121
122# define the computation domain
123
124domain = Parallel_Domain(points, vertices, boundary,
125                        full_send_dict, ghost_recv_dict, velocity=[1.0, 0.0])
126
127# start the initialisation routines
128
129domain.initialise_visualiser()
130#domain.visualise=False
131 
132#Boundaries
133
134T = Transmissive_boundary(domain)
135D = Dirichlet_boundary(array([1.0]))
136
137# ????????????
138domain.default_order = 2
139
140# set the boundary information ('exterior; corresponds
141# to the ghost triangles sitting along the boundary
142
143domain.set_boundary( {'37': T, 'exterior': T} )
144
145# check the triangularisation
146
147domain.check_integrity()
148
149# set the initial time step
150
151domain.set_quantity('stage', Set_Stage(0.2,0.4,1.0))
152
153
# step through time evolving the grid

if myid == 0:
    import time
    t0 = time.time()    # wall-clock start, measured on the host only
# every processor executes the evolve loop; only the host reports times
for t in domain.evolve(yieldstep = 0.1, finaltime = 2.0):
    if myid == 0:
      domain.write_time()
if myid == 0:
    # summary statistics printed by the host processor only
    print 'That took %.2f seconds' %(time.time()-t0)
    print 'Communication time %.2f seconds'%domain.communication_time
    print 'Reduction Communication time %.2f seconds'%domain.communication_reduce_time
 
# shut down MPI cleanly on all processors
pypar.finalize()
Note: See TracBrowser for help on using the repository browser.