source: trunk/anuga_core/source/anuga_parallel/sequential_distribute.py @ 8618

Last change on this file was 8618, checked in by steve, 12 years ago

Only counting volume in full cells for integrals

File size: 9.6 KB
1"""Trying to lump parallel stuff into simpler interface
2
3
4"""

import numpy as num

from anuga_parallel.distribute_mesh import send_submesh
from anuga_parallel.distribute_mesh import rec_submesh
from anuga_parallel.distribute_mesh import extract_submesh

# Mesh partitioning using Metis
from anuga_parallel.distribute_mesh import build_submesh
from anuga_parallel.distribute_mesh import pmesh_divide_metis_with_map

from anuga_parallel.parallel_shallow_water import Parallel_domain



def sequential_distribute_dump(domain, numprocs=1, verbose=False, debug=False, parameters=None):
    """Subdivide the domain into numprocs pieces and pickle, for each piece,
    the data needed to create the corresponding parallel domain.
    """


    if debug:
        verbose = True



    # FIXME: Dummy assignment (until boundaries are refactored to be
    # independent of the domains they are applied to)
    bdmap = {}
    for tag in domain.get_boundary_tags():
        bdmap[tag] = None

    domain.set_boundary(bdmap)


    domain_name = domain.get_name()
    domain_dir = domain.get_datadir()
    domain_store = domain.get_store()
    domain_minimum_storable_height = domain.minimum_storable_height
    domain_flow_algorithm = domain.get_flow_algorithm()
    domain_minimum_allowed_height = domain.get_minimum_allowed_height()
    georef = domain.geo_reference
    number_of_global_triangles = domain.number_of_triangles
    number_of_global_nodes = domain.number_of_nodes
    boundary_map = domain.boundary_map


    #sequential_distribute_mesh(domain, numprocs, verbose=verbose, debug=debug, parameters=parameters)


    # Subdivide the mesh
    if verbose: print 'sequential_distribute: Subdivide mesh'
    new_nodes, new_triangles, new_boundary, triangles_per_proc, quantities, \
           s2p_map, p2s_map = \
           pmesh_divide_metis_with_map(domain, numprocs)

    #PETE: s2p_map maps serial domain triangles to parallel domain triangles
    #      p2s_map maps parallel domain triangles to serial domain triangles



    # Build the mesh that should be assigned to each processor,
    # this includes ghost nodes and the communication pattern
    if verbose: print 'sequential_distribute: Build submeshes'
    submesh = build_submesh(new_nodes, new_triangles, new_boundary, quantities, triangles_per_proc, parameters)

    if debug:
        for p in range(numprocs):
            N = len(submesh['ghost_nodes'][p])
            M = len(submesh['ghost_triangles'][p])
            print 'There are %d ghost nodes and %d ghost triangles on proc %d'\
                  %(N, M, p)

    #if debug:
    #    from pprint import pprint
    #    pprint(submesh)


    # extract data to create parallel domain
    if verbose: print 'sequential_distribute: Distribute submeshes'
    for p in range(0, numprocs):

        # Build the local mesh for processor p
        points, vertices, boundary, quantities, \
            ghost_recv_dict, full_send_dict, tri_map, node_map, ghost_layer_width = \
              extract_submesh(submesh, triangles_per_proc, p)


#        from pprint import pprint
#        print '='*80
#        print p
#        print '='*80
#        pprint(tri_map)
#        print len(tri_map)

        # Keep track of the number of full nodes and triangles.
        # This is useful later if one needs access to a ghost-free domain.
        # Here we do this for every processor p in the loop; in the parallel
        # code the non-zero processors obtain these numbers in rec_submesh.
        number_of_full_nodes = len(submesh['full_nodes'][p])
        number_of_full_triangles = len(submesh['full_triangles'][p])

        # Extract l2g maps
        tri_l2g  = extract_l2g_map(tri_map)
        node_l2g = extract_l2g_map(node_map)


        s2p_map = None
        p2s_map = None

        #------------------------------------------------------------------------
        # Build the parallel domain for this processor using partition structures
        #------------------------------------------------------------------------

        if verbose:
            print 'sequential_distribute: P%g, no_full_nodes = %g, no_full_triangles = %g' % (p, number_of_full_nodes, number_of_full_triangles)


        #args = [points, vertices, boundary]

        kwargs = {'full_send_dict': full_send_dict,
                  'ghost_recv_dict': ghost_recv_dict,
                  'number_of_full_nodes': number_of_full_nodes,
                  'number_of_full_triangles': number_of_full_triangles,
                  'geo_reference': georef,
                  'number_of_global_triangles': number_of_global_triangles,
                  'number_of_global_nodes': number_of_global_nodes,
                  'processor': p,
                  'numproc': numprocs,
                  's2p_map': s2p_map,
                  'p2s_map': p2s_map,    ## jj added this
                  'tri_l2g': tri_l2g,    ## SR added this
                  'node_l2g': node_l2g,
                  'ghost_layer_width': ghost_layer_width}

#        parallel_domain = Parallel_domain(points, vertices, boundary, **kwargs)



        #------------------------------------------------------------------------
        # Transfer initial conditions to each subdomain
        #------------------------------------------------------------------------
#        for q in quantities:
#            parallel_domain.set_quantity(q, quantities[q])


        #------------------------------------------------------------------------
        # Transfer boundary conditions to each subdomain
        #------------------------------------------------------------------------
#        boundary_map['ghost'] = None  # Add binding to ghost boundary
#        parallel_domain.set_boundary(boundary_map)


        #------------------------------------------------------------------------
        # Transfer other attributes to each subdomain
        #------------------------------------------------------------------------
#        parallel_domain.set_name(domain_name)
#        parallel_domain.set_datadir(domain_dir)
#        parallel_domain.set_store(domain_store)
#        parallel_domain.set_minimum_storable_height(domain_minimum_storable_height)
#        parallel_domain.set_minimum_allowed_height(domain_minimum_allowed_height)
#        parallel_domain.set_flow_algorithm(domain_flow_algorithm)
#        parallel_domain.geo_reference = georef



        #-----------------------------------------------------------------------
        # Now let's store the parallel_domain via cPickle
        #-----------------------------------------------------------------------
#        import cPickle
#        pickle_name = domain_name + '_P%g_%g.pickle'% (numprocs,p)
#        f = file(pickle_name, 'wb')
#        cPickle.dump(parallel_domain, f, protocol=cPickle.HIGHEST_PROTOCOL)
#        f.close()


        # FIXME SR: Looks like we could reduce storage by a factor of 4 by just
        # storing the data needed to create the parallel_domain instead of
        # pickling a created domain
        import cPickle
        pickle_name = domain_name + '_P%g_%g.pickle' % (numprocs, p)
        f = file(pickle_name, 'wb')
        # Note: the order of this tuple must match the unpacking order
        # in sequential_distribute_load below.
        tostore = (kwargs, points, vertices, boundary, quantities, boundary_map,
                   domain_name, domain_dir, domain_store, domain_minimum_storable_height,
                   domain_minimum_allowed_height, domain_flow_algorithm, georef)
        cPickle.dump(tostore, f, protocol=cPickle.HIGHEST_PROTOCOL)
        f.close()

    return


def sequential_distribute_load(filename='domain', verbose=False):
    """Load the pickled data for this processor (myid) and create the
    corresponding parallel domain.
    """

    from anuga_parallel import myid, numprocs


    #---------------------------------------------------------------------------
    # Open the pickle file for this processor
    #---------------------------------------------------------------------------
    import cPickle
    pickle_name = filename + '_P%g_%g.pickle' % (numprocs, myid)
    f = file(pickle_name, 'rb')
    kwargs, points, vertices, boundary, quantities, boundary_map, domain_name, domain_dir, domain_store, domain_minimum_storable_height, \
                   domain_minimum_allowed_height, domain_flow_algorithm, georef = cPickle.load(f)
    f.close()

    #---------------------------------------------------------------------------
    # Create parallel domain
    #---------------------------------------------------------------------------
    parallel_domain = Parallel_domain(points, vertices, boundary, **kwargs)


    #------------------------------------------------------------------------
    # Copy in quantity data
    #------------------------------------------------------------------------
    for q in quantities:
        parallel_domain.set_quantity(q, quantities[q])


    #------------------------------------------------------------------------
    # Transfer boundary conditions to the subdomain
    #------------------------------------------------------------------------
    boundary_map['ghost'] = None  # Add binding to ghost boundary
    parallel_domain.set_boundary(boundary_map)


    #------------------------------------------------------------------------
    # Transfer other attributes to the subdomain
    #------------------------------------------------------------------------
    parallel_domain.set_name(domain_name)
    parallel_domain.set_datadir(domain_dir)
    parallel_domain.set_store(domain_store)
    parallel_domain.set_minimum_storable_height(domain_minimum_storable_height)
    parallel_domain.set_minimum_allowed_height(domain_minimum_allowed_height)
    parallel_domain.set_flow_algorithm(domain_flow_algorithm)
    parallel_domain.geo_reference = georef


    return parallel_domain

def extract_l2g_map(map):
    # Build a local-to-global (l2g) index map from the given global-to-local
    # map, where map[global_id] = local_id and an entry of -1 means the
    # global id is not present on this processor.
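    # Worked example (illustrative values, not taken from an actual run):
    # if map is the array [-1, 0, 2, -1, 1], then global ids 1, 2 and 4
    # are present with local ids 0, 2 and 1, and the result is
    # l2g = [1, 4, 2], i.e. l2g[local_id] = global_id.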

    import numpy as num

    b = num.arange(len(map))

    l_ids = num.extract(map > -1, map)
    g_ids = num.extract(map > -1, b)


#    print len(g_ids)
#    print len(l_ids)
#    print l_ids
#    print g_ids

    l2g = num.zeros_like(g_ids)
    l2g[l_ids] = g_ids

    return l2g

