source: production/wollongong_2006/run_flagstaff.py @ 3391

Last change on this file since 3391 was 3123, checked in by ole, 18 years ago

Work on parallel example

File size: 8.0 KB
1"""Script for running a tsunami inundation scenario for Flagstaff pt,
2Wollongong harbour, NSW, Australia.
3
4Source data such as elevation and boundary data is assumed to be available in
5directories specified by project.py
6
7The scenario is defined by a triangular mesh created from project.polygon,
8the elevation data and a hypothetical boundary condition.
9
10Ole Nielsen and Duncan Gray, GA - 2005, Nick Bartzis and Jane Sexton, GA - 2006
11"""


#------------------------------------------------------------------------------
# Import necessary modules
#------------------------------------------------------------------------------


# Standard modules
import os, sys, time
from os import sep
from os.path import dirname, basename
from Numeric import zeros, Float

# Related major packages
from pyvolution.shallow_water import Domain
from pyvolution.shallow_water import Dirichlet_boundary
from pyvolution.shallow_water import Time_boundary
from pyvolution.shallow_water import Reflective_boundary
from pyvolution.data_manager import convert_dem_from_ascii2netcdf, dem2pts
from pmesh.mesh_interface import create_mesh_from_regions
from pmesh.mesh import importUngenerateFile, Segment

# Parallelism
from pypar_dist import pypar   # The Python-MPI interface
from parallel.pmesh_divide import pmesh_divide_metis
from parallel.build_submesh import build_submesh
from parallel.build_local   import build_local_mesh
from parallel.build_commun  import send_submesh, rec_submesh, extract_hostmesh
from parallel.parallel_shallow_water import Parallel_Domain


# Application specific imports
import project


#------------------------------------------------------------------------------
# Read in processor information
#------------------------------------------------------------------------------

numprocs = pypar.size()
myid = pypar.rank()
processor_name = pypar.Get_processor_name()
print 'I am processor %d of %d on node %s' %(myid, numprocs, processor_name)


#------------------------------------------------------------------------------
# Preparation of topographic data
#
# Convert ASC to DEM to PTS using source data and store the result with the
# source data
#------------------------------------------------------------------------------


max_area = project.base_resolution
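
# Processor 0 prepares the topographic data, builds and partitions the mesh
# and sends a submesh to each of the other processors; the other processors
# wait in rec_submesh() below until their partition arrives.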
if myid == 0:

    # Create DEM from asc data
    #convert_dem_from_ascii2netcdf(project.demname,
    #                              use_cache=True,
    #                              verbose=True)

    # Create pts file from DEM
    #dem2pts(project.demname,
    #        easting_min=project.xllcorner,
    #        easting_max=project.xurcorner,
    #        northing_min=project.yllcorner,
    #        northing_max=project.yurcorner,
    #        use_cache=True,
    #        verbose=True)


    #--------------------------------------------------------------------------
    # Create the triangular mesh based on the overall clipping polygon with a
    # tagged boundary and interior regions defined in project.py, along with
    # resolutions (maximal area per triangle) for each polygon
    #--------------------------------------------------------------------------


    ## Generate basic mesh
    mesh = create_mesh_from_regions(project.bounding_polygon,
                                    boundary_tags=project.boundary_tags,
                                    maximum_triangle_area=max_area,
                                    interior_regions=project.interior_regions)

    # Add buildings
    # This should bind to a Reflective boundary
    mesh.import_ungenerate_file(project.buildings_filename, tag='wall')

    # Generate and write mesh to file
    mesh.generate_mesh(maximum_triangle_area=max_area,
                       verbose=True)
    mesh.export_mesh_file(project.mesh_filename)


    #--------------------------------------------------------------------------
    # Setup computational domain
    #--------------------------------------------------------------------------

    domain = Domain(project.mesh_filename, use_cache = False, verbose = True)
    print domain.statistics()

    #domain.set_name(project.basename)
    #domain.set_datadir(project.outputdir)
    #domain.set_quantities_to_be_stored(['stage', 'xmomentum', 'ymomentum'])
    #domain.set_quantities_to_be_stored(None)

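    # Fit the elevation quantity from the pts file here, while still on
    # processor 0, so that the fitted values are distributed along with the
    # submeshes (see the FIXME further down: setting elevation from file
    # after distribution currently segfaults).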
    domain.set_quantity('elevation',
                        filename=project.demname + '.pts',
                        use_cache=True,
                        verbose=True)


    # Subdivide the mesh
    print 'Subdivide mesh'
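    # pmesh_divide_metis partitions the triangles into numprocs pieces using
    # the Metis graph partitioner and returns the full mesh arrays together
    # with the number of triangles assigned to each processor.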
    nodes, triangles, boundary, triangles_per_proc, quantities = \
           pmesh_divide_metis(domain, numprocs)

    # Build the mesh that should be assigned to each processor;
    # this includes ghost nodes and the communication pattern
    print 'Build submeshes'
    submesh = build_submesh(nodes, triangles, boundary,
                            quantities, triangles_per_proc)

    # Send the mesh partition to the appropriate processor
    print 'Distribute submeshes'
    for p in range(1, numprocs):
        send_submesh(submesh, triangles_per_proc, p)

    # Build the local mesh for processor 0
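    # full_send_dict and ghost_recv_dict encode the communication pattern:
    # which of this processor's triangles are sent to, and which ghost
    # triangles are received from, each neighbouring processor.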
    points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
              extract_hostmesh(submesh, triangles_per_proc)

    print 'Communication done'

else:
    # Read in the mesh partition that belongs to this
    # processor (note that the information is in the
    # correct form for the GA data structure)

    points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict \
            = rec_submesh(0)




#------------------------------------------------------------------------------
# Start the computations on each subpartition
#------------------------------------------------------------------------------


# Build the domain for this processor
domain = Parallel_Domain(points, vertices, boundary,
                         full_send_dict  = full_send_dict,
                         ghost_recv_dict = ghost_recv_dict)

# FIXME (Ole): Name currently has to be set here to get the processor number
# right. It would be easy to build this into Parallel_Domain
domain.set_name(project.basename)
domain.set_datadir(project.outputdir)

#------------------------------------------------------------------------------
# Setup initial conditions
#------------------------------------------------------------------------------

domain.set_quantity('stage', project.initial_sealevel)
domain.set_quantity('friction', 0.03)

#
# FIXME (Ole): This one segfaults, which is bad, because set_quantity is
# time consuming and should be done here rather than on processor 0
#
#domain.set_quantity('elevation',
#                    filename=project.demname + '.pts',
#                    use_cache=True,
#                    verbose=True)


#------------------------------------------------------------------------------
# Setup boundary conditions
#------------------------------------------------------------------------------

D = Dirichlet_boundary([project.initial_sealevel, 0, 0])
R = Reflective_boundary(domain)
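# Time-dependent boundary: the stage is raised 6 m above the initial sea level
# between t = 60 s and t = 480 s (the expression (60<t<480) evaluates to 1 in
# that interval and 0 otherwise), a simple hypothetical forcing applied to the
# 'ocean' boundary below.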
W = Time_boundary(domain = domain,
                  f=lambda t: [project.initial_sealevel + (60<t<480)*6, 0, 0])

domain.set_boundary({'exterior': D,
                     'side': D,
                     'wall': R,
                     'ocean': W,
                     'ghost': None})
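# The 'ghost' tag covers the extra ghost triangles added to each submesh;
# they are updated through parallel communication rather than by a boundary
# condition, hence None.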



print 'P%d: Ready to evolve. Value of store is %s' %(myid, str(domain.store))


#------------------------------------------------------------------------------
# Evolve system through time
#------------------------------------------------------------------------------

t0 = time.time()
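# Evolve for 1200 s of simulation time, yielding control back every 1 s
# (yieldstep) so that timestepping statistics can be written; only
# processor 0 reports the time.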
for t in domain.evolve(yieldstep = 1, finaltime = 1200):
    if myid == 0:
        domain.write_time()
        #domain.write_boundary_statistics(tags = 'ocean')


if myid == 0:
    print 'That took %.2f seconds' %(time.time()-t0)
    print 'Communication time %.2f seconds' %domain.communication_time
    print 'Reduction communication time %.2f seconds'\
          %domain.communication_reduce_time
    print 'Broadcast time %.2f seconds'\
          %domain.communication_broadcast_time

pypar.finalize()