source: production/wollongong_2006/run_flagstaff.py @ 3391

Last change on this file since 3391 was 3123, checked in by ole, 18 years ago

Work on parallel example

File size: 8.0 KB
RevLine 
[3040]1"""Script for running a tsunami inundation scenario for Flagstaff pt,
2Wollongong harbour, NSW, Australia.
3
4Source data such as elevation and boundary data is assumed to be available in
5directories specified by project.py
6
7The scenario is defined by a triangular mesh created from project.polygon,
8the elevation data and a hypothetical boundary condition.
9
10Ole Nielsen and Duncan Gray, GA - 2005, Nick Bartzis and Jane Sexton, GA - 2006
11"""
12
13
14#------------------------------------------------------------------------------
15# Import necessary modules
16#------------------------------------------------------------------------------
17
18
19# Standard modules
20import os, sys, time 
21from os import sep
22from os.path import dirname, basename
[3105]23from Numeric import zeros, Float
[3040]24
25# Related major packages
26from pyvolution.shallow_water import Domain
[3043]27from pyvolution.shallow_water import Dirichlet_boundary
28from pyvolution.shallow_water import Time_boundary
29from pyvolution.shallow_water import Reflective_boundary
[3040]30from pyvolution.data_manager import convert_dem_from_ascii2netcdf, dem2pts
31from pmesh.mesh_interface import create_mesh_from_regions
[3043]32from pmesh.mesh import importUngenerateFile, Segment
[3040]33
[3105]34# Parallelism
35from pypar_dist import pypar   # The Python-MPI interface
36from parallel.pmesh_divide import pmesh_divide_metis
37from parallel.build_submesh import build_submesh
38from parallel.build_local   import build_local_mesh
39from parallel.build_commun  import send_submesh, rec_submesh, extract_hostmesh
40from parallel.parallel_shallow_water import Parallel_Domain
41
42
[3040]43# Application specific imports
44import project
45
46
#------------------------------------------------------------------------------
# Read in processor information
#------------------------------------------------------------------------------

# Query the MPI runtime (via pypar) for communicator size and this process'
# rank.  Rank 0 does the sequential mesh preparation below; all other ranks
# only receive their submesh.  These three names are used throughout the
# rest of the script.
numprocs = pypar.size()
myid = pypar.rank()
processor_name = pypar.Get_processor_name()
print 'I am processor %d of %d on node %s' %(myid, numprocs, processor_name)
55
56
#------------------------------------------------------------------------------
# Preparation of topographic data
#
# Convert ASC 2 DEM 2 PTS using source data and store result in source data
#------------------------------------------------------------------------------


# Coarsest allowed triangle area for the mesh (from project configuration).
max_area = project.base_resolution
if myid == 0:
    # Processor 0 builds the mesh, attaches the elevation quantity and
    # partitions the domain; the partitions are then sent to the other
    # processors.

    # Create DEM from asc data
    # NOTE(review): the two conversion steps are commented out -- presumably
    # the NetCDF DEM and .pts files already exist on disk; confirm before
    # running on fresh source data.
    #convert_dem_from_ascii2netcdf(project.demname,
    #                              use_cache=True,
    #                              verbose=True)

    # Create pts file from DEM
    #dem2pts(project.demname,
    #        easting_min=project.xllcorner,
    #        easting_max=project.xurcorner,
    #        northing_min=project.yllcorner,
    #        northing_max= project.yurcorner,
    #        use_cache=True,
    #        verbose=True)


    #--------------------------------------------------------------------------
    # Create the triangular mesh based on overall clipping polygon with a
    # tagged boundary and interior regions defined in project.py along with
    # resolutions (maximal area of per triangle) for each polygon
    #--------------------------------------------------------------------------


    ## Generate basic mesh
    mesh = create_mesh_from_regions(project.bounding_polygon,
                                    boundary_tags=project.boundary_tags,
                                    maximum_triangle_area=max_area,
                                    interior_regions=project.interior_regions)

    # Add buildings
    # This should bind to a Reflective boundary (tag 'wall' is mapped to a
    # Reflective_boundary further below)
    mesh.import_ungenerate_file(project.buildings_filename, tag='wall')

    # Generate and write mesh to file
    mesh.generate_mesh(maximum_triangle_area=max_area,
                       verbose=True)
    mesh.export_mesh_file(project.mesh_filename)


    #--------------------------------------------------------------------------
    # Setup computational domain
    #--------------------------------------------------------------------------

    # Sequential domain, used only on processor 0 to attach quantities
    # before partitioning.  The parallel domain is built later from the
    # partitioned data.
    domain = Domain(project.mesh_filename, use_cache = False, verbose = True)
    print domain.statistics()

    #domain.set_name(project.basename)
    #domain.set_datadir(project.outputdir)
    #domain.set_quantities_to_be_stored(['stage', 'xmomentum', 'ymomentum'])
    #domain.set_quantities_to_be_stored(None)

    # Elevation must be set here, sequentially: setting it on the parallel
    # domain segfaults (see the FIXME near the Parallel_Domain setup below).
    domain.set_quantity('elevation', 
                        filename=project.demname + '.pts',
                        use_cache=True,
                        verbose=True)   


    # Subdivide the mesh using the metis graph partitioner
    print 'Subdivide mesh'
    nodes, triangles, boundary, triangles_per_proc, quantities = \
           pmesh_divide_metis(domain, numprocs)

    # Build the mesh that should be assigned to each processor,
    # this includes ghost nodes and the communication pattern
    print 'Build submeshes'   
    submesh = build_submesh(nodes, triangles, boundary,\
                            quantities, triangles_per_proc)

    # Send the mesh partition to the appropriate processor.
    # NOTE: ordering matters -- each rank p > 0 blocks in rec_submesh(0)
    # until its partition arrives.
    print 'Distribute submeshes'       
    for p in range(1, numprocs):
      send_submesh(submesh, triangles_per_proc, p)

    # Build the local mesh for processor 0
    points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
              extract_hostmesh(submesh, triangles_per_proc)

    print 'Communication done'       

else:
    # Read in the mesh partition that belongs to this
    # processor (note that the information is in the
    # correct form for the GA data structure)

    points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict \
            = rec_submesh(0)
#------------------------------------------------------------------------------
# Start the computations on each subpartition
#------------------------------------------------------------------------------


# Build the domain for this processor.
# points/vertices/boundary and the send/recv dictionaries come either from
# the partitioning on processor 0 or from rec_submesh above.
domain = Parallel_Domain(points, vertices, boundary,
                         full_send_dict  = full_send_dict,
                         ghost_recv_dict = ghost_recv_dict)

# FIXME (Ole): Name currently has to be set here to get the processor number
# right. It would be easy to build into Parallel_Domain
domain.set_name(project.basename)
domain.set_datadir(project.outputdir)

#------------------------------------------------------------------------------
# Setup initial conditions
#------------------------------------------------------------------------------

# Flat initial water surface at the project sea level; uniform Manning
# friction coefficient of 0.03.
domain.set_quantity('stage', project.initial_sealevel)
domain.set_quantity('friction', 0.03)

#
# FIXME (Ole): This one segfaults which is bad, because set_quantity is
# time consuming and should be done here rather than on processor 0
#
#domain.set_quantity('elevation',
#                    filename=project.demname + '.pts',
#                    use_cache=True,
#                    verbose=True)


#------------------------------------------------------------------------------
# Setup boundary conditions
#------------------------------------------------------------------------------

# D: stage fixed at initial sea level, zero momentum.
# R: reflective boundary for the imported building walls (tag 'wall').
# W: time boundary raising the ocean stage by 6 (presumably metres --
#    confirm units against project.py) for 60 < t < 480 seconds.
D = Dirichlet_boundary([project.initial_sealevel, 0, 0])
R = Reflective_boundary(domain)
W = Time_boundary(domain = domain,
                  f=lambda t: [project.initial_sealevel + (60<t<480)*6, 0, 0])

# 'ghost' triangles are filled by inter-processor communication, hence None.
domain.set_boundary({'exterior': D,
                     'side': D,
                     'wall': R,
                     'ocean': W,
                     'ghost': None})



print 'P%d: Ready to evolve. Value of store is %s' %(myid, str(domain.store))


#------------------------------------------------------------------------------
# Evolve system through time
#------------------------------------------------------------------------------

t0 = time.time()
# Evolve 1200 s of model time, yielding every second; only processor 0
# reports timestepping to avoid interleaved output.
for t in domain.evolve(yieldstep = 1, finaltime = 1200):
    if myid == 0:
        domain.write_time()
        #domain.write_boundary_statistics(tags = 'ocean')     
 

# Report wall-clock and communication timings from processor 0, then
# shut down MPI on all ranks.
if myid == 0:
    print 'That took %.2f seconds' %(time.time()-t0)
    print 'Communication time %.2f seconds'%domain.communication_time
    print 'Reduction Communication time %.2f seconds'\
          %domain.communication_reduce_time
    print 'Broadcast time %.2f seconds'\
          %domain.communication_broadcast_time

pypar.finalize()
Note: See TracBrowser for help on using the repository browser.