"""Script for running a tsunami inundation scenario for Flagstaff pt, Wollongong harbour, NSW, Australia. Source data such as elevation and boundary data is assumed to be available in directories specified by project.py The scenario is defined by a triangular mesh created from project.polygon, the elevation data and a hypothetical boundary condition. Ole Nielsen and Duncan Gray, GA - 2005, Nick Bartzis and Jane Sexton, GA - 2006 """ #------------------------------------------------------------------------------ # Import necessary modules #------------------------------------------------------------------------------ # Standard modules import os, sys, time from os import sep from os.path import dirname, basename from Numeric import zeros, Float # Related major packages from anuga.shallow_water import Domain from anuga.shallow_water import Dirichlet_boundary from anuga.shallow_water import Time_boundary from anuga.shallow_water import Reflective_boundary from anuga.pmesh.mesh_interface import create_mesh_from_regions from anuga.pmesh.mesh import importUngenerateFile, Segment # Parallelism import pypar # The Python-MPI interface from anuga_parallel.pmesh_divide import pmesh_divide_metis from anuga_parallel.build_submesh import build_submesh from anuga_parallel.build_local import build_local_mesh from anuga_parallel.build_commun import send_submesh, rec_submesh, extract_hostmesh from anuga_parallel.parallel_shallow_water import Parallel_Domain # Application specific imports import project #------------------------------------------------------------------------------ # Read in processor information #------------------------------------------------------------------------------ numprocs = pypar.size() myid = pypar.rank() processor_name = pypar.Get_processor_name() print 'I am processor %d of %d on node %s' %(myid, numprocs, processor_name) if myid == 0: #-------------------------------------------------------------------------- # Create the triangular mesh based on overall clipping polygon with a # tagged boundary and interior regions defined in project.py along with # resolutions (maximal area of per triangle) for each polygon #-------------------------------------------------------------------------- print 'Generate mesh' # Generate basic mesh max_area = project.base_resolution mesh = create_mesh_from_regions(project.bounding_polygon, boundary_tags=project.boundary_tags, maximum_triangle_area=max_area, interior_regions=project.interior_regions) # Add buildings that will bind to a Reflective boundary mesh.import_ungenerate_file(project.buildings_filename, tag='wall') # Generate and write mesh to file mesh.generate_mesh(maximum_triangle_area=max_area, verbose=True) mesh.export_mesh_file(project.mesh_filename) #-------------------------------------------------------------------------- # Setup computational domain #-------------------------------------------------------------------------- domain = Domain(project.mesh_filename, use_cache = False, verbose = True) print domain.statistics() domain.set_quantity('elevation', filename=project.demname + '.pts', use_cache=True, verbose=True) # Subdivide the mesh print 'Subdivide mesh' nodes, triangles, boundary, triangles_per_proc, quantities = \ pmesh_divide_metis(domain, numprocs) # Build the mesh that should be assigned to each processor, # this includes ghost nodes and the communicaiton pattern print 'Build submeshes' submesh = build_submesh(nodes, triangles, boundary,\ quantities, triangles_per_proc) # Send the mesh partition to the appropriate 
    # Send the mesh partition to the appropriate processor
    print 'Distribute submeshes'
    for p in range(1, numprocs):
        send_submesh(submesh, triangles_per_proc, p)

    # Build the local mesh for processor 0
    points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
            extract_hostmesh(submesh, triangles_per_proc)
    print 'Communication done'

else:
    # Read in the mesh partition that belongs to this
    # processor (note that the information is in the
    # correct form for the GA data structure)
    points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
            rec_submesh(0)

#------------------------------------------------------------------------------
# Start the computations on each subpartition
#------------------------------------------------------------------------------

# Build the domain for this processor
domain = Parallel_Domain(points, vertices, boundary,
                         full_send_dict=full_send_dict,
                         ghost_recv_dict=ghost_recv_dict)

# Name etc. currently have to be set here as they are not transferred from the
# original domain
domain.set_name(project.basename)
domain.set_datadir(project.outputdir)

#------------------------------------------------------------------------------
# Setup initial conditions
#------------------------------------------------------------------------------
domain.set_quantity('elevation', quantities['elevation'])  # Distribute elevation
domain.set_quantity('stage', project.initial_sealevel)
domain.set_quantity('friction', 0.03)

# FIXME (Ole): This one segfaults which is bad, because set_quantity is
# time consuming and should be done here rather than on processor 0.
# It did not segfault today 2 Aug 2006 !!!
# But values are zero ??....
#
#domain.set_quantity('elevation',
#                    filename=project.demname + '.pts',
#                    use_cache=False,
#                    verbose=True)

#------------------------------------------------------------------------------
# Setup boundary conditions
#------------------------------------------------------------------------------
D = Dirichlet_boundary([project.initial_sealevel, 0, 0])
R = Reflective_boundary(domain)
W = Time_boundary(domain=domain,
                  f=lambda t: [project.initial_sealevel + (60