"""Script for running a tsunami inundation scenario for Flagstaff pt, Wollongong harbour, NSW, Australia. Source data such as elevation and boundary data is assumed to be available in directories specified by project.py The scenario is defined by a triangular mesh created from project.polygon, the elevation data and a hypothetical boundary condition. Ole Nielsen and Duncan Gray, GA - 2005, Nick Bartzis and Jane Sexton, GA - 2006 """ #------------------------------------------------------------------------------ # Import necessary modules #------------------------------------------------------------------------------ # Standard modules import os, sys, time from os import sep from os.path import dirname, basename from Numeric import zeros, Float # Related major packages from pyvolution.shallow_water import Domain from pyvolution.shallow_water import Dirichlet_boundary from pyvolution.shallow_water import Time_boundary from pyvolution.shallow_water import Reflective_boundary from pyvolution.data_manager import convert_dem_from_ascii2netcdf, dem2pts from pmesh.mesh_interface import create_mesh_from_regions from pmesh.mesh import importUngenerateFile, Segment # Parallelism from pypar_dist import pypar # The Python-MPI interface from parallel.pmesh_divide import pmesh_divide_metis from parallel.build_submesh import build_submesh from parallel.build_local import build_local_mesh from parallel.build_commun import send_submesh, rec_submesh, extract_hostmesh from parallel.parallel_shallow_water import Parallel_Domain # Application specific imports import project #------------------------------------------------------------------------------ # Read in processor information #------------------------------------------------------------------------------ numprocs = pypar.size() myid = pypar.rank() processor_name = pypar.Get_processor_name() print 'I am processor %d of %d on node %s' %(myid, numprocs, processor_name) #------------------------------------------------------------------------------ # Preparation of topographic data # # Convert ASC 2 DEM 2 PTS using source data and store result in source data #------------------------------------------------------------------------------ max_area = project.base_resolution if myid == 0: # Create DEM from asc data #convert_dem_from_ascii2netcdf(project.demname, # use_cache=True, # verbose=True) # Create pts file from DEM #dem2pts(project.demname, # easting_min=project.xllcorner, # easting_max=project.xurcorner, # northing_min=project.yllcorner, # northing_max= project.yurcorner, # use_cache=True, # verbose=True) #-------------------------------------------------------------------------- # Create the triangular mesh based on overall clipping polygon with a # tagged boundary and interior regions defined in project.py along with # resolutions (maximal area of per triangle) for each polygon #-------------------------------------------------------------------------- ## Generate basic mesh mesh = create_mesh_from_regions(project.bounding_polygon, boundary_tags=project.boundary_tags, maximum_triangle_area=max_area, interior_regions=project.interior_regions) # Add buildings # This should bind to a Reflective boundary mesh.import_ungenerate_file(project.buildings_filename, tag='wall') # Generate and write mesh to file mesh.generate_mesh(maximum_triangle_area=max_area, verbose=True) mesh.export_mesh_file(project.mesh_filename) #-------------------------------------------------------------------------- # Setup computational domain 
    domain = Domain(project.mesh_filename, use_cache=False, verbose=True)
    print domain.statistics()

    #domain.set_name(project.basename)
    #domain.set_datadir(project.outputdir)
    #domain.set_quantities_to_be_stored(['stage', 'xmomentum', 'ymomentum'])
    #domain.set_quantities_to_be_stored(None)

    domain.set_quantity('elevation',
                        filename=project.demname + '.pts',
                        use_cache=True,
                        verbose=True)

    # Subdivide the mesh
    print 'Subdivide mesh'
    nodes, triangles, boundary, triangles_per_proc, quantities = \
        pmesh_divide_metis(domain, numprocs)

    # Build the mesh that should be assigned to each processor;
    # this includes ghost nodes and the communication pattern
    print 'Build submeshes'
    submesh = build_submesh(nodes, triangles, boundary,
                            quantities, triangles_per_proc)

    # Send the mesh partition to the appropriate processor
    print 'Distribute submeshes'
    for p in range(1, numprocs):
        send_submesh(submesh, triangles_per_proc, p)

    # Build the local mesh for processor 0
    points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
        extract_hostmesh(submesh, triangles_per_proc)

    print 'Communication done'

else:
    # Read in the mesh partition that belongs to this
    # processor (note that the information is in the
    # correct form for the GA data structure)
    points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
        rec_submesh(0)


#------------------------------------------------------------------------------
# Start the computations on each subpartition
#------------------------------------------------------------------------------

# Build the domain for this processor
domain = Parallel_Domain(points, vertices, boundary,
                         full_send_dict=full_send_dict,
                         ghost_recv_dict=ghost_recv_dict)

# FIXME (Ole): Name currently has to be set here to get the processor number
# right. It would be easy to build into Parallel_Domain
domain.set_name(project.basename)
domain.set_datadir(project.outputdir)


#------------------------------------------------------------------------------
# Setup initial conditions
#------------------------------------------------------------------------------

domain.set_quantity('stage', project.initial_sealevel)
domain.set_quantity('friction', 0.03)

# FIXME (Ole): This one segfaults which is bad, because set_quantity is
# time consuming and should be done here rather than on processor 0
#
#domain.set_quantity('elevation',
#                    filename=project.demname + '.pts',
#                    use_cache=True,
#                    verbose=True)


#------------------------------------------------------------------------------
# Setup boundary conditions
#------------------------------------------------------------------------------

D = Dirichlet_boundary([project.initial_sealevel, 0, 0])
R = Reflective_boundary(domain)
W = Time_boundary(domain=domain,
                  f=lambda t: [project.initial_sealevel + (60