Changeset 5789 for anuga_work/production/geraldton/run_geraldton.py
- Timestamp:
- Sep 25, 2008, 3:14:42 PM (16 years ago)
- File:
-
- 1 edited
Legend:
- Unmodified
- Added
- Removed
-
anuga_work/production/geraldton/run_geraldton.py
r5751 r5789 1 """Script for running tsunami inundation scenario for Dampier, WA, Australia. 2 3 Source data such as elevation and boundary data is assumed to be available in 4 directories specified by project.py 5 The output sww file is stored in project.output_run_time_dir 1 """Script for running a tsunami inundation scenario for geraldton, WA, Australia. 6 2 7 3 The scenario is defined by a triangular mesh created from project.polygon, 8 the elevation data and a simulated tsunami generated with URS code. 9 10 Ole Nielsen and Duncan Gray, GA - 2005 and Jane Sexton, Nick Bartzis, GA - 2006 4 the elevation data is compiled into a pts file through build_geraldton.py 5 and a simulated tsunami is generated through an sts file from build_boundary.py. 6 7 Input: sts file (build_boundary.py for respective event) 8 pts file (build_geraldton.py) 9 information from project file 10 Outputs: sww file stored in project.output_run_time_dir 11 The export_results_all.py and get_timeseries.py is reliant 12 on the outputs of this script 13 14 Ole Nielsen and Duncan Gray, GA - 2005, Jane Sexton, Nick Bartzis, GA - 2006 15 Ole Nielsen, Jane Sexton and Kristy Van Putten - 2008 11 16 """ 12 17 … … 32 37 from Numeric import allclose 33 38 from anuga.shallow_water.data_manager import export_grid, create_sts_boundary 34 35 39 from anuga.pmesh.mesh_interface import create_mesh_from_regions 36 40 from anuga.shallow_water.data_manager import start_screen_catcher, copy_code_files,store_parameters 37 #from anuga_parallel.parallel_api import distribute, numprocs, myid, barrier38 41 from anuga_parallel.parallel_abstraction import get_processor_name 39 42 from anuga.caching import myhash … … 42 45 from anuga.utilities.polygon import read_polygon, plot_polygons, polygon_area, is_inside_polygon 43 46 from anuga.geospatial_data.geospatial_data import find_optimal_smoothing_parameter 44 from Scientific.IO.NetCDF import NetCDFFile45 47 from polygon import Polygon_function 48 46 49 # Application 
specific imports 47 import project 50 import project # Definition of file names and polygons 48 51 numprocs = 1 49 52 myid = 0 … … 51 54 def run_model(**kwargs): 52 55 53 54 56 #------------------------------------------------------------------------------ 55 57 # Copy scripts to time stamped output directory and capture screen … … 59 61 60 62 #copy script must be before screen_catcher 61 #print kwargs62 63 63 64 print 'output_dir',kwargs['output_dir'] 64 if myid == 0: 65 copy_code_files(kwargs['output_dir'],__file__, 66 dirname(project.__file__)+sep+ project.__name__+'.py' ) 67 68 store_parameters(**kwargs) 69 70 # barrier() 65 66 copy_code_files(kwargs['output_dir'],__file__, 67 dirname(project.__file__)+sep+ project.__name__+'.py' ) 68 69 store_parameters(**kwargs) 71 70 72 71 start_screen_catcher(kwargs['output_dir'], myid, numprocs) 73 72 74 73 print "Processor Name:",get_processor_name() 75 76 74 77 75 #----------------------------------------------------------------------- … … 80 78 81 79 # Read in boundary from ordered sts file 82 urs_bounding_polygon=create_sts_boundary(os.path.join(project.boundaries_dir ,project.scenario_name))80 urs_bounding_polygon=create_sts_boundary(os.path.join(project.boundaries_dir_event,project.scenario_name)) 83 81 84 82 # Reading the landward defined points, this incorporates the original clipping 85 83 # polygon minus the 100m contour 86 landward_bounding_polygon = read_polygon(project. 
boundaries_dir+'landward_boundary_polygon.txt')84 landward_bounding_polygon = read_polygon(project.landward_dir) 87 85 88 86 # Combine sts polyline with landward points … … 91 89 # counting segments 92 90 N = len(urs_bounding_polygon)-1 93 boundary_tags={'back': [N+2,N+3], 'side': [N,N+1,N+4],'ocean': range(N)} 94 95 print 'boundary tags',boundary_tags96 91 92 # boundary tags refer to project.landward 4 points equals 5 segments start at N 93 boundary_tags={'back': [N+2,N+3], 'side': [N,N+1,N+4], 'ocean': range(N)} 94 97 95 #-------------------------------------------------------------------------- 98 # Create the triangular mesh based on overall clipping polygon with a 99 # tagged 96 # Create the triangular mesh based on overall clipping polygon with a tagged 100 97 # boundary and interior regions defined in project.py along with 101 98 # resolutions (maximal area of per triangle) for each polygon 102 99 #-------------------------------------------------------------------------- 103 100 104 # IMPORTANT don't cache create_mesh_from_region and Domain(mesh....) as it101 # IMPORTANT don't cache create_mesh_from_region and Domain(mesh....) 
as it 105 102 # causes problems with the ability to cache set quantity which takes alot of times 106 if myid == 0: 107 108 print 'start create mesh from regions' 109 110 create_mesh_from_regions(bounding_polygon, 111 boundary_tags=boundary_tags, 112 maximum_triangle_area=project.res_poly_all, 113 interior_regions=project.interior_regions, 114 filename=project.meshes_dir_name+'.msh', 115 use_cache=True, 116 verbose=True) 117 # barrier() 118 119 ## covariance_value,alpha = find_optimal_smoothing_parameter (data_file= kwargs['elevation_file'], 120 ## alpha_list=[0.001, 0.01, 0.1, 0.15, 0.2, 0.25, 0.3, 0.4, 0.5], 121 ## mesh_file = project.meshes_dir_name+'.msh') 122 ## print 'optimal alpha', covariance_value,alpha 123 103 104 print 'start create mesh from regions' 105 106 create_mesh_from_regions(bounding_polygon, 107 boundary_tags=boundary_tags, 108 maximum_triangle_area=project.res_poly_all, 109 interior_regions=project.interior_regions, 110 filename=project.meshes_dir_name, 111 use_cache=True, 112 verbose=True) 113 124 114 #------------------------------------------------------------------------- 125 115 # Setup computational domain … … 127 117 print 'Setup computational domain' 128 118 129 domain = Domain(project.meshes_dir_name +'.msh', use_cache=False, verbose=True)119 domain = Domain(project.meshes_dir_name, use_cache=False, verbose=True) 130 120 print 'memory usage before del domain',mem_usage() 131 121 … … 139 129 # Setup initial conditions 140 130 #------------------------------------------------------------------------- 141 if myid == 0: 142 143 print 'Setup initial conditions' 144 145 from polygon import Polygon_function 146 #following sets the stage/water to be offcoast only 147 IC = Polygon_function( [(project.poly_mainland, 0)], default = kwargs['tide'], 148 geo_reference = domain.geo_reference) 149 domain.set_quantity('stage', IC) 150 #domain.set_quantity('stage',kwargs['tide'] ) 151 domain.set_quantity('friction', kwargs['friction']) 152 153 print 
'Start Set quantity',kwargs['elevation_file'] 154 155 domain.set_quantity('elevation', 156 filename = kwargs['elevation_file'], 157 use_cache = False, 158 verbose = True, 159 alpha = kwargs['alpha']) 160 print 'Finished Set quantity' 161 #barrier() 162 131 print 'Setup initial conditions' 132 133 # sets the initial stage in the offcoast region only 134 IC = Polygon_function( [(project.poly_mainland, 0)], default = kwargs['tide'], 135 geo_reference = domain.geo_reference) 136 domain.set_quantity('stage', IC) 137 #domain.set_quantity('stage',kwargs['tide'] ) 138 domain.set_quantity('friction', kwargs['friction']) 139 140 print 'Start Set quantity',kwargs['elevation_file'] 141 142 domain.set_quantity('elevation', 143 filename = kwargs['elevation_file'], 144 use_cache = False, 145 verbose = True, 146 alpha = kwargs['alpha']) 147 print 'Finished Set quantity' 148 149 ## #------------------------------------------------------ 150 ## # Distribute domain to implement parallelism !!! 163 151 ## #------------------------------------------------------ 164 ## # Create x,y,z file of mesh vertex!!!165 ## #------------------------------------------------------166 ## coord = domain.get_vertex_coordinates()167 ## depth = domain.get_quantity('elevation')168 ##169 ## # Write vertex coordinates to file170 ## filename=project.vertex_filename171 ## fid=open(filename,'w')172 ## fid.write('x (m), y (m), z(m)\n')173 ## for i in range(len(coord)):174 ## pt=coord[i]175 ## x=pt[0]176 ## y=pt[1]177 ## z=depth[i]178 ## fid.write('%.6f,%.6f,%.6f\n' %(x, y, z))179 152 ## 180 181 #------------------------------------------------------ 182 # Distribute domain to implement parallelism !!! 
183 #------------------------------------------------------ 184 185 if numprocs > 1: 186 domain=distribute(domain) 153 ## if numprocs > 1: 154 ## domain=distribute(domain) 187 155 188 156 #------------------------------------------------------ … … 190 158 #------------------------------------------------------ 191 159 print 'domain id', id(domain) 192 domain.set_name(kwargs[' aa_scenario_name'])160 domain.set_name(kwargs['scenario_name']) 193 161 domain.set_datadir(kwargs['output_dir']) 194 domain.set_default_order(2) # Apply second order scheme195 domain.set_minimum_storable_height(0.01) # Don't store anything less than 1cm162 domain.set_default_order(2) # Apply second order scheme 163 domain.set_minimum_storable_height(0.01) # Don't store anything less than 1cm 196 164 domain.set_store_vertices_uniquely(False) 197 165 domain.set_quantities_to_be_stored(['stage', 'xmomentum', 'ymomentum']) … … 205 173 print 'domain id', id(domain) 206 174 207 boundary_urs_out=project.boundaries_dir_ name175 boundary_urs_out=project.boundaries_dir_event + sep + project.scenario_name 208 176 209 177 Br = Reflective_boundary(domain) … … 237 205 domain.write_boundary_statistics(tags = 'ocean') 238 206 239 207 # these outputs should be checked with the resultant inundation map 240 208 x, y = domain.get_maximum_inundation_location() 241 209 q = domain.get_maximum_inundation_elevation() 242 243 210 print 'Maximum runup observed at (%.2f, %.2f) with elevation %.2f' %(x,y,q) 244 211 245 print ' Thattook %.2f seconds' %(time.time()-t0)212 print 'Simulation took %.2f seconds' %(time.time()-t0) 246 213 247 214 #kwargs 'completed' must be added to write the final parameters to file 248 215 kwargs['completed']=str(time.time()-t0) 249 250 if myid==0: 251 store_parameters(**kwargs) 252 # barrier 253 216 217 store_parameters(**kwargs) 218 254 219 print 'memory usage before del domain1',mem_usage() 255 220 … … 259 224 260 225 kwargs={} 261 kwargs['est_num_trigs']=project.trigs_min262 
kwargs['num_cpu']=numprocs263 kwargs['host']=project.host264 kwargs['res_factor']=project.res_factor265 kwargs['starttime']=project.starttime266 kwargs['yieldstep']=project.yieldstep267 226 kwargs['finaltime']=project.finaltime 268 269 227 kwargs['output_dir']=project.output_run_time_dir 270 228 kwargs['elevation_file']=project.combined_dir_name+'.pts' 271 kwargs['file_name']=project.home+'detail.csv' 272 kwargs['aa_scenario_name']=project.scenario_name 273 kwargs['ab_time']=project.time 274 kwargs['res_factor']= project.res_factor 229 kwargs['scenario_name']=project.scenario_name 275 230 kwargs['tide']=project.tide 276 kwargs['user']=project.user277 231 kwargs['alpha'] = project.alpha 278 232 kwargs['friction']=project.friction 279 kwargs['time_thinning'] = project.time_thinning 280 kwargs['dir_comment']=project.dir_comment 281 kwargs['export_cellsize']=project.export_cellsize 282 283 233 #kwargs['num_cpu']=numprocs 234 #kwargs['host']=project.host 235 #kwargs['starttime']=project.starttime 236 #kwargs['yieldstep']=project.yieldstep 237 #kwargs['user']=project.user 238 #kwargs['time_thinning'] = project.time_thinning 239 284 240 run_model(**kwargs) 285 241 286 #barrier242
Note: See TracChangeset for help on using the changeset viewer.