Changeset 5111 for anuga_work/production
- Timestamp: Mar 4, 2008, 3:00:23 PM
- Location: anuga_work/production/perth
- Files: 2 edited
anuga_work/production/perth/project.py (r4988 -> r5111)

  from anuga.utilities.polygon import read_polygon, plot_polygons, is_inside_polygon, number_mesh_triangles
  #from anuga.coordinate_transforms.redfearn import degminsec2decimal_degrees, convert_points_from_latlon_to_utm
- from anuga.utilities.system_tools import get_user_name
+ from anuga.utilities.system_tools import get_user_name, get_host_name

  # file and system info
  #---------------------------------
- codename = 'project.py'
-
- home = getenv('INUNDATIONHOME') #Sandpit's parent dir
+ #codename = 'project.py'
+
+ home = getenv('INUNDATIONHOME') + sep +'data'+sep #Sandpit's parent dir
  user = get_user_name()
+ host = get_host_name()

  # INUNDATIONHOME is the inundation directory, not the data directory.
- home += sep +'data'

  #time stuff
  ...
  print 'gtime: ', gtime

- tide = 0.6
-
  #Making assumptions about the location of scenario data
  state = 'western_australia'
  scenario_name = 'perth'
- scenario = 'perth_tsunami_scenario_2006'
+ scenario = 'perth_tsunami_scenario'
+
+ tide = 0.6
+
+ alpha = 0.1
+ friction=0.01
+ starttime=10000
+ midtime=21600
+ finaltime=10000
+ export_cellsize=50
+ setup='trial'
+ source='test'
+
+ if setup =='trial':
+     print 'trial'
+     res_factor=10
+     time_thinning=48
+     yieldstep=240
+ if setup =='basic':
+     print 'basic'
+     res_factor=4
+     time_thinning=12
+     yieldstep=120
+ if setup =='final':
+     print 'final'
+     res_factor=1
+     time_thinning=4
+     yieldstep=60
+
+ dir_comment='_'+setup+'_'+str(tide)+'_'+str(source)+'_'+str(user)

  # onshore data provided by WA DLI
  ...
  combined_smaller_name = 'perth_combined_elevation_smaller'

+ anuga_dir = home+state+sep+scenario+sep+'anuga'+sep
+
  topographies_in_dir = home+sep+state+sep+scenario+sep+'elevation_final'+sep+'points'+sep
- topographies_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'topographies'+sep
+ topographies_dir = anuga_dir+'topographies'+sep
  topographies_time_dir = topographies_dir+build_time+sep
  ...
  #combined_time_dir_final_name = topographies_time_dir + combined_final_name

- meshes_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'meshes'+sep
+ meshes_dir = anuga_dir+'meshes'+sep
  meshes_dir_name = meshes_dir + scenario_name

- polygons_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'polygons'+sep
- tide_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'tide_data'+sep
-
- boundaries_source = '1'
+ polygons_dir = anuga_dir+'polygons'+sep
+ tide_dir = anuga_dir+'tide_data'+sep
+
+ #boundaries_source = '1'
+
+ if source =='dampier':
+     boundaries_name = 'broome_3854_17042007' #Dampier gun
+     boundaries_in_dir = anuga_dir+'boundaries'+sep+'urs'+sep+'dampier'+sep+'1_10000'+sep
+
+ if source=='onslow':
+     boundaries_name = 'broome_3859_16052007' #onslow_hedland_broome gun
+     boundaries_in_dir = anuga_dir+'boundaries'+sep+'urs'+sep+'onslow_hedland_broome'+sep+'1_10000'+sep
+
+ if source=='exmouth':
+     boundaries_name = 'broome_3103_18052007' #exmouth gun
+     boundaries_in_dir = anuga_dir+'boundaries'+sep+'urs'+sep+'exmouth'+sep+'1_10000'+sep
+
+ if source=='test':
+     boundaries_name = 'other' #exmouth gun
+     boundaries_in_dir = anuga_dir+'boundaries'+sep

  #boundaries locations
- boundaries_in_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'boundaries'+sep+'urs'+sep+boundaries_source+sep
  boundaries_in_dir_name = boundaries_in_dir + scenario_name
- boundaries_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'boundaries'+sep
+ boundaries_dir = anuga_dir+'boundaries'+sep
  boundaries_dir_name = boundaries_dir + scenario_name
- #boundaries_time_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'boundaries'+sep+build_time+sep
+ #boundaries_time_dir = anuga_dir+'boundaries'+sep+build_time+sep
  #boundaries_time_dir_name = boundaries_time_dir + boundaries_name #Used by post processing

  #output locations
- output_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'outputs'+sep
- output_build_time_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'outputs'+sep+build_time+sep
- output_run_time_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'outputs'+sep+run_time+sep
+ output_dir = anuga_dir+'outputs'+sep
+ output_build_time_dir = anuga_dir+'outputs'+sep+build_time+sep
+ output_run_time_dir = anuga_dir+'outputs'+sep+dir_comment+sep
  output_run_time_dir_name = output_run_time_dir + scenario_name #Used by post processing

  #gauges
  gauge_name = 'perth.csv'
- gauges_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'gauges'+sep
+ gauges_dir = anuga_dir+'gauges'+sep
  gauges_dir_name = gauges_dir + gauge_name
  ...
  poly_all = read_polygon(polygons_dir+'bounding_poly.csv')
- res_poly_all = 100000
-
- refzone = 50
+ res_poly_all = 100000*res_factor
+
+ #refzone = 50

  ###############################
  ...
  #poly_pos20_neg20 = read_polygon(polygons_dir+'pos20_neg20pts.csv')
  poly_pos20_neg20 = read_polygon(polygons_dir+'pos20_neg20_new_pts.csv')
- res_pos20_neg20 = 20000
+ res_pos20_neg20 = 20000*res_factor

  #poly_cbd = read_polygon(polygons_dir+'cbd_pts.csv')
- poly_cbd = read_polygon(polygons_dir+'cbd_new_pts.csv')
- res_cbd = 1000
+ poly_cbd = read_polygon(polygons_dir+'cbd_smaller_pts.csv')
+ res_cbd = 500*res_factor

  poly_penguin = read_polygon(polygons_dir+'penguin_pts.csv')
- res_penguin = 1000
+ res_penguin = 500*res_factor
+
+ poly_geordie_bay = read_polygon(polygons_dir+'geordie_bay_pts.csv')
+ res_geordie_bay = 500*res_factor
+
+ poly_sorrento_gauge = read_polygon(polygons_dir+'sorrento_gauge_pts.csv')
+ res_sorrento_gauge = 500*res_factor
  #assert zone == refzone

  interior_regions = [[poly_pos20_neg20,res_pos20_neg20],[poly_cbd,res_cbd]
+                    ,[poly_penguin,res_penguin],[poly_penguin,res_penguin]
                     ,[poly_penguin,res_penguin]]
+
+ boundary_tags={'back': [4], 'side': [0,3],'ocean': [1, 2]}

  trigs_min = number_mesh_triangles(interior_regions, poly_all, res_poly_all)
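The main change in project.py is that every mesh resolution is now scaled by a single res_factor chosen through the setup switch ('trial', 'basic' or 'final'), and the run-specific output directory is tagged via dir_comment. The standalone sketch below is not part of the changeset; the numbers are copied from the diff, and the dictionary is simply a compact restatement of the if-chain, so the scaling is easy to follow in isolation.

    # Standalone restatement of the setup/res_factor pattern from project.py r5111.
    # 'trial' gives a coarse, fast run; 'final' gives the full-resolution run.
    setup = 'trial'
    tide = 0.6
    source = 'test'
    user = 'someuser'          # project.py obtains this from get_user_name()

    # (res_factor, time_thinning, yieldstep) per setup, values as in the diff
    presets = {'trial': (10, 48, 240),
               'basic': (4, 12, 120),
               'final': (1, 4, 60)}
    res_factor, time_thinning, yieldstep = presets[setup]

    # every maximum triangle area is multiplied by res_factor,
    # so 'trial' triangles are 10 times larger than 'final' ones
    res_poly_all = 100000*res_factor
    res_cbd = 500*res_factor

    # run-specific output directory suffix, e.g. '_trial_0.6_test_someuser'
    dir_comment = '_'+setup+'_'+str(tide)+'_'+str(source)+'_'+str(user)
    print 'res_poly_all:', res_poly_all
    print 'dir_comment: ', dir_comment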
anuga_work/production/perth/run_perth.py (r4359 -> r5111)

- """Script for running tsunami inundation scenario for Perth, WA, Australia.
+ """Script for running tsunami inundation scenario for Dampier, WA, Australia.

  Source data such as elevation and boundary data is assumed to be available in
  ...
  from anuga.shallow_water import File_boundary
  from anuga.shallow_water import Reflective_boundary
+ from anuga.shallow_water import Field_boundary
  from Numeric import allclose
+ from anuga.shallow_water.data_manager import export_grid

  from anuga.pmesh.mesh_interface import create_mesh_from_regions
- from anuga.geospatial_data.geospatial_data import *
- from anuga.abstract_2d_finite_volumes.util import start_screen_catcher, copy_code_files
+ from anuga.shallow_water.data_manager import start_screen_catcher, copy_code_files,store_parameters
  from anuga_parallel.parallel_api import distribute, numprocs, myid, barrier
- from anuga.utilities.polygon import plot_polygons, polygon_area
+ from anuga_parallel.parallel_abstraction import get_processor_name
+ from anuga.caching import myhash
+ from anuga.damage_modelling.inundation_damage import add_depth_and_momentum2csv, inundation_damage
+ from anuga.fit_interpolate.benchmark_least_squares import mem_usage

  # Application specific imports
  import project                 # Definition of file names and polygons

- #------------------------------------------------------------------------------
- # Copy scripts to time stamped output directory and capture screen
- # output to file
- #------------------------------------------------------------------------------
-
- start_screen_catcher(project.output_run_time_dir, myid, numprocs)
-
- # filenames
- boundaries_name = project.scenario
- meshes_dir_name = project.meshes_dir_name+'.msh'
- #boundaries_time_dir_name = project.boundaries_time_dir_name
- boundaries_dir_name = project.boundaries_dir_name
-
- tide = project.tide
- print 'hello'
- # creates copy of code in output dir
- if myid == 0:
-     copy_code_files(project.output_run_time_dir,__file__,
+ def run_model(**kwargs):
+
+     #------------------------------------------------------------------------------
+     # Copy scripts to time stamped output directory and capture screen
+     # output to file
+     #------------------------------------------------------------------------------
+     print "Processor Name:",get_processor_name()
+
+     #copy script must be before screen_catcher
+     #print kwargs
+
+     print 'output_dir',kwargs['output_dir']
+     if myid == 0:
+         copy_code_files(kwargs['output_dir'],__file__,
                          dirname(project.__file__)+sep+ project.__name__+'.py' )
-     barrier()
-
- print 'USER: ', project.user
- print 'min triangles', project.trigs_min,
- print 'Note: This is generally about 20% less than the final amount'
- import sys
- sys.exit()
- #--------------------------------------------------------------------------
- # Create the triangular mesh based on overall clipping polygon with a
- # tagged
- # boundary and interior regions defined in project.py along with
- # resolutions (maximal area of per triangle) for each polygon
- #--------------------------------------------------------------------------
-
- if myid == 0:
-
-     print 'start create mesh from regions'
-     create_mesh_from_regions(project.poly_all,
-                              boundary_tags={'back': [4], 'side': [0,3],
-                                             'ocean': [1, 2]},
-                              maximum_triangle_area=project.res_poly_all,
-                              interior_regions=project.interior_regions,
-                              # filename=meshes_time_dir_name,
-                              filename=meshes_dir_name,
-                              use_cache=True,
-                              verbose=True)
-
- # to sync all processors are ready
- barrier()
-
- #-------------------------------------------------------------------------
- # Setup computational domain
- #-------------------------------------------------------------------------
- print 'Setup computational domain'
- #domain = Domain(meshes_time_dir_name, use_cache=True, verbose=True)
- domain = Domain(meshes_dir_name, use_cache=True, verbose=True)
- print domain.statistics()
-
- #-------------------------------------------------------------------------
- # Setup initial conditions
- #-------------------------------------------------------------------------
- if myid == 0:
-
-     print 'Setup initial conditions'
-
-     domain.set_quantity('stage', tide)
-     domain.set_quantity('friction', 0.01)
-     #combined_time_dir_name = project.topographies_dir+build_time+project.combined_name
-     print 'Start Set quantity'
-
-     domain.set_quantity('elevation',
-                         filename = project.combined_dir_name+'.txt',
-                         # filename = project.combined_smaller_name_dir+'.txt',
-                         use_cache = True,
-                         verbose = True,
-                         alpha = 0.1)
-     print 'Finished Set quantity'
- barrier()
-
- #------------------------------------------------------
- # Distribute domain to implement parallelism !!!
- #------------------------------------------------------
-
- if numprocs > 1:
-     domain=distribute(domain)
-
- #------------------------------------------------------
- # Set domain parameters
- #------------------------------------------------------
-
- domain.set_name(project.scenario_name)
- domain.set_datadir(project.output_run_time_dir)
- domain.set_default_order(2)               # Apply second order scheme
- domain.set_minimum_storable_height(0.001) # Don't store anything less than 0.1cm
- #domain.set_minimum_storable_height(0.01) # Don't store anything less than 1cm
- #domain.set_minimum_storable_height(0.1)  # Don't store anything less than 10cm
- domain.set_store_vertices_uniquely(False)
- domain.set_quantities_to_be_stored(['stage', 'xmomentum', 'ymomentum'])
- domain.set_maximum_allowed_speed(0.1)     # Allow a little runoff (0.1 is OK)
-
- #-------------------------------------------------------------------------
- # Setup boundary conditions
- #-------------------------------------------------------------------------
- print 'Available boundary tags', domain.get_boundary_tags()
-
- print 'Reading Boundary file'
- print 'domain id', id(domain)
- #Bf = File_boundary(boundaries_dir_name + '.sww',
- #                   domain, time_thinning=5, use_cache=True, verbose=True)
-
- print 'finished reading boundary file'
- Br = Reflective_boundary(domain)
- Bd = Dirichlet_boundary([tide,0,0])
- Bo = Dirichlet_boundary([tide+5.0,0,0])
-
- print 'set_boundary'
- domain.set_boundary({'back': Br,
-                      'side': Bd,
-                      'ocean': Bd})
- print 'finish set boundary'
-
- #----------------------------------------------------------------------------
- # Evolve system through time
- #----------------------------------------------------------------------------
-
- t0 = time.time()
-
- for t in domain.evolve(yieldstep = 60, finaltime = 10000):
-     domain.write_time()
-     domain.write_boundary_statistics(tags = 'ocean')
-     if allclose(t, 120):
-         domain.set_boundary({'back': Br, 'side': Bd, 'ocean': Bo})
-
-     if allclose(t, 1020):
-         domain.set_boundary({'back': Br, 'side': Bd, 'ocean': Bd})
-
- print 'That took %.2f seconds' %(time.time()-t0)
+
+     store_parameters(**kwargs)
+
+     barrier()
+
+     start_screen_catcher(kwargs['output_dir'], myid, numprocs)
+
+     print "Processor Name:",get_processor_name()
+
+     # filenames
+     # meshes_dir_name = project.meshes_dir_name+'.msh'
+
+     # creates copy of code in output dir
+     print 'min triangles', project.trigs_min,
+     print 'Note: This is generally about 20% less than the final amount'
+
+     #--------------------------------------------------------------------------
+     # Create the triangular mesh based on overall clipping polygon with a
+     # tagged
+     # boundary and interior regions defined in project.py along with
+     # resolutions (maximal area of per triangle) for each polygon
+     #--------------------------------------------------------------------------
+
+     #IMPORTANT don't cache create_mesh_from_region and Domain(mesh....) as it
+     # causes problems with the ability to cache set quantity which takes alot of times
+     if myid == 0:
+
+         print 'start create mesh from regions'
+
+         create_mesh_from_regions(project.poly_all,
+                                  boundary_tags=project.boundary_tags,
+                                  maximum_triangle_area=project.res_poly_all,
+                                  interior_regions=project.interior_regions,
+                                  filename=project.meshes_dir_name+'.msh',
+                                  use_cache=False,
+                                  verbose=True)
+     barrier()
+
+     #-------------------------------------------------------------------------
+     # Setup computational domain
+     #-------------------------------------------------------------------------
+     print 'Setup computational domain'
+
+     #domain = cache(Domain, (meshes_dir_name), {'use_cache':True, 'verbose':True}, verbose=True)
+     #above don't work
+     domain = Domain(project.meshes_dir_name+'.msh', use_cache=False, verbose=True)
+     print 'memory usage before del domain',mem_usage()
+
+     print domain.statistics()
+     print 'triangles',len(domain)
+
+     kwargs['act_num_trigs']=len(domain)
+
+     #-------------------------------------------------------------------------
+     # Setup initial conditions
+     #-------------------------------------------------------------------------
+     if myid == 0:
+
+         print 'Setup initial conditions'
+
+         from polygon import Polygon_function
+         #following sets the stage/water to be offcoast only
+         # IC = Polygon_function( [(project.poly_mainland, -1.0)], default = kwargs['tide'],
+         #                        geo_reference = domain.geo_reference)
+         # domain.set_quantity('stage', IC)
+         domain.set_quantity('stage',kwargs['tide'] )
+         # domain.set_quantity('stage', kwargs['tide'])
+         domain.set_quantity('friction', kwargs['friction'])
+
+         print 'Start Set quantity',kwargs['bathy_file']
+
+         domain.set_quantity('elevation',
+                             filename = kwargs['bathy_file'],
+                             use_cache = True,
+                             verbose = True,
+                             alpha = kwargs['alpha'])
+         print 'Finished Set quantity'
+     barrier()
+
+     #------------------------------------------------------
+     # Distribute domain to implement parallelism !!!
+     #------------------------------------------------------
+
+     if numprocs > 1:
+         domain=distribute(domain)
+
+     #------------------------------------------------------
+     # Set domain parameters
+     #------------------------------------------------------
+     print 'domain id', id(domain)
+     domain.set_name(kwargs['aa_scenario_name'])
+     domain.set_datadir(kwargs['output_dir'])
+     domain.set_default_order(2)              # Apply second order scheme
+     domain.set_minimum_storable_height(0.01) # Don't store anything less than 1cm
+     domain.set_store_vertices_uniquely(False)
+     domain.set_quantities_to_be_stored(['stage', 'xmomentum', 'ymomentum'])
+     domain.tight_slope_limiters = 1
+     #domain.set_maximum_allowed_speed(0.1)   # Allow a little runoff (0.1 is OK)
+     print 'domain id', id(domain)
+     domain.beta_h = 0
+
+     #-------------------------------------------------------------------------
+     # Setup boundary conditions
+     #-------------------------------------------------------------------------
+     print 'Available boundary tags', domain.get_boundary_tags()
+     print 'domain id', id(domain)
+     #print 'Reading Boundary file',project.boundaries_dir_namea + '.sww'
+
+     Br = Reflective_boundary(domain)
+     Bd = Dirichlet_boundary([kwargs['tide'],0,0])
+     Bo = Dirichlet_boundary([kwargs['tide']+5.0,0,0])
+
+     if project.source != 'test':
+         print 'start reading boundary file'
+
+         Bf = Field_boundary(kwargs['boundary_file'],
+                             domain, time_thinning=kwargs['time_thinning'], mean_stage=kwargs['tide'],
+                             use_cache=True, verbose=True)
+         domain.set_boundary({'back': Br,
+                              'side': Bd,
+                              'ocean': Bf})
+     else:
+         domain.set_boundary({'back': Br,
+                              'side': Bd,
+                              'ocean': Bd})
+
+     kwargs['input_start_time']=domain.starttime
+
+     print 'finish set boundary'
+
+     #----------------------------------------------------------------------------
+     # Evolve system through time
+     #----------------------------------------------------------------------------
+     t0 = time.time()
+
+     for t in domain.evolve(yieldstep = 240, finaltime = kwargs['starttime']):
+         domain.write_time()
+         domain.write_boundary_statistics(tags = 'ocean')
+
+         if allclose(t, 120):
+             domain.set_boundary({'back': Br, 'side': Bd, 'ocean': Bo})
+
+         if allclose(t, 720):
+             domain.set_boundary({'back': Br, 'side': Bd, 'ocean': Bd})
+
+     # for t in domain.evolve(yieldstep = kwargs['yieldstep'], finaltime = kwargs['midtime']
+     #                        ,skip_initial_step = True):
+     #     domain.write_time()
+     #     domain.write_boundary_statistics(tags = 'ocean')
+     #
+     # for t in domain.evolve(yieldstep = 240, finaltime = kwargs['finaltime']
+     #                        ,skip_initial_step = True):
+     #     domain.write_time()
+     #     domain.write_boundary_statistics(tags = 'ocean')
+
+     x, y = domain.get_maximum_inundation_location()
+     q = domain.get_maximum_inundation_elevation()
+
+     print 'Maximum runup observed at (%.2f, %.2f) with elevation %.2f' %(x,y,q)
+
+     print 'That took %.2f seconds' %(time.time()-t0)
+
+     #kwargs 'completed' must be added to write the final parameters to file
+     kwargs['completed']=str(time.time()-t0)
+
+     if myid==0:
+         store_parameters(**kwargs)
+     barrier
+
+     print 'memory usage before del domain1',mem_usage()
+
+ def export_model(**kwargs):
+     #store_parameters(**kwargs)
+
+     # print 'memory usage before del domain',mem_usage()
+     #del domain
+     print 'memory usage after del domain',mem_usage()
+
+     swwfile = kwargs['output_dir']+kwargs['aa_scenario_name']
+     print 'swwfile',swwfile
+
+     export_grid(swwfile, extra_name_out = 'town',
+                 quantities = ['speed','depth','elevation','stage'], # '(xmomentum**2 + ymomentum**2)**0.5' defaults to elevation
+                 #quantities = ['speed','depth'], # '(xmomentum**2 + ymomentum**2)**0.5' defaults to elevation
+                 timestep = None,
+                 reduction = max,
+                 cellsize = kwargs['export_cellsize'],
+                 NODATA_value = -9999,
+                 easting_min = project.eastingmin,
+                 easting_max = project.eastingmax,
+                 northing_min = project.northingmin,
+                 northing_max = project.northingmax,
+                 verbose = False,
+                 origin = None,
+                 datum = 'GDA94',
+                 format = 'asc')
+
+     inundation_damage(swwfile+'.sww', project.buildings_filename,
+                       project.buildings_filename_out)
+
+ #-------------------------------------------------------------
+ if __name__ == "__main__":
+
+     kwargs={}
+     kwargs['est_num_trigs']=project.trigs_min
+     kwargs['num_cpu']=numprocs
+     kwargs['host']=project.host
+     kwargs['res_factor']=project.res_factor
+     kwargs['starttime']=project.starttime
+     kwargs['yieldstep']=project.yieldstep
+     kwargs['midtime']=project.midtime
+     kwargs['finaltime']=project.finaltime
+
+     kwargs['output_dir']=project.output_run_time_dir
+     kwargs['bathy_file']=project.combined_dir_name+'.txt'
+     # kwargs['bathy_file']=project.combined_small_dir_name + '.pts'
+     kwargs['boundary_file']=project.boundaries_in_dir_name + '.sww'
+     kwargs['file_name']=project.home+'detail.csv'
+     kwargs['aa_scenario_name']=project.scenario_name
+     kwargs['ab_time']=project.time
+     kwargs['res_factor']= project.res_factor
+     kwargs['tide']=project.tide
+     kwargs['user']=project.user
+     kwargs['alpha'] = project.alpha
+     kwargs['friction']=project.friction
+     kwargs['time_thinning'] = project.time_thinning
+     kwargs['dir_comment']=project.dir_comment
+     kwargs['export_cellsize']=project.export_cellsize
+
+     run_model(**kwargs)
+
+     if myid==0:
+         export_model(**kwargs)
+     barrier
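In this revision run_perth.py is restructured into run_model() and export_model(), both driven by a single kwargs dictionary assembled in the __main__ block from project.py constants and recorded with store_parameters(). The sketch below is purely illustrative and not part of the changeset: it strips out all ANUGA calls and uses placeholder values to show just the driver pattern.

    # Minimal sketch of the kwargs-driven driver pattern used by run_perth.py r5111.
    # Values are placeholders standing in for the project.py constants.
    import time

    def run_model(**kwargs):
        # All tunables arrive in one dict, so the same record that configures
        # the run can be stored alongside the results (store_parameters in ANUGA).
        print 'running scenario', kwargs['aa_scenario_name'], 'with tide', kwargs['tide']
        t0 = time.time()
        # ... mesh generation, set_quantity and the evolve loop would go here ...
        kwargs['completed'] = str(time.time()-t0)   # appended before the final store

    def export_model(**kwargs):
        # Post-processing step; the real script calls export_grid and inundation_damage.
        print 'exporting', kwargs['aa_scenario_name'], 'at cellsize', kwargs['export_cellsize']

    if __name__ == "__main__":
        kwargs = {}
        kwargs['aa_scenario_name'] = 'perth'   # project.scenario_name
        kwargs['tide'] = 0.6                   # project.tide
        kwargs['alpha'] = 0.1                  # project.alpha
        kwargs['friction'] = 0.01              # project.friction
        kwargs['export_cellsize'] = 50         # project.export_cellsize
        kwargs['output_dir'] = './outputs/'    # placeholder for project.output_run_time_dir

        run_model(**kwargs)
        export_model(**kwargs)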