Changeset 5786
- Timestamp: Sep 25, 2008, 2:24:38 PM
- Location: anuga_work/production/busselton
- Files: 5 added, 5 edited
anuga_work/production/busselton/build_busselton.py
    (diff r5646 -> r5786)

    The module docstring is rewritten to describe the elevation build step rather than a
    copied-over Dampier run script:

    -  """Script for running tsunami inundation scenario for Dampier, WA, Australia.
    -
    -  Source data such as elevation and boundary data is assumed to be available in
    -  directories specified by project.py
    -  The output sww file is stored in project.output_time_dir
    -
    -  The scenario is defined by a triangular mesh created from project.polygon,
    -  the elevation data and a simulated submarine landslide.
    -
    -  Ole Nielsen and Duncan Gray, GA - 2005 and Jane Sexton, Nick Bartzis, GA - 2006
    -  """
    +  """
    +  Script for building the elevation data to run a tsunami inundation scenario
    +  for busselton, WA, Australia.
    +
    +  Input: elevation data from project.py
    +  Output: pts file stored in project.topographies_dir
    +  The run_busselton.py is reliant on the output of this script.
    +  """

    store_parameters is dropped from the data_manager import:

    -  from anuga.shallow_water.data_manager import start_screen_catcher, copy_code_files, store_parameters
    +  from anuga.shallow_water.data_manager import start_screen_catcher, copy_code_files

    Minor clean-ups: spacing in the 'USER:', "project.poly_all" and
    "project.combined_dir_name" print statements, '# topography directory filenames'
    becomes '# input elevation directory filenames', a matching '# output elevation
    directory filenames' comment is added, and "creates DEMs from ascii data" is
    corrected to "creates DEMs from asc data".

    The onshore dem2pts call loses its commented-out clipping arguments:

    -  dem2pts(onshore_dir_name,
    -          # easting_min=project.eastingmin,
    -          # easting_max=project.eastingmax,
    -          # northing_min=project.northingmin,
    -          # northing_max=project.northingmax,
    -          use_cache=True,
    -          verbose=True)
    +  dem2pts(onshore_dir_name, use_cache=True, verbose=True)

    The Geospatial_data objects are rebuilt with one progress print per object and
    without the verbose flag; the coastal and offshore file names (including their
    '.txt' extensions) now come straight from project.py, the two coastal datasets are
    renamed G3 and G4, the commented-out island DEM conversion is removed, and a new
    "Combine, clip and export dataset" section header is added before the combination
    step.

    The combined, clipped dataset is now exported as a pts file (the txt export is kept
    as a commented-out option for comparison in ARC), and the large commented-out
    "Convert URS to SWW file for boundary conditions" block (urs2sww /
    urs_ungridded2sww) at the end of the script is deleted:

    -  G_clipped.export_points_file(project.combined_dir_name + '.txt')
    -  print 'project.combined_dir_name + .txt', project.combined_dir_name + '.txt'
    +  G_clipped.export_points_file(project.combined_dir_name + '.pts')
    +  #G_clipped.export_points_file(project.combined_dir_name + '.txt') #Use for comparison in ARC
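    For readers unfamiliar with the ANUGA geospatial workflow in this file, the build
    script boils down to the pattern below. This is a minimal sketch, not the committed
    script: the file names are placeholders, and the clip call is an assumption (the
    clipping step sits in diff context that is not shown), while the other calls follow
    those visible in the changeset.

    # Minimal sketch of the elevation build pattern (hypothetical file names).
    from anuga.shallow_water.data_manager import convert_dem_from_ascii2netcdf, dem2pts
    from anuga.geospatial_data.geospatial_data import Geospatial_data
    from anuga.utilities.polygon import read_polygon

    # 1. Gridded onshore data: ascii grid -> NetCDF DEM -> pts file
    convert_dem_from_ascii2netcdf('onshore_dem', basename_out='onshore_dem',
                                  use_cache=True, verbose=True)
    dem2pts('onshore_dem', use_cache=True, verbose=True)

    # 2. Load every dataset as a Geospatial_data object
    G_onshore = Geospatial_data(file_name='onshore_dem.pts')
    G_coast   = Geospatial_data(file_name='coastline.txt')    # x,y,elevation with title row
    G_bathy   = Geospatial_data(file_name='bathymetry.txt')

    # 3. Combine, clip to the area of interest and export a single pts file
    G = G_onshore + G_coast + G_bathy
    poly_all = read_polygon('poly_all_extend.csv')
    G_clipped = G.clip(poly_all)            # assumed clip call; exact form not shown in the diff
    G_clipped.export_points_file('combined_elevation.pts')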
anuga_work/production/busselton/export_results_all.py
    (diff r5669 -> r5786)

    The exported run directory and the list of sww files are updated for the new naming
    scheme (event number and alpha in the directory name, with the run output split
    across three sww files), and Dunsborough is dropped from the export areas:

    -  time_dir = '20080815_103818_run_final_0_polyline_newExtent_kvanputt'
    +  time_dir = '20080910_154939_run_final_0.6_27255_alpha0.1_kvanputt'

    -  name2 = directory+time_dir+sep+'busselton_time_38340'+sep+project.scenario_name+'_time_38340_0'
    -
    -  names = [name1, name2]
    +  name2 = directory+time_dir+sep+'sww2'+sep+project.scenario_name+'_time_38460_0'
    +  name3 = directory+time_dir+sep+'sww3'+sep+project.scenario_name+'_time_76920_0'
    +
    +  names = [name1, name2, name3]

    -  area = ['Busselton', 'Bunbury', 'Dunsborough']
    +  area = ['Busselton', 'Bunbury']
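    The export therefore has to be pointed at each sww file the run produced (the base
    file plus the '_time_38460_0' and '_time_76920_0' continuations). A small sketch of
    assembling those names; the directory, time_dir and scenario values here are
    illustrative, not the project's real ones.

    import os

    output_dir = '/data/outputs/'                                         # placeholder
    time_dir = '20080910_154939_run_final_0.6_27255_alpha0.1_kvanputt'
    scenario_name = 'busselton'

    base = os.path.join(output_dir, time_dir)
    names = [os.path.join(base, scenario_name),                            # first sww file
             os.path.join(base, 'sww2', scenario_name + '_time_38460_0'),  # continuation files
             os.path.join(base, 'sww3', scenario_name + '_time_76920_0')]

    for name in names:
        print 'exporting grids from', name + '.sww'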
anuga_work/production/busselton/get_gauges.py
    (diff r5608 -> r5786)

    Gauge output is now keyed by the sts permutation index rather than by rounded
    easting/northing coordinates, and all output goes to the event-specific boundary
    directory:

    +  permutation = fid.variables['permutation'][:]

    -  fid_max = open(project.boundaries_dir+sep+maxname,'w')
    -  s = ' x, y, max_stage \n'
    +  fid_max = open(project.boundaries_dir_event+sep+maxname,'w')
    +  s = 'index, x, y, max_stage \n'

    -      locx=int(x[j])
    -      locy=int(y[j])
    +      index = permutation[j]

    -      s = '%.6f, %.6f, %.6f\n' %(x[j], y[j], max(stage))
    +      s = '%d, %.6f, %.6f, %.6f\n' %(index, x[j], y[j], max(stage))

    -      fid_sts = open(project.boundaries_dir+sep+basename+'_'+str(locx)+'_'+str(locy)+'.csv', 'w')
    +      fid_sts = open(project.boundaries_dir_event+sep+basename+'_'+str(index)+'.csv', 'w')

    -  quantities,elevation,time=get_sts_gauge_data(os.path.join(project.boundaries_dir,project.scenario_name),verbose=False)
    +  quantities,elevation,time=get_sts_gauge_data(os.path.join(project.boundaries_dir_event,project.scenario_name),verbose=False)
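    The change relies on the 'permutation' variable stored in the sts file: column j of
    each quantity corresponds to original gauge index permutation[j], so naming the
    per-gauge CSVs by that index keeps them traceable back to the hazard-map gauge list.
    A rough sketch of the pattern; the NetCDF reader and variable names follow the calls
    visible in the diff, but the file paths are placeholders and the loop is cut down to
    stage only.

    from Scientific.IO.NetCDF import NetCDFFile

    fid = NetCDFFile('busselton.sts', 'r')           # placeholder path
    permutation = fid.variables['permutation'][:]    # original gauge index for each column
    x = fid.variables['x'][:] + fid.xllcorner
    y = fid.variables['y'][:] + fid.yllcorner
    time = fid.variables['time'][:]
    stage = fid.variables['stage'][:]                # shape (n_timesteps, n_gauges)

    for j in range(len(x)):
        index = permutation[j]                       # hazard-map gauge index for column j
        out = open('sts_gauge_%d.csv' % index, 'w')
        out.write('time, stage\n')
        for k in range(len(time)):
            out.write('%.6f, %.6f\n' % (time[k], stage[k, j]))
        out.close()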
anuga_work/production/busselton/project.py
    (diff r5669 -> r5786)

    The project file is reorganised into commented sections (imports, directory setup,
    initial conditions, output filename, input data, output elevation data, directory
    structure, input/output locations, interior regions, export regions). The main
    changes are:

    Docstring and imports

    -  # -*- coding: cp1252 -*-
    -  """Common filenames and locations for topographic data, meshes and outputs.
    -  """
    +  """Common filenames and locations for elevation, meshes and outputs.
    +  This script is the heart of all scripts in the folder
    +  """

    umask is dropped from the os import, the commented-out redfearn import is removed,
    and urs2sts and create_sts_boundary are now imported from
    anuga.shallow_water.data_manager.

    Model parameters

    The boundary source is now selected by an event number linked to the hazard map
    rather than by a named source, the parameters are commented, and export_cellsize=25
    and source='polyline' are removed (rev_num = 'newExtent' is unchanged but gains a
    "Revision numbers - for comparisons study" header):

    +  tide = 0 #0.6
    +  #event_number = 27255   # linked to hazard map
    +  event_number = 27283
    +  alpha = 0.1             # smoothing parameter for mesh
    +  friction=0.01           # manning's friction coefficient
    +  starttime=0
    +  finaltime=80000         # final time for simulation
    +  setup='final'           # 'trial' or 'basic' give a coarser, faster, less accurate run

    Output directory name

    -  dir_comment='_'+setup+'_'+str(tide)+'_'+str(source)+'_'+str(rev_num)+'_'+str(user)
    +  dir_comment='_'+setup+'_'+str(tide)+'_'+str(event_number)+'_'+'alpha'+str(alpha)+'_'+str(user)

    Input data

    The coastline file names now carry their '.txt' extension
    (coast_name = 'Busselton_Contour0.txt', coast_name1 = 'Busselton_BeachSurvey.txt'),
    gauge_name2 becomes 'thinned_MGA50.csv' (was 'thinned_MGA50+1-1.csv'), and two
    boundary inputs are added: order_filename = 'thinned_boundary_ordering.txt' (thinned
    ordering file from the hazard map, index/latitude/longitude with title, points to be
    in clockwise sequence; boundary_tags are checked in run_busselton.py) and
    landward = 'landward_bounding_polygon.txt'.

    Output elevation data

    -  combined_name_small = 'busselton_combined_elevation_smaller'
    +  combined_smaller_name = 'busselton_combined_elevation_smaller'

    Directory structure

    The directory variables (topographies, polygons, tide_data, boundaries, outputs,
    gauges, meshes) are gathered into one block and polygons_dir drops the 'New_Extents'
    subfolder. New derived paths are added, the old boundaries naming, the commented-out
    exmouth/test boundary source blocks and the buildings/community gauge file names are
    removed:

    -  meshes_dir_name = meshes_dir + scenario_name
    -  boundaries_dir_name = boundaries_dir + scenario_name
    +  meshes_dir_name = meshes_dir + scenario_name + '.msh'      # created by run_busselton.py
    +  order_filename_dir = boundaries_dir + order_filename        # used by build_boundary.py
    +  landward_dir = boundaries_dir + landward                    # used by run_busselton.py
    +  boundaries_dir_event = boundaries_dir + str(event_number) + sep  # sts files per event

    Interior regions

    poly_mainland ('initial_condition.csv', used to set the initial stage offcoast only)
    is now read at the top of the section, and the digitised polygons are renamed into
    areas of interest (aoi), areas of significance (aos) and shallow water (sw) with the
    same resolutions as before:

    -  poly_large      = 'coast_5km_d20m.csv',  40000*res_factor
    -  poly_busselton  = 'busselton_1km.csv',   500*res_factor
    -  poly_bunbury    = 'bunbury_1km.csv',     500*res_factor
    -  poly_busselton2 = 'busselton_2km.csv',   10000*res_factor
    -  poly_bunbury2   = 'bunbury_2km.csv',     10000*res_factor
    -  poly_island1    = 'island1.csv',         10000*res_factor
    -  poly_island2    = 'island2.csv',         10000*res_factor
    +  poly_aoi1 = 'busselton_1km.csv',  res_aoi1 = 500*res_factor    # Busselton
    +  poly_aoi2 = 'bunbury_1km.csv',    res_aoi2 = 500*res_factor    # Bunbury
    +  poly_aos1 = 'busselton_2km.csv',  res_aos1 = 10000*res_factor  # Busselton
    +  poly_aos2 = 'busselton_2km.csv',  res_aos2 = 10000*res_factor  # Bunbury
    +  poly_aos3 = 'island1.csv',        res_aos3 = 10000*res_factor  # island refinement
    +  poly_aos4 = 'island2.csv',        res_aos4 = 10000*res_factor  # island refinement
    +  poly_sw1  = 'coast_5km_d20m.csv', res_sw1  = 40000*res_factor  # shallow water

    interior_regions is rebuilt from these pairs (with the reminder to check that all
    regions are included), and the triangle-count print becomes
    'min estimated number of triangles'.

    Export (asc grid) regions

    The Busselton, Bunbury and Dunsborough clipping extents are replaced with Geordie
    Bay, Sorrento, Fremantle and Rockingham extents, with the note that final inundation
    maps should only be created in regions of the finest mesh:

    -  Busselton:   x 340000-352000, y 6271500-6280000
    -  Bunbury:     x 369000-381000, y 6308000-6316500
    -  Dunsborough: x 321000-327500, y 6277000-6282000
    +  Geordie Bay: x 358000-362000, y 6458500-6461000
    +  Sorrento:    x 379000-382500, y 6477000-6480000
    +  Fremantle:   x 376000-388000, y 6449000-6461000
    +  Rockingham:  x 373500-385500, y 6424000-6433000
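    The interior-region renaming keeps the structure ANUGA expects: a list of
    [polygon, maximum_triangle_area] pairs that create_mesh_from_regions refines inside
    the overall bounding polygon, with number_mesh_triangles giving a rough cost
    estimate before any mesh is built. A cut-down sketch with illustrative polygon files
    and only two of the regions; the calls themselves follow those visible in the diff.

    from anuga.utilities.polygon import read_polygon, number_mesh_triangles

    res_factor = 1

    # Overall extent and its (coarsest) maximum triangle area
    poly_all = read_polygon('poly_all_extend.csv')        # illustrative path
    res_poly_all = 100000 * res_factor

    # Nested refinement: finer maximum triangle areas near the communities of interest
    poly_aoi1 = read_polygon('busselton_1km.csv')
    res_aoi1 = 500 * res_factor
    poly_sw1 = read_polygon('coast_5km_d20m.csv')
    res_sw1 = 40000 * res_factor

    interior_regions = [[poly_aoi1, res_aoi1],
                        [poly_sw1, res_sw1]]

    # Rough estimate of the mesh size implied by these resolutions
    trigs_min = number_mesh_triangles(interior_regions, poly_all, res_poly_all)
    print 'min estimated number of triangles', trigs_min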
anuga_work/production/busselton/run_busselton.py
    (diff r5669 -> r5786)

    The docstring no longer describes a Dampier scenario: it now documents the inputs
    (the sts file from build_boundary.py for the chosen event, the pts file from
    build_busselton.py and the project file) and the output (an sww file in
    project.output_run_time_dir, on which export_results_all.py and get_timeseries.py
    rely), and adds "Ole Nielsen, Jane Sexton and Kristy Van Putten - 2008" to the
    attribution.

    Imports: the parallel_api import is removed (only get_processor_name from
    parallel_abstraction remains) and Polygon_function is imported at module level:

    -  #from anuga_parallel.parallel_api import distribute, numprocs, myid, barrier
    +  from polygon import Polygon_function

    Serial execution: the 'if myid == 0:' guards and '#barrier()' calls around
    copy_code_files, store_parameters, mesh creation, initial conditions and the final
    store_parameters are removed, and the domain-distribution block is commented out:

    -  if numprocs > 1:
    -      domain=distribute(domain)
    +  ## if numprocs > 1:
    +  ##     domain=distribute(domain)

    Boundary and landward inputs now come from the event directory and project.py:

    -  urs_bounding_polygon=create_sts_boundary(os.path.join(project.boundaries_dir,project.scenario_name))
    -  landward_bounding_polygon = read_polygon(project.boundaries_dir+'landward_bounding_polygon.txt')
    +  urs_bounding_polygon=create_sts_boundary(os.path.join(project.boundaries_dir_event,project.scenario_name))
    +  landward_bounding_polygon = read_polygon(project.landward_dir)

    A comment notes that the boundary tags refer to the four landward points (five
    segments starting at N):

       boundary_tags={'back': [N+1,N+2,N+3,N+4,N+5], 'side': [N,N+6], 'ocean': range(N)}

    Mesh creation now uses the cached, pre-suffixed mesh name with verbose output, and
    the commented-out find_optimal_smoothing_parameter block is removed:

    -  create_mesh_from_regions(..., filename=project.meshes_dir_name+'.msh',
    -                           use_cache=False, verbose=False)
    -  domain = Domain(project.meshes_dir_name+'.msh', use_cache=False, verbose=True)
    +  create_mesh_from_regions(..., filename=project.meshes_dir_name,
    +                           use_cache=True, verbose=True)
    +  domain = Domain(project.meshes_dir_name, use_cache=False, verbose=True)

    The elevation keyword is renamed and no longer cached:

    -  domain.set_quantity('elevation', filename = kwargs['bathy_file'],
    -                      use_cache = True, verbose = True, alpha = kwargs['alpha'])
    +  domain.set_quantity('elevation', filename = kwargs['elevation_file'],
    +                      use_cache = False, verbose = True, alpha = kwargs['alpha'])

    The domain name keyword becomes 'scenario_name' (was 'aa_scenario_name'), the sts
    boundary file is taken from the event directory, and the 'back' boundary changes
    from Dirichlet to reflective:

    -  boundary_urs_out=project.boundaries_dir_name
    -  domain.set_boundary({'back': Bd,
    -                       'side': Bd,
    -                       'ocean': Bf}) #changed from Bf to Bd for large wave
    +  boundary_urs_out=project.boundaries_dir_event + sep + project.scenario_name
    +  domain.set_boundary({'back': Br,
    +                       'side': Bd,
    +                       'ocean': Bf})

    The final timing print becomes 'Simulation took %.2f seconds', a comment notes that
    the maximum-runup output should be checked against the resultant inundation map, and
    the __main__ kwargs are trimmed to finaltime, output_dir, elevation_file
    (project.combined_dir_name+'.pts' instead of the old bathy_file '.txt'),
    scenario_name, tide, alpha and friction; the remaining bookkeeping keywords
    (est_num_trigs, num_cpu, host, res_factor, starttime, yieldstep, file_name, ab_time,
    user, time_thinning, dir_comment, export_cellsize) are removed or commented out, and
    the trailing #barrier is deleted.
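    The boundary change ('back': Bd -> Br) is the substantive modelling change in this
    file: the landward edge now reflects rather than holding a fixed stage. For
    orientation, a condensed sketch of how the run script wires the three boundary tags
    to ANUGA boundary objects, following the calls visible in the diff; the tide value,
    event directory, mesh file name and import line are placeholders/assumptions rather
    than the committed configuration.

    import os
    from anuga.shallow_water import Domain, Reflective_boundary, Dirichlet_boundary, Field_boundary
    from anuga.shallow_water.data_manager import create_sts_boundary
    from anuga.utilities.polygon import read_polygon

    tide = 0.0                                                   # placeholder
    sts_base = os.path.join('boundaries', '27283', 'busselton')  # event directory, no extension

    # Ocean edge from the ordered sts file; landward edge from a digitised polygon
    urs_bounding_polygon = create_sts_boundary(sts_base)
    landward_bounding_polygon = read_polygon('landward_bounding_polygon.txt')
    bounding_polygon = urs_bounding_polygon + landward_bounding_polygon

    N = len(urs_bounding_polygon) - 1                   # number of ocean segments
    boundary_tags = {'back': [N+1, N+2, N+3, N+4, N+5], # five landward segments
                     'side': [N, N+6],                  # two connecting segments
                     'ocean': range(N)}

    domain = Domain('busselton.msh', use_cache=False, verbose=True)   # placeholder mesh file

    Br = Reflective_boundary(domain)                    # solid landward edge
    Bd = Dirichlet_boundary([tide, 0, 0])               # still water on the sides
    Bf = Field_boundary(sts_base + '.sts', domain,      # tsunami time series on the ocean edge
                        mean_stage=tide, time_thinning=1,
                        default_boundary=Bd, use_cache=True, verbose=True,
                        boundary_polygon=bounding_polygon)

    domain.set_boundary({'back': Br, 'side': Bd, 'ocean': Bf})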