Changeset 7678
- Timestamp: Apr 9, 2010, 3:16:39 PM
- Location: branches/anuga_1_1/anuga_work/production/bunbury_storm_surge_2009
- Files: 2 added, 5 edited
branches/anuga_1_1/anuga_work/production/bunbury_storm_surge_2009/Arc_asc2raster_GDA94z50.py
(r7613 → r7678)

  scenario_dir="\\\\nas2\\gemd\\georisk_models\\inundation\\data\\western_australia\\bunbury_storm_surge_scenario_2009\\"
+ #scenario_dir = "C:\\WorkSpace\\data\\bunbury_storm_surge_scenario_2009\\"
  output_dir = "anuga\\outputs\\"

- time_dir1 = '20100103_102505_run_storm_surge_final_0_alby_coarse_lfountai'
+ time_dir1 = '20100311_181323_run_storm_surge_final_0_alby_coarse_fountl' #No storm gate
+ time_dir2 = '20100312_132550_run_storm_surge_final_0_alby_coarse_fountl' #Closed storm gate

- time_dirs = [time_dir1] ##, time_dir2, time_dir3, time_dir4, time_dir5,
+ time_dirs = [time_dir1, time_dir2] ##, time_dir2, time_dir3, time_dir4, time_dir5,
  ##time_dir6, time_dir7, time_dir8, time_dir9, time_dir10]
...
  folder = scenario_dir + output_dir + time_dir + '\\'
  raster_gbd = folder + 'raster.gdb'
- ## elevation_gdb = folder + 'elevation.gdb'
- #contour = raster_gbd + '\\contour_dep'
- land = scenario_dir + "ArcGIS\\data.gdb\\initial_conditions_10m"
- ## ocean = scenario_dir + "map_work\\Perth.gdb\\Outlines\\initial_condition_ocean"
+ land = scenario_dir + "ArcGIS\\data.gdb\\initial_conditions_10m_clip"
+ ocean = scenario_dir + "ArcGIS\\data.gdb\\initial_conditions_ocean"

  print 'Process: Create File GDB'
- ## gp.createFileGDB_management(folder, "elevation")
- ## gp.CreateFileGDB_management(folder, "raster")
+ # gp.CreateFileGDB_management(folder, "raster")

  gp.Workspace = raster_gbd
- ## gp.Workspace = elevation_gdb

  print gp.Workspace
...
  #replication dictionary
  replicate = (('gold_coast', ''), ('_time_29220_0', 'b'), ('_time_58440_0', 'c'),
-              ('_time_28860_0', 'b'), ('_time_57720_0', 'c'),
-              ('_', ''), ('max','M_'), ('depth','_dep_'),
+              ('_time_28860_0', 'b'), ('_time_57720_0', 'c'), ('storm_gate', 'gate'),
+              ('_', ''), ('max','M_'), ('depth','_dep_'), ('albycoarse', ''),
               ('speed', '_spe_'), ('elevation', '_ele_'), ('stage','_stage'))

  generate_filename = []
- ## input_ascii = glob.glob(folder + '*elevation*.asc')
- input_ascii = glob.glob(folder + '*depth*.asc')
+ # input_ascii = glob.glob(folder + '*elevation*.asc')
+ # input_ascii = glob.glob(folder + '*speed*.asc')
+ # input_ascii = glob.glob(folder + '*depth*.asc')
+ input_ascii = glob.glob(folder + '*stage*.asc')

  print time_dir
...
  print 'Process: Extract by Mask'
- gp.ExtractByMask_sa(output_DEM, land, output_extract)
+ # gp.ExtractByMask_sa(output_DEM, land, output_extract)
+ # gp.ExtractByMask_sa(output_DEM, ocean, output_extract)
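The renaming logic this change extends is the heart of the script: each ANUGA .asc export is picked up by glob and its long filename is squeezed through the `replicate` table of (old, new) substitutions before conversion into the file geodatabase. A minimal sketch of that pattern outside ArcGIS follows; the substitution table is copied from the new revision, but the folder path and the standalone helper are illustrative assumptions, not part of the changeset.

# Illustrative sketch only -- the folder path and helper function are assumptions.
import glob
import os

replicate = (('gold_coast', ''), ('_time_29220_0', 'b'), ('_time_58440_0', 'c'),
             ('_time_28860_0', 'b'), ('_time_57720_0', 'c'), ('storm_gate', 'gate'),
             ('_', ''), ('max', 'M_'), ('depth', '_dep_'), ('albycoarse', ''),
             ('speed', '_spe_'), ('elevation', '_ele_'), ('stage', '_stage'))

def short_raster_name(ascii_path):
    """Apply each (old, new) substitution in turn to the .asc base name."""
    name = os.path.splitext(os.path.basename(ascii_path))[0]
    for old, new in replicate:
        name = name.replace(old, new)
    return name

folder = 'C:\\WorkSpace\\data\\outputs\\'  # hypothetical local folder
for input_ascii in glob.glob(folder + '*stage*.asc'):
    print(short_raster_name(input_ascii))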
branches/anuga_1_1/anuga_work/production/bunbury_storm_surge_2009/build_elevation.py
(r7658 → r7678)

  G9 = geospatial_data[keylist[8]]
  G10 = geospatial_data[keylist[9]]
- #G11 = geospatial_data[keylist[10]]
+ G11 = geospatial_data[keylist[10]]
+ G12 = geospatial_data[keylist[11]]
+ G13 = geospatial_data[keylist[12]]
...
  #~ print 'G10', G10.attributes

- G = G1 + G2 + G3 + G4 + G5 + G6 + G7 + G8 + G9 + G10 #+ G11
+ G = G1 + G2 + G3 + G4 + G5 + G6 + G7 + G8 + G9 + G10 + G11 + G12 + G13

  # G = None
...
  print 'Export combined DEM file'
- G.export_points_file(project.combined_elevation + '.pts')
+ G.export_points_file(project.combined_elevation + '_GEMS.pts')

  print 'Do txt version too'
...
  #~ g.export_points_file(join(project.topographies_folder, (str(keylist[i]) +'_export.txt')))

- G.export_points_file(project.combined_elevation + '.txt')
+ G.export_points_file(project.combined_elevation + '_GEMS.txt')

  print 'Export individual text files'
...
  G1.export_points_file(join(project.topographies_folder, keylist[0] +'_export.txt'))
  G2.export_points_file(join(project.topographies_folder, keylist[1] +'_export.txt'))
- G3.export_points_file(join(project.topographies_folder, keylist[2] +'_export.txt'))
- G4.export_points_file(join(project.topographies_folder, keylist[3] +'_export.txt'))
- G5.export_points_file(join(project.topographies_folder, keylist[4] +'_export.txt'))
- G6.export_points_file(join(project.topographies_folder, keylist[5] +'_export.txt'))
- G7.export_points_file(join(project.topographies_folder, keylist[6] +'_export.txt'))
- G8.export_points_file(join(project.topographies_folder, keylist[7] +'_export.txt'))
- G9.export_points_file(join(project.topographies_folder, keylist[8] +'_export.txt'))
- G10.export_points_file(join(project.topographies_folder, keylist[9] +'_export.txt'))
- #G11.export_points_file(join(project.topographies_folder, keylist[10] +'_export.txt'))
+ G3.export_points_file(join(project.topographies_folder, keylist[2][:-4] +'_export.txt'))
+ G4.export_points_file(join(project.topographies_folder, keylist[3][:-4] +'_export.txt'))
+ G5.export_points_file(join(project.topographies_folder, keylist[4][:-4] +'_export.txt'))
+ G6.export_points_file(join(project.topographies_folder, keylist[5][:-4] +'_export.txt'))
+ G7.export_points_file(join(project.topographies_folder, keylist[6][:-4] +'_export.txt'))
+ G8.export_points_file(join(project.topographies_folder, keylist[7][:-4] +'_export.txt'))
+ G9.export_points_file(join(project.topographies_folder, keylist[8][:-4] +'_export.txt'))
+ G10.export_points_file(join(project.topographies_folder, keylist[9][:-4] +'_export.txt'))
+ G11.export_points_file(join(project.topographies_folder, keylist[10][:-4] +'_export.txt'))
+ G12.export_points_file(join(project.topographies_folder, keylist[11][:-4] +'_export.txt'))
+ G13.export_points_file(join(project.topographies_folder, keylist[12][:-4] +'_export.txt'))
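Besides adding the three new datasets, the substantive change in the per-dataset export loop is the `[:-4]` slice, which strips the four-character extension ('.txt' or '.csv') from each keylist entry so the exported files become `<dataset>_export.txt` rather than `<dataset>.txt_export.txt`. A small illustrative sketch of that slicing, plus an extension-length-agnostic alternative using os.path.splitext, follows; the sample filenames are taken from project.py, and the comparison itself is not part of the changeset.

# Illustrative only: compare the changeset's [:-4] slice with os.path.splitext.
import os

keylist = ['DPI.txt', 'Bunbury_2010_shoalest.csv', 'Leschenault_TIN.txt']

for key in keylist:
    sliced = key[:-4] + '_export.txt'                  # what the new revision does
    robust = os.path.splitext(key)[0] + '_export.txt'  # works for any extension length
    print(sliced + '  ' + robust)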
branches/anuga_1_1/anuga_work/production/bunbury_storm_surge_2009/export_results_max.py
(r7613 → r7678)

  directory = project.output_folder
- time_dir1 = '20100103_102505_run_storm_surge_final_0_alby_coarse_lfountai'
+ time_dir1 = '20100311_181323_run_storm_surge_final_0_alby_coarse_fountl' #No storm gate
+ #time_dir2 = '20100312_132550_run_storm_surge_final_0_alby_coarse_fountl' #Closed storm gate

- time_dirs = [time_dir1]
+ time_dirs = [time_dir1] #, time_dir2]

  # sww filename extensions ie. hobart_time_17640_0.sww, input into list 17640
...
  #Modify the cellsize value to set the size of the raster you require
  #Take into account mesh size when aplying this paramater
- cellsize = 20 #250
+ cellsize = 0.5 #10

  #Now set the timestep at which you want the raster generated.
  #Either set the actual timestep required or use 'None' to indicate that
  #you want the maximum values in the raster over all timesteps
- timestep = 0
+ timestep = None

  # Set the special areas of interest. If none, do: area=['All']
...
  #area = ['South', 'NW', 'Hobart']
- area = ['All']
+ area = ['Storm_gate'] #['All'] #

  # one or more key strings from var_equations below
- var = ['elevation']
+ var = ['depth', 'stage', 'speed']

  ######
...
  sww2dem(name, basename_out = outname,
          quantity = quantityname,
-         timestep = timestep,
+         reduction = timestep,
          cellsize = cellsize,
          easting_min = easting_min,
...
          northing_min = northing_min,
          northing_max = northing_max,
-         reduction = max,
          verbose = True,
          format = 'asc')
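The call to sww2dem now passes a single `reduction` argument instead of the earlier `timestep=...` plus `reduction=max` pair, and the script's own comments say that `timestep = None` should mean "maximum values over all timesteps". A hedged sketch of that convention as a standalone helper follows; `select_reduction` is a hypothetical name, and how sww2dem itself interprets `reduction=None` is not shown in this diff.

# Hypothetical helper, not an ANUGA function: map the user-facing 'timestep'
# setting onto one reduction value, following the convention described in the
# script's comments (None -> maximum over all timesteps, int -> that timestep).
def select_reduction(timestep):
    return max if timestep is None else timestep

print(select_reduction(None))  # <built-in function max>
print(select_reduction(0))     # 0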
branches/anuga_1_1/anuga_work/production/bunbury_storm_surge_2009/project.py
(r7658 → r7678)

  friction = 0.01 # manning's friction coefficient
  starttime = 86400 # start time for simulation - equivalent to 0000h 4 April 1978
- finaltime = 172800 # final time for simulation - 24 hours for TC Alby
+ duration = 86400
+ #finaltime = 172800 # final time for simulation - 24 hours for TC Alby

  setup = 'storm_surge_final' # This can be one of four values
...
  # Format for point is x,y,elevation (with header)
  point_filenames = ['DPI.txt', # Bathymetry data from DPI
-                    'Busselton_Chart_Clip_ss.txt', # Clipped from Busselton_Chart - see Busselton Tsunami Scenario 2009
-                    'Busselton_NavyFinal_Clip_ss.txt', # Clipped from Busselton_NavyFinal - see Busselton Tsunami Scenario 2009
-                    'DPI5U1A02_01a_edited_v2.txt', # Bathymetric LiDAR from DPI - split into manageable pieces and edited so
-                    'DPI5U1A02_01b_edited_v2.txt', # depths below 0 m are negative, and all soundings on land (ie positive)
-                    'DPI5U1A02_01c_edited_v2.txt', # are removed as these are not corrected to "bare earth".
+                    'Busselton_Chart.txt', # see Busselton Tsunami Scenario 2009
+                    'Busselton_NavyFinal.txt', # see Busselton Tsunami Scenario 2009
+                    'Busselton_250m.txt', # see Busselton Tsunami Scenario 2009
+                    'DPI5U1A02_01a_edited_v2.txt', # Bathymetric LiDAR from DPI - split into manageable pieces and edited so
+                    'DPI5U1A02_01b_edited_v2.txt', # depths below 0 m are negative, and all soundings on land (ie positive)
+                    'DPI5U1A02_01c_edited_v2.txt', # are removed as these are not corrected to "bare earth".
                     'DPI5U1A02_01d_edited_v2.txt',
-                    'DPI5U1A02_01e_edited_v2.txt']
-                    # 'Leschenault_TIN.txt'] # TIN created over the Leschenault Estuary and Inlet]
+                    'DPI5U1A02_01e_edited_v2.txt',
+                    'Bunbury_2010_shoalest.csv', # Multibeam infill for areas not captured in the Bathy LiDAR
+                    # 'bu0506hy_cut.csv', # Hydro survey of Leschenault Inlet and Estuary, extra colums cut
+                    'Leschenault_TIN.txt'] # TIN created over the Leschenault Estuary and Inlet]

  # BOUNDING POLYGON - for data clipping and estimate of triangles in mesh
  # Used in build_elevation.py & run_model.py
  # Format for points easting,northing (no header)
- bounding_polygon_filename = 'bounding_polygon_ss.csv'
+ bounding_polygon_filename = 'bounding_polygon_GEMS.csv' #'bounding_polygon_ss.csv'
  bounding_polygon_maxarea = 50000

  # INTERIOR REGIONS - for designing the mesh
...
  # Format for points easting,northing (no header)
  interior_regions_data = [['intermediate.csv', 2500],
-                          ['area_of_interest.csv', 100],
-                          ['storm_gate_area.csv', 1],
-                          ['stormgates.csv', 1]]
+                          #['aoi_small.csv',200],
+                          ['area_of_interest.csv', 200],
+                          ['storm_gate_area.csv', 20]]
+                          # ['stormgates.csv', 1]]

  # LAND - used to set the initial stage/water to be offcoast only
...
  land_initial_conditions_filename = [['initial_conditions.csv', 0]]

- # GEMS order filename
- # Format is index,northing, easting, elevation (without header)
+ # GEMS order filename - should be in same direction as landward boundary points ie clockwise or anti-clockwise
+ # Format is northing, easting (without header)
  gems_order_filename = 'gems_boundary_order_thinned.csv'
...
  # Format is as for a building file to be read by csv2building_polygons,
  # easting, northing, id, floors (with header)
- storm_gate_filename = 'storm_gates.csv'
+ storm_gate_filename = 'storm_gates_large.csv' #'storm_gates.csv'

  # GAUGES - for creating timeseries at a specific point
...
  # Landward bounding points
- # Format easting,northing (no header)
+ # Format easting,northing (no header) - should be in same direction as GEMS order filename ie clockwise or anti-clockwise
  landward_boundary_filename = 'landward_boundary.csv'
...
  ##mux_input = join(event_folder, mux_input_filename)

+ # Areas for export of results
+ # Used in export_results_max.py
+
+ # Storm gate area:
+ xminStorm_gate=373515
+ xmaxStorm_gate=373582
+ yminStorm_gate=6312314
+ ymaxStorm_gate=6312358
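Two of the project.py changes are easy to sanity-check numerically: the run length is now given as a `duration` relative to `starttime` rather than an absolute `finaltime`, and a small export window is defined around the storm gate for export_results_max.py. An illustrative check of those numbers (not part of the changeset):

# Illustrative check using values from the new revision of project.py.
starttime = 86400   # start time for simulation - equivalent to 0000h 4 April 1978
duration = 86400    # 24 hours for TC Alby
print(starttime + duration)  # 172800, i.e. the old finaltime

xminStorm_gate, xmaxStorm_gate = 373515, 373582
yminStorm_gate, ymaxStorm_gate = 6312314, 6312358
print(xmaxStorm_gate - xminStorm_gate)  # 67 m east-west
print(ymaxStorm_gate - yminStorm_gate)  # 44 m north-south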
branches/anuga_1_1/anuga_work/production/bunbury_storm_surge_2009/run_model.py
(r7658 → r7678)

  from anuga.interface import Field_boundary
  from anuga.interface import create_sts_boundary
- ##from anuga.interface import csv2building_polygons
+ from anuga.interface import csv2building_polygons
  from file_length import file_length
...
  ##from anuga.shallow_water.data_manager import urs2sts
  from anuga.utilities.polygon import read_polygon, Polygon_function
+ from anuga.utilities.system_tools import get_revision_number

  # Application specific imports
...
  start_screen_catcher(project.output_run, 0, 1)

+ print 'SVN revision number: ', get_revision_number()
+
  #-------------------------------------------------------------------------------
  # Create the computational domain based on overall clipping polygon with
...
  print 'Create computational domain'
-
- ### Create the STS file
- ##print 'project.mux_data_folder=%s' % project.mux_data_folder
- ##if not os.path.exists(project.event_sts + '.sts'):
- ##    bub.build_urs_boundary(project.mux_input_filename, project.event_sts)
-
- ### Read in boundary from ordered sts file
- ##gems_boundary = create_sts_boundary(project.event_sts)
-
  #reading the GEMS boundary points
...
  # Add storm surge gate
  #--------------------------
- storm_gate_polygon, storm_gate_height = csv2building_polygons(project.storm_gate, floor_height=4)
+ storm_gate_polygon, storm_gate_height = csv2building_polygons(project.storm_gate, floor_height=4.4)

  gate = []
...
  t0 = time.time()
  for t in domain.evolve(yieldstep=project.yieldstep,
-                        finaltime=project.finaltime,
+                        duration=project.duration,
                         skip_initial_step=False):
      print domain.timestepping_statistics()
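run_model.py still reads the GEMS boundary points in their file order, and the comments added to project.py in this changeset stress that the GEMS order file and the landward boundary points must run in the same rotational direction (both clockwise or both anti-clockwise). A hedged sketch of one way to check that, using the shoelace formula, follows; the helper and the sample coordinates are hypothetical and not part of the changeset.

# Hypothetical check, not from the changeset: verify two boundary point
# sequences share a rotational direction via the sign of the shoelace area.
def signed_area(points):
    """Positive for anti-clockwise ordering, negative for clockwise."""
    area = 0.0
    for (x1, y1), (x2, y2) in zip(points, points[1:] + points[:1]):
        area += x1 * y2 - x2 * y1
    return 0.5 * area

# Sample easting/northing pairs (hypothetical, near the storm gate extents).
gems_points = [(373515.0, 6312314.0), (373582.0, 6312314.0), (373582.0, 6312358.0)]
landward_points = [(373515.0, 6312358.0), (373515.0, 6312314.0), (373582.0, 6312314.0)]

same_direction = (signed_area(gems_points) > 0) == (signed_area(landward_points) > 0)
print(same_direction)  # False here: these two sample sequences run opposite ways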