Changeset 4147
- Timestamp: Jan 8, 2007, 5:59:47 PM (18 years ago)
- Files: 26 edited
Legend (diff notation used below):
- Unmodified (no prefix)
- Added (prefixed with +)
- Removed (prefixed with -)
anuga_core/source/anuga/abstract_2d_finite_volumes/util.py
r4145 → r4147

      savefig('profilefig')

  -   depth_axis = axis([time_min/60.0, time_max/60.0, 0, max(max_depths)*1.1])
  +   depth_axis = axis([time_min/60.0, time_max/60.0, -0.1, max(max_depths)*1.1])
      stage_axis = axis([time_min/60.0, time_max/60.0, min(min_stages), max(max_stages)*1.1])
  -   stage_axis = axis([time_min/60.0, time_max/60.0, -3.0, 3.0])
      vel_axis = axis([time_min/60.0, time_max/60.0, min(max_speeds), max(max_speeds)*1.1])
      mom_axis = axis([time_min/60.0, time_max/60.0, min(max_momentums), max(max_momentums)*1.1])
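These lines set fixed axis limits on the gauge time-series plots: the depth axis now starts at -0.1 instead of 0, and the hard-coded ±3 m stage axis is dropped. A minimal, hypothetical sketch of the same pylab-style axis() call (the times and depth maxima below are invented; util.py derives them from the sww gauge data):

    # Hypothetical illustration only, not ANUGA's util.py itself.
    import matplotlib.pyplot as plt

    time_min, time_max = 0.0, 7200.0      # seconds (made-up values)
    max_depths = [0.0, 1.8, 2.4]          # made-up per-gauge maxima

    fig, ax = plt.subplots()
    # Lower depth limit of -0.1 keeps a zero-depth trace off the axis frame.
    ax.axis([time_min/60.0, time_max/60.0, -0.1, max(max_depths)*1.1])
    plt.savefig('profilefig')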
anuga_work/production/dampier_2006/build_dampier.py
r4049 → r4147

      # output to file
      #------------------------------------------------------------------------------
      copy_code_files(project.output_build_time_dir,__file__,
                      dirname(project.__file__)+sep+ project.__name__+'.py' )
  …
      # Fine pts file to be clipped to area of interest
      #-------------------------------------------------------------------------------
  +   print"project.bounding_polygon",project.bounding_polygon
  +   print"project.combined_dir_name",project.combined_dir_name
  -   '''
      # topography directory filenames
  +   onshore_in_dir_name = project.onshore_in_dir_name
  +   coast_in_dir_name = project.coast_in_dir_name
  +   island_in_dir_name = project.island_in_dir_name
  +   island_in_dir_name1 = project.island_in_dir_name1
  +   island_in_dir_name2 = project.island_in_dir_name2
  +   island_in_dir_name3 = project.island_in_dir_name3
  +   offshore_in_dir_name = project.offshore_in_dir_name
  +   offshore1_in_dir_name = project.offshore1_in_dir_name
      onshore_dir_name = project.onshore_dir_name
      coast_dir_name = project.coast_dir_name
  -   islands_dir_name = project.islands_dir_name
  +   island_dir_name = project.island_dir_name
  +   island_dir_name1 = project.island_dir_name1
  +   island_dir_name2 = project.island_dir_name2
  +   island_dir_name3 = project.island_dir_name3
      offshore_dir_name = project.offshore_dir_name
  -   offshore_dir_name1 = project.offshore_dir_name1
  -   offshore_dir_name2 = project.offshore_dir_name2
  -   offshore_dir_name3 = project.offshore_dir_name3
  -   offshore_dir_name4 = project.offshore_dir_name4
  -   offshore_dir_name5 = project.offshore_dir_name5
  -   offshore_dir_name6 = project.offshore_dir_name6
  -   offshore_dir_name7 = project.offshore_dir_name7
  -   offshore_dir_name8 = project.offshore_dir_name8
  -   offshore_dir_name9 = project.offshore_dir_name9
  -   offshore_dir_name10 = project.offshore_dir_name10
  -   offshore_dir_name11 = project.offshore_dir_name11
  -   offshore_dir_name12 = project.offshore_dir_name12
  -   offshore_dir_name13 = project.offshore_dir_name13
  -   offshore_dir_name14 = project.offshore_dir_name14
      # creates DEM from asc data
  -   convert_dem_from_ascii2netcdf(onshore_dir_name, use_cache=True, verbose=True)
  -   convert_dem_from_ascii2netcdf(islands_dir_name, use_cache=True, verbose=True)
  +   print "creates DEMs from asc data"
  +   convert_dem_from_ascii2netcdf(onshore_in_dir_name, basename_out=onshore_dir_name, use_cache=True, verbose=True)
  +   convert_dem_from_ascii2netcdf(island_in_dir_name, basename_out=island_dir_name, use_cache=True, verbose=True)
  +   convert_dem_from_ascii2netcdf(island_in_dir_name1, basename_out=island_dir_name1, use_cache=True, verbose=True)
  +   convert_dem_from_ascii2netcdf(island_in_dir_name2, basename_out=island_dir_name2, use_cache=True, verbose=True)
  +   convert_dem_from_ascii2netcdf(island_in_dir_name3, basename_out=island_dir_name3, use_cache=True, verbose=True)
      #creates pts file for onshore DEM
  +   print "creates pts file for onshore DEM"
      dem2pts(onshore_dir_name,
              # easting_min=project.eastingmin,
  …
              verbose=True)
  -   #creates pts file for islands DEM
  -   dem2pts(islands_dir_name, use_cache=True, verbose=True)
  +   #creates pts file for island DEM
  +   dem2pts(island_dir_name, use_cache=True, verbose=True)
  +   dem2pts(island_dir_name1, use_cache=True, verbose=True)
  +   dem2pts(island_dir_name2, use_cache=True, verbose=True)
  +   dem2pts(island_dir_name3, use_cache=True, verbose=True)
  -   print'create Geospatial data objects from topographies'
  -   G1 = Geospatial_data(file_name = project.onshore_dir_name + '.pts')
  -   G2 = Geospatial_data(file_name = project.coast_dir_name + '.xya')
  -   G3 = Geospatial_data(file_name = project.islands_dir_name + '.pts')
  -   G_off = Geospatial_data(file_name = project.offshore_dir_name + '.xya')
  -   G_off1 = Geospatial_data(file_name = project.offshore_dir_name1 + '.xya')
  -   G_off2 = Geospatial_data(file_name = project.offshore_dir_name2 + '.xya')
  -   G_off3 = Geospatial_data(file_name = project.offshore_dir_name3 + '.xya')
  -   G_off4 = Geospatial_data(file_name = project.offshore_dir_name4 + '.xya')
  -   G_off5 = Geospatial_data(file_name = project.offshore_dir_name5 + '.xya')
  -   G_off6 = Geospatial_data(file_name = project.offshore_dir_name6 + '.xya')
  -   G_off7 = Geospatial_data(file_name = project.offshore_dir_name7 + '.xya')
  -   G_off8 = Geospatial_data(file_name = project.offshore_dir_name8 + '.xya')
  -   G_off9 = Geospatial_data(file_name = project.offshore_dir_name9 + '.xya')
  -   G_off10 = Geospatial_data(file_name = project.offshore_dir_name10 + '.xya')
  -   G_off11 = Geospatial_data(file_name = project.offshore_dir_name11 + '.xya')
  -   G_off12 = Geospatial_data(file_name = project.offshore_dir_name12 + '.xya')
  -   G_off13 = Geospatial_data(file_name = project.offshore_dir_name13 + '.xya')
  -   G_off14 = Geospatial_data(file_name = project.offshore_dir_name14 + '.xya')
  +   print'create Geospatial data1 objects from topographies'
  +   G1 = Geospatial_data(file_name = onshore_dir_name + '.pts')
  +   print'create Geospatial data2 objects from topographies'
  +   G2 = Geospatial_data(file_name = coast_in_dir_name + '.xya')
  +   print'create Geospatial data3 objects from topographies'
  +   G3 = Geospatial_data(file_name = island_dir_name + '.pts')
  +   print'create Geospatial data4 objects from topographies'
  +   G4 = Geospatial_data(file_name = island_dir_name1 + '.pts')
  +   print'create Geospatial data5 objects from topographies'
  +   G5 = Geospatial_data(file_name = island_dir_name2 + '.pts')
  +   print'create Geospatial data6 objects from topographies'
  +   G6 = Geospatial_data(file_name = island_dir_name3 + '.pts')
  +   print'create Geospatial data7 objects from topographies'
  +   G_off = Geospatial_data(file_name = offshore_in_dir_name + '.xya')
  +   print'create Geospatial data8 objects from topographies'
  +   G_off1 = Geospatial_data(file_name = offshore1_in_dir_name + '.xya')
  +   print'add all geospatial objects'
  +   G = G1 + G2 + G3 + G4 + G5 + G6 + G_off + G_off1
  -   print'clip nw', project.clip_poly_nw
  -   print'clip e', project.clip_poly_e
  -   print'reading combined_dir_name'
  -   G_offshore_data = Geospatial_data(file_name = project.topographies_dir+'dampier_combined_elevation_final.pts')
  -   print'reading offshore_dir_name_old'
  -   G_offshore_old = Geospatial_data(file_name = project.offshore_dir_name_old + '.pts')
  -   print 'G_offshore_data_old_nw',
  -   G_nw_name = project.topographies_dir + 'nw_old_data'
  -   G_offshore_nw = G_offshore_old.clip(project.clip_poly_nw)
  -   G_offshore_nw.export_points_file(G_nw_name + '.pts')
  -   G_offshore_nw.export_points_file(G_nw_name + '.xya')
  -   G_nw = array(G_offshore_nw.get_data_points())
  -   print'shape of arr nw data', G_nw.shape
  -   print' max and min of array 0',max(G_nw[:,0]),min(G_nw[:,0])
  -   print' max and min of array 1',max(G_nw[:,1]),min(G_nw[:,1])
  -   print 'G_offshore_data_old_e'#, G_offshore_old_nw.get_data_points()
  -   G_e_name = project.topographies_dir+'e_old_data'
  -   G_offshore_e = G_offshore_old.clip(project.clip_poly_e)
  -   G_offshore_e.export_points_file(G_e_name + '.pts')
  -   G_offshore_e.export_points_file(G_e_name + '.xya')
  -   G_e = array(G_offshore_e.get_data_points())
  -   print'shape of arr e data', G_e.shape
  -   print' max and min of array 0',max(G_e[:,0]),min(G_e[:,0])
  -   print' max and min of array 1',max(G_e[:,1]),min(G_e[:,1])
  -   print 'G_offshore_data_mid_e'#, G_offshore_old_nw.get_data_points()
  -   G_mid_e_name = project.topographies_dir+'mid_e_old_data'
  -   G_offshore_mid_e = G_offshore_old.clip(project.clip_poly_mid_e)
  -   G_offshore_mid_e.export_points_file(G_mid_e_name + '.pts')
  -   G_offshore_mid_e.export_points_file(G_mid_e_name + '.xya')
  -   G_mid_e = array(G_offshore_mid_e.get_data_points())
  -   print'shape of arr e data', G_mid_e.shape
  -   print' max and min of array 0',max(G_mid_e[:,0]),min(G_mid_e[:,0])
  -   print' max and min of array 1',max(G_mid_e[:,1]),min(G_mid_e[:,1])
  -   print 'G_offshore_data_mid_w'#, G_offshore_old_nw.get_data_points()
  -   G_mid_w_name = project.topographies_dir+'mid_w_old_data'
  -   G_offshore_mid_w = G_offshore_old.clip(project.clip_poly_mid_w)
  -   G_offshore_mid_w.export_points_file(G_mid_w_name + '.pts')
  -   G_offshore_mid_w.export_points_file(G_mid_w_name + '.xya')
  -   G_mid_w = array(G_offshore_mid_w.get_data_points())
  -   print'shape of arr e data', G_mid_w.shape
  -   print' max and min of array 0',max(G_mid_w[:,0]),min(G_mid_w[:,0])
  -   print' max and min of array 1',max(G_mid_w[:,1]),min(G_mid_w[:,1])
  -   print 'G_offshore_data_old_e'#, G_offshore_old_e.get_data_points()
  -   print'add all geospatial objects'
  -   #G = G1 + G2 + G3 + G_off + G_off1 + G_off2 + G_off3 + G_off4 + G_off5 \
  -   #    + G_off6 + G_off7 + G_off8 + G_off9 + G_off10 + G_off11 + G_off12 \
  -   #    + G_off13 + G_off14
  -   G = G_offshore_data + G_offshore_mid_w + G_offshore_mid_e
  -   print'shape of arr G data', G.get_data_points().shape
  -   #print'clip combined geospatial object by bounding polygon'
  +   print'clip combined geospatial object by bounding polygon'
      G_clipped = G.clip(project.bounding_polygon)
      #FIXME: add a clip function to pts
  -   print'shape of clipped data', G_clipped.get_data_points().shape
  +   #print'shape of clipped data', G_clipped.get_data_points().shape
      print'export combined DEM file'
  -   if access(project.topographies_time_dir,F_OK) == 0:
  -       mkdir (project.topographies_time_dir)
  -   G_clipped.export_points_file(project.combined_time_dir_final_name + '.pts')
  -   G_clipped.export_points_file(project.combined_time_dir_final_name + '.xya')
  +   if access(project.topographies_dir,F_OK) == 0:
  +       mkdir (project.topographies_dir)
  +   G_clipped.export_points_file(project.combined_dir_name + '.pts')
  +   #G_clipped.export_points_file(project.combined_dir_name + '.xya')
      '''
  …
      )
      # dependencies = source_dir + project.boundary_basename + '.sww')
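The step this build script now ends with is G.clip(project.bounding_polygon): keep only the combined elevation points that fall inside the study-area polygon. A rough, self-contained sketch of that idea in plain Python (ray casting; this is not ANUGA's Geospatial_data implementation, and the coordinates are made up):

    # Illustrative only -- not ANUGA code. Shows the "clip points to a polygon" idea.
    def point_in_polygon(x, y, poly):
        inside = False
        n = len(poly)
        for i in range(n):
            x1, y1 = poly[i]
            x2, y2 = poly[(i + 1) % n]
            if (y1 > y) != (y2 > y):                      # edge crosses the horizontal ray
                x_cross = x1 + (y - y1) * (x2 - x1) / (y2 - y1)
                if x < x_cross:
                    inside = not inside
        return inside

    def clip_points(points, poly):
        return [p for p in points if point_in_polygon(p[0], p[1], poly)]

    bounding_polygon = [(0, 0), (10, 0), (10, 10), (0, 10)]   # made-up coordinates
    print(clip_points([(5, 5), (20, 3)], bounding_polygon))   # -> [(5, 5)]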
anuga_work/production/dampier_2006/project.py
r4091 → r4147

      """
  +   from os import sep, environ, getenv, getcwd
  +   from os.path import expanduser
      import sys
  -   from os import sep, environ, getenv, getcwd
  -   from os.path import expanduser, basename
  -   from anuga.coordinate_transforms.redfearn import\
  -        degminsec2decimal_degrees,\
  -        convert_from_latlon_to_utm
  -   from time import localtime, strftime, gmtime, ctime
  -   from anuga.geospatial_data.geospatial_data import *
  -   from anuga.utilities.polygon import read_polygon, plot_polygons, polygon_area
  +   from time import localtime, strftime, gmtime
  +   from anuga.utilities.polygon import read_polygon, plot_polygons, polygon_area, is_inside_polygon, number_mesh_triangles
  +   from anuga.coordinate_transforms.redfearn import degminsec2decimal_degrees, convert_points_from_latlon_to_utm
      from anuga.utilities.system_tools import get_user_name
  …
      # INUNDATIONHOME is the inundation directory, not the data directory.
      home += sep +'data'
  -   #----------------------------------
  -   # Location and naming of scenario data
  -   #----------------------------------
  -   state = 'western_australia'
  -   scenario_name = 'dampier_tsunami'
  -   scenario_datas_name = 'dampier_tsunami_scenario_2006' #name of the directory where the data is stored
  -   #scenario_datas_name = 'karratha_tsunami_scenario_2005' # Tmp location
  -   #mesh_name = 'elevation50m'
  -   boundaries_name = 'dampier'
  -   boundaries_source = 'mag_9_corrected'
  -   #boundaries_source = 'test'
  -   tide = 2.4
  -   #tide = 0.0
  -   # topography file names
  -   onshore_name = 'dli_no_islands'
  -   coast_name = 'DTED_05_Contour'
  -   islands_name = 'dted_islands'
  -   offshore_name = 'XY100003902'
  -   offshore_name1 = 'XY100003903'
  -   offshore_name2 = 'XY100003951'
  -   offshore_name3 = 'XY100006321'
  -   offshore_name4 = 'XY100011756'
  -   offshore_name5 = 'XY100014243'
  -   offshore_name6 = 'XY100014244'
  -   offshore_name7 = 'XY100021081'
  -   offshore_name8 = 'XY100021082'
  -   offshore_name9 = 'XY100021083'
  -   offshore_name10 = 'XY100021085'
  -   offshore_name11 = 'XY100021086'
  -   offshore_name12 = 'XY100026309'
  -   offshore_name13 = 'XY100026338'
  -   offshore_name14 = 'XYDM83'
  -   offshore_old = 'elevation50m'
  -   combined_name ='dampier_combined_elevation'
  -   combined_final_name ='dampier_combined_elevation_final'
  -   gauge_name = 'dampier_gauges_up2.csv'
  -   #Derive subdirectories and filenames
  -   meshes_dir = home+sep+state+sep+scenario_datas_name+sep+'anuga'+sep+'meshes'+sep
  -   topographies_dir = home+sep+state+sep+scenario_datas_name+sep+'anuga'+sep+'topographies'+sep
  -   gauges_dir = home+sep+state+sep+scenario_datas_name+sep+'anuga'+sep+'gauges'+sep
  -   polygons_dir = home+sep+state+sep+scenario_datas_name+sep+'anuga'+sep+'polygons'+sep
  -   boundaries_in_dir = home+sep+state+sep+scenario_datas_name+sep+'anuga'+sep+'boundaries'+sep+'urs'+sep+boundaries_source+sep
  -   #outputdir = home+sep+state+sep+scenario_datas_name+sep+'anuga'+sep+'output'+sep
  -   tide_dir = home+sep+state+sep+scenario_datas_name+sep+'anuga'+sep+'tide_data'+sep
  +   #time stuff
      time = strftime('%Y%m%d_%H%M%S',localtime()) #gets time for new dir
      gtime = strftime('%Y%m%d_%H%M%S',gmtime()) #gets time for new dir
  -   #cctime = strftime('%Y%m%d_%H%M%S',ctime()) #gets time for new dir
      build_time = time+'_build'
      run_time = time+'_run'
      print 'gtime: ', gtime
  -   output_dir = home+sep+state+sep+scenario_datas_name+sep+'anuga'+sep+'outputs'+sep
  -   output_dir = home+sep+state+sep+scenario_datas_name+sep+'anuga'+sep+'outputs'+sep
  -   output_build_time_dir = home+sep+state+sep+scenario_datas_name+sep+'anuga'+sep+'outputs'+sep+build_time+sep
  -   output_run_time_dir = home+sep+state+sep+scenario_datas_name+sep+'anuga'+sep+'outputs'+sep+run_time+sep
  +   tide = 0.6
  +   #Making assumptions about the location of scenario data
  +   state = 'western_australia'
  +   scenario_name = 'dampier'
  +   scenario = 'dampier_tsunami_scenario_2006'
  +   # onshore data provided by WA DLI
  +   onshore_name = 'dampier_dli_ext' # original
  +   #island
  +   island_name = 'rott_dli_ext' # original
  +   island_name1 = 'gard_dli_ext'
  +   island_name2 = 'carnac_island_dted'
  +   island_name3 = 'penguin_dted'
  +   # AHO + DPI data + colin French coastline
  +   coast_name = 'waterline'
  +   offshore_name = 'dampier_bathymetry'
  +   offshore1_name = 'missing_fairsheets'
  +   #final topo name
  +   combined_name ='dampier_combined_elevation'
  +   combined_smaller_name = 'dampier_combined_elevation_smaller'
  +   topographies_in_dir = home+sep+state+sep+scenario+sep+'elevation_final'+sep+'points'+sep
  +   topographies_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'topographies'+sep
      topographies_time_dir = topographies_dir+build_time+sep
  -   boundaries_dir = home+sep+state+sep+scenario_datas_name+sep+'anuga'+sep+'boundaries'+sep
  -   boundaries_time_dir = home+sep+state+sep+scenario_datas_name+sep+'anuga'+sep+'boundaries'+sep+build_time+sep
  -   meshes_time_dir = meshes_dir+build_time+sep
  -   #ideas
  -   #boundaries_time_dir = boundaries_in_dir+'urs'+sep+boundaries_source+sep
  -   gauge_dir = home+sep+state+sep+scenario_datas_name+sep+'anuga'+sep+'gauges'+sep
  -   gauge_filename = gauge_dir + 'dampier_gauges_up2.csv'
  -   gauges_dir_name = gauges_dir + gauge_name
  +   # input topo file location
  +   onshore_in_dir_name = topographies_in_dir + onshore_name
  +   island_in_dir_name = topographies_in_dir + island_name
  +   island_in_dir_name1 = topographies_in_dir + island_name1
  +   island_in_dir_name2 = topographies_in_dir + island_name2
  +   island_in_dir_name3 = topographies_in_dir + island_name3
  +   coast_in_dir_name = topographies_in_dir + coast_name
  +   offshore_in_dir_name = topographies_in_dir + offshore_name
  +   offshore1_in_dir_name = topographies_in_dir + offshore1_name
      onshore_dir_name = topographies_dir + onshore_name
  +   island_dir_name = topographies_dir + island_name
  +   island_dir_name1 = topographies_dir + island_name1
  +   island_dir_name2 = topographies_dir + island_name2
  +   island_dir_name3 = topographies_dir + island_name3
      coast_dir_name = topographies_dir + coast_name
  -   islands_dir_name = topographies_dir + islands_name
      offshore_dir_name = topographies_dir + offshore_name
  -   offshore_dir_name1 = topographies_dir + offshore_name1
  -   offshore_dir_name2 = topographies_dir + offshore_name2
  -   offshore_dir_name3 = topographies_dir + offshore_name3
  -   offshore_dir_name4 = topographies_dir + offshore_name4
  -   offshore_dir_name5 = topographies_dir + offshore_name5
  -   offshore_dir_name6 = topographies_dir + offshore_name6
  -   offshore_dir_name7 = topographies_dir + offshore_name7
  -   offshore_dir_name8 = topographies_dir + offshore_name8
  -   offshore_dir_name9 = topographies_dir + offshore_name9
  -   offshore_dir_name10 = topographies_dir + offshore_name10
  -   offshore_dir_name11 = topographies_dir + offshore_name11
  -   offshore_dir_name12 = topographies_dir + offshore_name12
  -   offshore_dir_name13 = topographies_dir + offshore_name13
  -   offshore_dir_name14 = topographies_dir + offshore_name14
  -   offshore_dir_name_old = topographies_dir + offshore_old
  -   #output dir
  +   #final topo files
      combined_dir_name = topographies_dir + combined_name
      combined_time_dir_name = topographies_time_dir + combined_name
  -   combined_time_dir_final_name = topographies_time_dir + combined_final_name
  +   combined_smaller_name_dir = topographies_dir + combined_smaller_name
  +   #combined_time_dir_final_name = topographies_time_dir + combined_final_name
  +   meshes_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'meshes'+sep
      meshes_dir_name = meshes_dir + scenario_name
  -   meshes_time_dir_name = meshes_time_dir + scenario_name
  -   #output_build_time_dir_name = output_build_time_dir + scenario_name #Used by post processing
  +   polygons_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'polygons'+sep
  +   tide_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'tide_data'+sep
  +   boundaries_source = '????'
  +   #boundaries locations
  +   boundaries_in_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'boundaries'+sep+'urs'+sep+boundaries_source+sep
  +   boundaries_in_dir_name = boundaries_in_dir + scenario_name
  +   boundaries_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'boundaries'+sep
  +   boundaries_dir_name = boundaries_dir + scenario_name
  +   #boundaries_time_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'boundaries'+sep+build_time+sep
  +   #boundaries_time_dir_name = boundaries_time_dir + boundaries_name #Used by post processing
  +   #output locations
  +   output_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'outputs'+sep
  +   output_build_time_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'outputs'+sep+build_time+sep
  +   output_run_time_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'outputs'+sep+run_time+sep
      output_run_time_dir_name = output_run_time_dir + scenario_name #Used by post processing
  -   boundaries_in_dir_name = boundaries_in_dir + boundaries_name
  -   boundaries_time_dir_name = boundaries_time_dir + boundaries_name #Used by post processing
  -   boundaries_dir_name = boundaries_dir + boundaries_name
  -   # Regions
  +   #gauges
  +   gauge_name = 'dampier.csv'
  +   gauges_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'gauges'+sep
  +   gauges_dir_name = gauges_dir + gauge_name
  +   ###############################
  +   # Domain definitions
  +   ###############################
      refzone = 50
      south = degminsec2decimal_degrees(-20,55,0)
      north = degminsec2decimal_degrees(-20,15,0)
  -   #north = degminsec2decimal_degrees(-19,15,0)
      west = degminsec2decimal_degrees(116,17,0)
      east = degminsec2decimal_degrees(117,10,0)
  -   #only used to clip boundary condition
  -   #north_boundary = north + 0.02
  -   #south_boundary = south - 0.02
  -   #west_boundary = west - 0.02
  -   #east_boundary = east + 0.02
  -   south_boundary = degminsec2decimal_degrees(-21,0,0)
  -   #north_boundary = degminsec2decimal_degrees(-19,00,0)
  -   north_boundary = degminsec2decimal_degrees(-20,10,0)
  -   #west_boundary = degminsec2decimal_degrees(116,0,0)
  -   #east_boundary = degminsec2decimal_degrees(118,00,0)
  -   west_boundary = degminsec2decimal_degrees(116,00,0)
  -   east_boundary = degminsec2decimal_degrees(117,20,0)
      p0 = [south, degminsec2decimal_degrees(116,32,0)]
  …
      p8 = [south, east]
  -   bounding_polygon, zone =\
  -       convert_from_latlon_to_utm([p0, p1, p2, p3, p4, p5, p6, p7, p8])
  -   #bounding_polygon, zone =\
  -   #    convert_from_latlon_to_utm([p1, p2, p3, p4, p5, p6, p7])
  -   print bounding_polygon
  +   poly_all, zone = convert_from_latlon_to_utm([p0, p1, p2, p3, p4, p5, p6, p7, p8])
      refzone = zone
  -   from anuga.utilities.polygon import read_polygon, plot_polygons, polygon_area, is_inside_polygon
  -   print 'poly area', polygon_area(bounding_polygon)/1000000.0
  -   #Interior regions
  +   print 'Area of bounding polygon', polygon_area(poly_all)/1000000.0
  +   res_poly_all = 100000
  +   ###############################
  +   # Interior region definitions
  +   ###############################
  +   poly_pos20_neg20 = read_polygon(polygons_dir+'pos20_neg20_pts.csv')
  +   res_pos20_neg20 = 20000
  +   poly_dampier = read_polygon(polygons_dir+'dampier_pts.csv')
  +   res_dampier = 500
  +   poly_karratha = read_polygon(polygons_dir+'karratha_pts.csv')
  +   res_karratha = 500
  +   poly_delambre = read_polygon(polygons_dir+'delambre_pts.csv')
  +   res_delambre = 1000
  +   poly_mainisland = read_polygon(polygons_dir+'mainisland_pts.csv')
  +   res_mainisland = 1000
  +   poly_NWislands = read_polygon(polygons_dir+'NWislands_pts.csv')
  +   res_NWislands = 1000
  +   plot_polygons([poly_pos20_neg20,poly_dampier,poly_karratha,poly_delambre,polylmainisland,
  +                  polyNWislands,poly_all],output_run_time_dir + 'poly_pic')
  +   interior_regions = [[poly_pos20_neg20,res_pos20_neg20],[poly_dampier,res_dampier],
  +                       [poly_karratha,res_karratha],[poly_delambre,res_delambre],
  +                       [poly_mainisland,res_mainisland],[poly_NWislands,res_NWislands]]
  +   trigs_min = number_mesh_triangles(interior_regions, poly_all, res_poly_all)
  +   print 'min number triangles', trigs_min
  +   ###################################################################
  +   # Clipping regions for export to asc and regions for clipping data
  +   ###################################################################
  +   # exporting asc grid - Dampier
  +   e_min_area = 474000
  +   e_max_area = 480000
  +   n_min_area = 7719000
  +   n_max_area = 7725000
  +   # exporting asc grid - Karratha
  +   e_min_area =
  +   e_max_area =
  +   n_min_area =
  +   n_max_area =
  +   """
  +   # used in the CIPMA 2006 scenario
      # CIPMA point of interest
      cipma_latitude = -20.588456
      cipma_longitude = 116.771527
      k0 = [cipma_latitude-0.02, cipma_longitude-0.02]
  …
      assert zone == refzone
  +   poly_facility = read_polygon(polygons_dir+'facility.csv')
  +   poly_pipeline = read_polygon(polygons_dir+'pipeline2.csv')
  +   poly_interior = read_polygon(polygons_dir+'interior.csv')
  +   poly_coast = read_polygon(polygons_dir+'coast_final.csv')
  +   clip_poly_e = read_polygon(polygons_dir+'gap_e.csv')
  +   clip_poly_nw = read_polygon(polygons_dir+'gap_nw.csv')
  +   clip_poly_mid_w = read_polygon(polygons_dir+'gap_mid_w.cvs')
  +   clip_poly_mid_e = read_polygon(polygons_dir+'gap_mid_e.cvs')
  +   # exporting asc grid
      e_min_area = 474000
      e_max_area = 480000
  …
      n_max_area = 7725000
  -   poly_facility = read_polygon(polygons_dir+'facility.csv')
  -   poly_pipeline = read_polygon(polygons_dir+'pipeline2.csv')
  -   poly_interior = read_polygon(polygons_dir+'interior.csv')
  -   poly_coast = read_polygon(polygons_dir+'coast_final.csv')
  -   clip_poly_e = read_polygon(polygons_dir+'gap_e.csv')
  -   clip_poly_nw = read_polygon(polygons_dir+'gap_nw.csv')
  -   clip_poly_mid_w = read_polygon(polygons_dir+'gap_mid_w.cvs')
  -   clip_poly_mid_e = read_polygon(polygons_dir+'gap_mid_e.cvs')
  +   # used in the original 2005 scenario
      #Interior regions
      karratha_south = degminsec2decimal_degrees(-20,44,0)
  …
      assert zone == refzone
      #Interior regions
      dampier_south = degminsec2decimal_degrees(-20,40,0)
  …
      assert zone == refzone
      #Interior regions
      refinery_south = degminsec2decimal_degrees(-20,37,50)
  …
      assert zone == refzone
      #Interior region around 468899, 7715177:
      #lat (-20, 39, 44.93753), lon (116, 42, 5.09106)
  …
      point_polygon, zone = convert_from_latlon_to_utm([d0, d1, d2, d3])
      assert zone == refzone
      #Neils areas around interesting points
  …
      assert zone == refzone
      neil2_point1 = [degminsec2decimal_degrees(-20,39,36),
                      degminsec2decimal_degrees(116,41,33)]
  …
                      neil2_point4])
      assert zone == refzone
      #Withnell bay
  …
      assert zone == refzone
      #Larger Withnell bay
      lwb_point1 = [degminsec2decimal_degrees(-20,35,59),
                    degminsec2decimal_degrees(116,46,24)]
  …
      assert zone == refzone
      """
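Most of the churn above is renaming the directory variables that hang off the scenario folder and stamping build/run output directories with the current time. A small runnable sketch of that naming convention, with the INUNDATIONHOME-based root replaced by a placeholder string:

    # Sketch of the naming convention only; the real project.py roots everything
    # at the INUNDATIONHOME environment variable plus 'data'.
    from os import sep
    from time import localtime, strftime

    home = 'data'                                   # placeholder root
    state = 'western_australia'
    scenario = 'dampier_tsunami_scenario_2006'

    time_stamp = strftime('%Y%m%d_%H%M%S', localtime())
    build_time = time_stamp + '_build'

    topographies_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'topographies'+sep
    topographies_time_dir = topographies_dir + build_time + sep
    print(topographies_time_dir)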
anuga_work/production/dampier_2006/run_dampier.py
r4066 → r4147

      The scenario is defined by a triangular mesh created from project.polygon,
  -   the elevation data and a simulated submarine landslide.
  +   the elevation data and a simulated tsunami generated with URS code.
      Ole Nielsen and Duncan Gray, GA - 2005 and Jane Sexton, Nick Bartzis, GA - 2006
  …
      import sys
      # Related major packages
      from anuga.shallow_water import Domain
  …
      from anuga.pmesh.mesh_interface import create_mesh_from_regions
  -   from anuga.geospatial_data.geospatial_data import *
      from anuga.abstract_2d_finite_volumes.util import start_screen_catcher, copy_code_files
      from anuga_parallel.parallel_api import distribute, numprocs, myid, barrier
  …
      #------------------------------------------------------------------------------
  +   start_screen_catcher(project.output_run_time_dir, myid, numprocs)
      # filenames
  -   #build_time = '20061029_231935_build_tide_24'
  -   #build_time = '20061030_165746_build_tide_24'
  -   #build_time = '20061102_215532_build_plus_old_data'
  -   #build_time = '20061103_055258_build'
      build_time = '20061107_063840_build'
  -   #build_time = '20061025_153643_build_basic'
      bound_time = '20061102_221245_build'
      boundaries_name = project.boundaries_name
  -   #meshes_time_dir_name = project.meshes_time_dir_name+'.msh'
      meshes_dir_name = project.meshes_dir_name+'.msh'
  -   #source_dir = project.boundarydir
  -   #boundaries_time_dir_name = project.boundaries_dir + build_time + sep + boundaries_name
  -   boundaries_time_dir_name = project.boundaries_time_dir_name
      boundaries_dir_name = project.boundaries_dir_name
      tide = project.tide
  …
                      dirname(project.__file__)+sep+ project.__name__+'.py' )
      barrier()
  -   #start_screen_catcher(project.output_run_time_dir, myid, numprocs)
      print 'USER: ', project.user
  -   #sys.exit()
  +   print 'min triangles', project.trigs_min,
  +   print 'Note: This is generally about 20% less than the final amount'
      #--------------------------------------------------------------------------
  …
      print 'start create mesh from regions'
  -   interior_regions = [#[project.karratha_polygon, 25000],
  -                       [project.poly_coast, 10000],
  -                       [project.poly_pipeline, 2000],
  -                       [project.poly_facility, 500]]
  -   #                   [project.poly_interior, 1000]]
  -   from anuga.utilities.polygon import plot_polygons
  -   figname = project.output_run_time_dir + 'poly_pic'
  -   plot_polygons([project.poly_coast,project.poly_pipeline,project.poly_facility,
  -                  project.bounding_polygon],
  -                 figname,
  -                 verbose = True)
  -   # if access(project.meshes_time_dir,F_OK) == 0:
  -   #     mkdir(project.meshes_time_dir)
  -   # if access(project.meshes_dir,F_OK) == 0:
  -   #     mkdir(project.meshes_dir)
  -   import sys; sys.exit()
  -   print 'start create mesh from regions'
  -   interior_regions = [#[project.karratha_polygon, 25000],
  -                       # [project.cipma_polygon, 1000],
  -                       # [project.poly_pipeline, 5000],
  -                       # [project.poly_facility, 500]]
  -                       [project.poly_interior, 1000]]
  -   # meshes_dir_name = project.meshes_dir_name + '.msh'
  -   create_mesh_from_regions(project.bounding_polygon,
  -                            boundary_tags={'back': [7, 8], 'side': [0, 6],
  -                                           'ocean': [1, 2, 3, 4, 5]},
  -                            maximum_triangle_area=100000,
  -                            interior_regions=interior_regions,
  -                            # filename=meshes_time_dir_name,
  -                            filename=meshes_dir_name,
  -                            use_cache=True,
  -                            verbose=True)
  +   create_mesh_from_regions(project.poly_all,
  +                            boundary_tags={'back': [7, 8], 'side': [0, 6],
  +                                           'ocean': [1, 2, 3, 4, 5]},
  +                            maximum_triangle_area=project.res_poly_all,
  +                            interior_regions=project.interior_regions,
  +                            filename=meshes_dir_name,
  +                            use_cache=True,
  +                            verbose=True)
      # to sync all processors are ready
  …
      #-------------------------------------------------------------------------
      print 'Setup computational domain'
  -   #domain = Domain(meshes_time_dir_name, use_cache=True, verbose=True)
      domain = Domain(meshes_dir_name, use_cache=True, verbose=True)
      print domain.statistics()
  +   """
      print 'starting to create boundary conditions'
      boundaries_in_dir_name = project.boundaries_in_dir_name
  -   from anuga.shallow_water.data_manager import urs2sww, ferret2sww
  -   print 'maxlat=project.south_boundary, minlat=project.north_boundary', project.south_boundary,project.north_boundary
  -   print 'minlon= project.west_boundary, maxlon=project.east_boundary',project.west_boundary, project.east_boundary
  -   print ' maxlon=project.east',project.east
  -   print 'origin: domain.geo_reference.get_origin()',domain.geo_reference.get_origin()
  -   #import sys; sys.exit()
  -   #if access(project.boundaries_time_dir,F_OK) == 0:
  -   #    mkdir (project.boundaries_time_dir)
  +   from anuga.shallow_water.data_manager import urs2sw
      # put above distribute
      print 'boundary file is: ',boundaries_dir_name
      from caching import cache
      if myid == 0:
  -       cache( ferret2sww,
  +       cache(urs2sww,
                (boundaries_in_dir_name,
                # boundaries_time_dir_name),
  …
               )
      barrier()
  +   """
      #-------------------------------------------------------------------------
  …
      domain.set_quantity('stage', tide)
  -   domain.set_quantity('friction', 0.0 )
  +   domain.set_quantity('friction', 0.01)
      #combined_time_dir_name = project.topographies_dir+build_time+project.combined_name
      print 'Start Set quantity'
      domain.set_quantity('elevation',
  -                       filename = project. topographies_dir + build_time + sep + project.combined_final_name + '.pts',
  +                       filename = project.combined_dir_name + '.txt',
                          use_cache = True,
                          verbose = True,
                          alpha = 0.1)
  -   #domain.set_quantity('elevation', -50)
      print 'Finished Set quantity'
      barrier()
  …
      domain.set_store_vertices_uniquely(False)
      domain.set_quantities_to_be_stored(['stage', 'xmomentum', 'ymomentum'])
  -   domain.set_maximum_allowed_speed(0.0) # Allow a little runoff (0.1 is OK)
  +   domain.set_maximum_allowed_speed(0.1) # Allow a little runoff (0.1 is OK)
      #-------------------------------------------------------------------------
  …
      #-------------------------------------------------------------------------
      print 'Available boundary tags', domain.get_boundary_tags()
      print 'domain id', id(domain)
      print 'Reading Boundary file'
  -   #boundariesname = project.boundaries_dir + '20061101_003322_build'+sep+boundaries_name
  -   #print'boundariesname',boundariesname
  -   #Bf = File_boundary(boundaries_time_dir_name + '.sww',
  -   #Bf = File_boundary(boundariesname + '.sww',
  -   print 'domain id', id(domain)
  -   Bf = File_boundary(boundaries_dir_name + '.sww',
  -                      domain, time_thinning=5, use_cache=True, verbose=True)
  +   #Bf = File_boundary(boundaries_dir_name + '.sww',
  +   #                   domain, time_thinning=5, use_cache=True, verbose=True)
      print 'finished reading boundary file'
  …
      t0 = time.time()
  -   #for t in domain.evolve(yieldstep = 60, finaltime = 34000):
  +   for t in domain.evolve(yieldstep = 60, finaltime = 34000):
  +       domain.write_time()
  +       domain.write_boundary_statistics(tags = 'ocean')
  +   #for t in domain.evolve(yieldstep = 120, finaltime = 9000):
      #    domain.write_time()
      #    domain.write_boundary_statistics(tags = 'ocean')
  -   for t in domain.evolve(yieldstep = 120, finaltime = 9000):
  -       domain.write_time()
  -       domain.write_boundary_statistics(tags = 'ocean')
  -       # print 'time: ',time.time()
  -       if allclose(t, 6000):
  -           domain.set_quantity('xmomentum', 0)
  -           domain.set_quantity('ymomentum', 0)
  -   #import sys; sys.exit()
  -   for t in domain.evolve(yieldstep = 60, finaltime = 28800
  -                          ,skip_initial_step = True):
  -       domain.write_time()
  -       domain.write_boundary_statistics(tags = 'ocean')
  -   for t in domain.evolve(yieldstep = 120, finaltime = 34800
  -                          ,skip_initial_step = True):
  -       domain.write_time()
  -       domain.write_boundary_statistics(tags = 'ocean')
  +   #for t in domain.evolve(yieldstep = 60, finaltime = 28800
  +   #                       ,skip_initial_step = True):
  +   #    domain.write_time()
  +   #    domain.write_boundary_statistics(tags = 'ocean')
  +   #for t in domain.evolve(yieldstep = 120, finaltime = 34800
  +   #                       ,skip_initial_step = True):
  +   #    domain.write_time()
  +   #    domain.write_boundary_statistics(tags = 'ocean')
      print 'That took %.2f seconds' %(time.time()-t0)
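The run script's main loop drives domain.evolve(), which yields control every yieldstep seconds of model time until finaltime, letting the driver log times and boundary statistics. A stand-alone sketch of that driver pattern, with a stub generator standing in for an ANUGA Domain so it runs without any mesh or boundary data:

    # Illustrative stub only; the real loop iterates over domain.evolve().
    import time

    def fake_evolve(yieldstep, finaltime):
        t = 0.0
        while t <= finaltime:
            yield t
            t += yieldstep

    t0 = time.time()
    for t in fake_evolve(yieldstep=60, finaltime=34000):
        pass  # run_dampier.py calls domain.write_time() and
              # domain.write_boundary_statistics(tags='ocean') here
    print('That took %.2f seconds' % (time.time() - t0))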
anuga_work/production/onslow_2006/make_report.py
r4145 → r4147

      s = '\\begin{table} \\begin{center} \n'
      fid.write(s)
  -   s = '\caption{Defined point locations for %s study area.}' %scenario_name
  +   s = '\caption{Defined point locations for %s study area.}' %scenario_name.title()
      fid.write(s)
      s = """
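The only change is the added str.title() call, which capitalises the scenario name before it is written into the generated LaTeX caption; for example (the value shown is illustrative):

    scenario_name = 'onslow'   # illustrative value
    s = '\\caption{Defined point locations for %s study area.}' % scenario_name.title()
    print(s)   # \caption{Defined point locations for Onslow study area.}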
anuga_work/production/onslow_2006/report/modelling_methodology.tex
r4145 → r4147

      for a range of probabilities (or return periods). As Figure \ref{fig:probonslow}
      shows, for a given probability, a number of events are possible. The resulting
  -   impact to Onslow would then vary depending on the source of the event. Further detail
  -   on the tsunami scenarios are outlined in Section \ref{sec:tsunamiscenario}.
  +   impact to Onslow would then vary depending on the source of the event. The
  +   tsunami scenarios selected for the tsunami risk assessment
  +   are discussed in Section \ref{sec:tsunamiscenario}.

      % used for the 2005 report when looking at one event
anuga_work/production/onslow_2006/report/onslow_2006_report.tex
r4145 → r4147

      \begin{table} \begin{center}
  -   \caption{Defined point locations for onslow study area.}
  +   \caption{Defined point locations for Onslow study area.}
      \label{table:locations}
      \begin{tabular}{|l|l|l|l|}\hline
  …
  +
      \section{Damage modelling inputs}
      \label{sec:damageinputs}
anuga_work/production/onslow_2006/report/timeseriesdiscussion.tex
r4134 → r4147

      case of MSL, this water level will be 0. As the tsunami wave moves
      through this point, the water height may grow and thus the stage will
  -   represent the amplitude of the wave. For an onshore location such as the
  -   Light Tower, the actual water depth will be the difference between
  -   the stage and the elevation at that point. Therefore, at the beginning
  -   of the simulation, there will be no water onshore and therefore
  -   the stage and the elevation will be identical.}. Both stage and speed
  +   represent the amplitude of the wave.} For an onshore location such as the
  +   Light Tower, the actual water depth will be shown rather than the stage.
  +   Both stage and speed
      (in metres/second) for
      each scenario (HAT, MSL and LAT) are shown
  …
      the actual amplitude is the difference between the stage value
      and the initial water level; 2.3 - 1.5}.
  -   The drawdown of around 4.3 m (i.e. 2.3 - -2) then occurs at around 230 mins
  +   The drawdown of 4.05 m (i.e. 2.3 - -1.75) then occurs at around 230 mins
      (i.e. 3.8 hours after the event has been generated), before
      the second wave arrives
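The corrected drawdown figure is simply the difference between the stage extremes quoted in the surrounding text (peak stage 2.3 m, trough -1.75 m); as a worked step:

    % Worked arithmetic behind the corrected figure (values taken from the text above).
    \[
      \text{drawdown} = \text{peak stage} - \text{minimum stage} = 2.3 - (-1.75) = 4.05\ \mathrm{m}
    \]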
anuga_work/production/onslow_2006/report/tsunami_scenario.tex
r3402 → r4147

  -   The tsunamigenic event used in this report was developed for a
  -   preliminary tsunami hazard assessment study delivered by GA
  -   to FESA in September 2005
  -   \cite{BC:FESA}. In the assessment, a suite of Mw 9 earthquakes
  -   were evenly spaced along the Sunda Arc subduction zone and there
  -   was no consideration of the likelihood of each event.
  -   Other less likely sources were not considered, such
  -   as intra-plate earthquakes near the WA coast, volcanoes, landslides
  -   or asteroids.
  -   In the preliminary assessment,
  -   the maximum magnitude of earthquakes off Java was considered to be
  -   at least 8.5 and could potentially be as high as 9.
  +   % for original scenario
  +   %The tsunamigenic event used in this report was developed for a
  +   %preliminary tsunami hazard assessment study delivered by GA
  +   %to FESA in September 2005
  +   %\cite{BC:FESA}. In the assessment, a suite of Mw 9 earthquakes
  +   %were evenly spaced along the Sunda Arc subduction zone and there
  +   %was no consideration of the likelihood of each event.
  +   %Other less likely sources were not considered, such
  +   %as intra-plate earthquakes near the WA coast, volcanoes, landslides
  +   %or asteroids.
  +   %In the preliminary assessment,
  +   %the maximum magnitude of earthquakes off Java was considered to be
  +   %at least 8.5 and could potentially be as high as 9.

  -   FESA is interested in the ``most frequent worst case scenario''. Whilst
  -   we currently cannot determine exactly what that event may be, the Mw 9 event
  -   provides a plausible worst case scenario. To understand the
  -   frequency of these tsunami-genic events,
  -   GA is building probabilistic
  -   models to develop a more complete tsunami hazard assessment
  -   for the Sunda Arc subduction zone,
  -   due for completion in late 2006. In the preliminary assessment for
  -   example, it was suggested that while Mw 7 and 8 earthquakes are expected
  -   to occur with a greater frequency than Mw 9 events,
  -   they are likely to pose a comparatively low and more localised hazard to WA.
  +   %FESA is interested in the ``most frequent worst case scenario''. Whilst
  +   %we currently cannot determine exactly what that event may be, the Mw 9 event
  +   %provides a plausible worst case scenario. To understand the
  +   %frequency of these tsunami-genic events,
  +   %GA is building probabilistic
  +   %models to develop a more complete tsunami hazard assessment
  +   %for the Sunda Arc subduction zone,
  +   %due for completion in late 2006. In the preliminary assessment for
  +   %example, it was suggested that while Mw 7 and 8 earthquakes are expected
  +   %to occur with a greater frequency than Mw 9 events,
  +   %they are likely to pose a comparatively low and more localised hazard to WA.

  -   Figure \ref{fig:mw9} shows the maximum wave height of a tsunami initiated
  -   by a Mw 9 event off
  -   the coast of Java. This event provides the source and
  -   boundary condition to the
  -   inundation model presented in Section \ref{sec:anuga}.
  +   %Figure \ref{fig:mw9} shows the maximum wave height of a tsunami initiated
  +   %by a Mw 9 event off
  +   %the coast of Java. This event provides the source and
  +   %boundary condition to the
  +   %inundation model presented in Section \ref{sec:anuga}.

  -   \begin{figure}[hbt]
  +   %\begin{figure}[hbt]

  -   \centerline{ \includegraphics[width=140mm, height=100mm]
  -   {../report_figures/mw9.jpg}}
  +   % \centerline{ \includegraphics[width=140mm, height=100mm]
  +   %{../report_figures/mw9.jpg}}

  -   \caption{Maximum wave height (in cms) for a Mw 9 event off the
  -   coast of Java}
  -   \label{fig:mw9}
  -   \end{figure}
  +   % \caption{Maximum wave height (in cms) for a Mw 9 event off the
  +   %coast of Java}
  +   % \label{fig:mw9}
  +   %\end{figure}
anuga_work/production/perth_2006/build_perth.py
r4091 → r4147

  -   """Script for running tsunami inundation scenario for Dampier, WA, Australia.
  +   """Script for running tsunami inundation scenario for Perth, WA, Australia.

      Source data such as elevation and boundary data is assumed to be available in
anuga_work/production/perth_2006/project.py
r4133 → r4147

      island_name3 = 'penguin_dted'

  -   # AHO + DPI data
  +   # AHO + DPI data + colin French coastline
      coast_name = 'waterline'
      offshore_name = 'perth_bathymetry'
  …
      topographies_time_dir = topographies_dir+build_time+sep

  -   # input topo file location
  +   # input topo file location
      onshore_in_dir_name = topographies_in_dir + onshore_name
      island_in_dir_name = topographies_in_dir + island_name
anuga_work/production/perth_2006/run_perth.py
r4141 → r4147

  -   """Script for running tsunami inundation scenario for Dampier, WA, Australia.
  +   """Script for running tsunami inundation scenario for Perth, WA, Australia.

      Source data such as elevation and boundary data is assumed to be available in
  …

      The scenario is defined by a triangular mesh created from project.polygon,
  -   the elevation data and a simulated submarine landslide.
  +   the elevation data and a simulated tsunami generated with URS code.

      Ole Nielsen and Duncan Gray, GA - 2005 and Jane Sexton, Nick Bartzis, GA - 2006