"""Common filenames and locations for topographic data, meshes and outputs.

Also includes origin for slump scenario.

All filenames are given without extension.
"""
---|
6 | |
---|
7 | from os import sep, environ, getenv, getcwd, umask |
---|
8 | from os.path import expanduser, basename, join |
---|
9 | from anuga.utilities.polygon import read_polygon, plot_polygons, polygon_area, is_inside_polygon, number_mesh_triangles |
---|
10 | import sys |
---|
11 | from anuga.coordinate_transforms.redfearn import degminsec2decimal_degrees |
---|
12 | from time import localtime, strftime, gmtime |
---|
13 | from anuga.utilities.system_tools import get_user_name, get_host_name |
---|
14 | |
---|
# Name of this configuration module (resolves the old FIXME which
# hard-coded 'project.py').
codename = basename(__file__)

# Root data directory.  The INUNDATIONHOME environment variable must be
# set; previously an unset variable produced a confusing error from
# join(None, 'data') at import time.
inundation_home = getenv('INUNDATIONHOME')
if inundation_home is None:
    raise Exception('Environment variable INUNDATIONHOME must be set '
                    'to the location of the inundation data directory')
home = join(inundation_home, 'data')    # Location of Data

user = get_user_name()
host = get_host_name()

# Needed when running using mpirun: mpirun doesn't inherit umask from .bashrc
umask(0o002)
---|
22 | |
---|
# Scenario identification -- assumptions about where the data lives.
state = 'western_australia'
scenario_name = 'shark_bay'
scenario = 'shark_bay_tsunami_scenario'

# Timestamp used to label freshly created build/run output directories.
time = strftime('%Y%m%d_%H%M%S', gmtime())
build_time = '%s_build' % time
run_time = '%s_run' % time
---|
32 | |
---|
# --- Physical and numerical run parameters --------------------------------

# Static tide level applied as sea level [m].
#tide = 0.0     # MSL
tide = 0.79     # HAT average between Denham and Carnarvon
#tide = -0.80   # Estimated LAT

# For frequency response study
amplitude = 0.5
period = 3000

#momentum_scale = 50   # Experiment
momentum_scale = 1     # Experiment

# Maybe will try to make project a class to allow these parameters
# to be passed in.
alpha = 0.1
friction = 0.01
finaltime = 40000      # Simulation end time [s]
starttime = 8000       # Action starts around 9000
setup = 'final'        # One of 'trial', 'basic' or 'final'
#setup = 'trial'
source = 'shark_bay'

boundary_event = 'july2006'
#boundary_event = '10000'
#boundary_event = 'experimental'

# Mesh resolution / output stepping presets.  Coarser 'trial' settings
# give fast exploratory runs; 'final' is full resolution.
# (print(...) with a single argument behaves identically under
# Python 2 and 3, unlike the old print-statement form.)
if setup == 'trial':
    print('trial')
    res_factor = 10
    time_thinning = 48
    yieldstep = 240
elif setup == 'basic':
    print('basic')
    res_factor = 4
    time_thinning = 12
    yieldstep = 120
elif setup == 'final':
    print('final')
    res_factor = 1.0
    time_thinning = 1
    yieldstep = 10
else:
    # Previously an unknown setup fell through silently and caused a
    # NameError much later when res_factor was first used.
    raise Exception("Unknown setup '%s'; expected 'trial', 'basic' or "
                    "'final'" % setup)
---|
72 | |
---|
73 | |
---|
74 | |
---|
# Suffix identifying this run's output directory.
#dir_comment = ('_' + setup + '_' + str(tide) + '_' + str(source) + '_' +
#               str(user) + '_' + boundary_event + '_X' +
#               str(momentum_scale) + '_fixedbathy')

dir_comment = '_'.join(['', setup, str(tide), str(source),
                        str(user), boundary_event])

# Experimental boundary runs encode their amplitude and period in the name.
if boundary_event == 'experimental':
    dir_comment += '_A%s_T%s' % (amplitude, period)
---|
84 | |
---|
85 | |
---|
# Elevation data input filenames (no extensions).
ascii_grid_filenames = [
    '10m_dem_without_survey',
    '50m_dem_without_10m_dem',
    'bathysteeppt',
    'bathyleft',
    'bathyright',
]
point_filenames = [
    'field_survey_north',
    'field_survey_south',
    'clipped_bathymetry_final',
    'coast_points_final',
]

# Names of the final combined topography datasets.
combined_name = 'shark_bay_combined_elevation'
combined_small_name = 'shark_bay_combined_elevation_small'
---|
95 | |
---|
96 | |
---|
# Directory layout under the scenario tree.
anuga_dir = join(home, state, scenario, 'anuga')

# Where the source elevation data is read from.
topographies_in_dir = join(home, state, scenario,
                           'elevation_final', 'test_area')

topographies_dir = join(anuga_dir, 'topographies')    # Output dir for ANUGA

# Convert topo file locations into absolute pathnames.  Slice assignment
# mutates the existing list objects in place, so any other references to
# these lists see the updated entries.
for name_list in (ascii_grid_filenames, point_filenames):
    name_list[:] = [join(topographies_in_dir, f) for f in name_list]
---|
108 | |
---|
# Final (combined) topography files.
combined_dir_name = join(topographies_dir, combined_name)
combined_small_dir_name = join(topographies_dir, combined_small_name)

# Mesh files.
mesh_dir = join(anuga_dir, 'meshes')
mesh_name = join(mesh_dir, scenario_name)

# Polygons created with ArcGIS (csv files) and tide data.
polygons_dir = join(anuga_dir, 'polygons')
tide_dir = join(anuga_dir, 'tide_data')
---|
120 | |
---|
121 | |
---|
# Locations for boundary condition input files.
if source == 'shark_bay':
    if boundary_event == '10000':
        boundary_file_name = 'shark_bay_3867_18052007'
        boundary_dir = join(anuga_dir, 'boundaries', 'shark_bay', '1_10000')
    elif boundary_event == 'july2006':
        boundary_file_name = 'july2006'
        boundary_dir = join(anuga_dir, 'boundaries',
                            'shark_bay', 'july_2006_event')
    elif boundary_event != 'experimental':
        # 'experimental' legitimately has no boundary file (handled below).
        # Any other value used to fall through silently and produce a
        # confusing NameError when boundary_dir was first used.
        raise Exception("Unknown boundary event '%s'" % boundary_event)

if boundary_event != 'experimental':
    boundary_name = join(boundary_dir, boundary_file_name)
else:
    boundary_name = 'nil'    # Experimental runs have no boundary file
---|
139 | |
---|
# Output locations.  NOTE: these strings deliberately carry a trailing
# separator; downstream code concatenates filenames directly onto them.
output_dir = join(anuga_dir, 'outputs') + sep
output_build_time_dir = output_dir + build_time + sep
output_run_time_dir = output_dir + run_time + dir_comment + sep
output_run_time_dir_name = output_run_time_dir + scenario_name  # Used by post processing

# Gauge locations file.
gauge_name = 'gaugesv3.csv'
#gauge_name_test = 'gauge_checking_test.csv'
gauges_dir = join(anuga_dir, 'gauges') + sep
gauges_dir_name = gauges_dir + gauge_name
#gauges_dir_name_test = gauges_dir + gauge_name_test
---|
152 | |
---|
153 | #tide_dir = anuga_dir+'tide_data'+sep |
---|
154 | |
---|
155 | #buildings_filename = gauges_dir + 'pt_hedland_res.csv' |
---|
156 | #buildings_filename_out = 'pt_hedland_res_modified.csv' |
---|
157 | #buildings_filename_damage_out = 'pt_hedland_res_modified_damage.csv' |
---|
158 | #tidal_filename = tide_dir + 'pt_hedland_tide.txt' |
---|
159 | #community_filename = gauges_dir + 'CHINS_v2.csv' |
---|
160 | #community_scenario = gauges_dir + 'community_pt_hedland.csv' |
---|
161 | |
---|
162 | # clipping region to make DEM (pts file) from onshore data |
---|
163 | #eastingmin = 594000 |
---|
164 | #eastingmax = 715000 |
---|
165 | #northingmin = 7720000 |
---|
166 | #northingmax = 7880000 |
---|
167 | |
---|
# Geographic extent for ferret2sww (converted to decimal degrees).
south = degminsec2decimal_degrees(-20, 30, 0)
north = degminsec2decimal_degrees(-17, 10, 0)
west = degminsec2decimal_degrees(117, 0, 0)
east = degminsec2decimal_degrees(120, 0, 0)
---|
173 | |
---|
# UTM region to export (used from export_results.py).
e_min_area = 713500    # Eastings min
e_max_area = 726350    # Eastings max
n_min_area = 7100890   # Northings min
n_max_area = 7111250   # Northings max
---|
179 | |
---|
180 | #export_region = [[e_min_area,n_min_area],[e_min_area,n_max_area], |
---|
181 | # [e_max_area,n_max_area],[e_max_area,n_min_area]] |
---|
182 | |
---|
# FIXME (Ole): It is bad to read in polygons in project.py.  If a file
# does not exist, project.py cannot be imported.

# Bounding polygon for the whole model.
bounding_polygon = read_polygon(join(polygons_dir, 'boundary_extent_small.csv'))
res_bounding_polygon = 500000 * res_factor

# This is a restricted boundary used for experiments.
# Cannot use URS boundary on this!
small_bounding_polygon = read_polygon(join(polygons_dir, 'small_boundary.csv'))
# The original code re-assigned res_bounding_polygon (with the identical
# value) here -- almost certainly a copy-paste slip intending a resolution
# for the small polygon.  Provide the intended name; the old name above
# keeps its value, so existing callers are unaffected.
res_small_bounding_polygon = 500000 * res_factor
---|
194 | |
---|
195 | |
---|
# Interior mesh regions: polygon file basename -> base triangle resolution.
# The resolutions are later scaled by res_factor.
interior_regions_and_resolutions = {
    'study_area':            300000,
    'bernier_dorre_islands': 250000,
    'map_area':              10000,
    'bay_area':              4000,
    'south_bank_buffer':     2000,
    'south_coast_polygon_1': 1000,
    'south_coast_polygon_2': 1000,
    'north_bay_coastline':   1000,
    'tsunami_approach_area': 50,
    'inundated_area':        10,
    'exclusion_1':           10000000,
    'exclusion_2':           10000000,
    'exclusion_3':           10000000,
}
---|
211 | |
---|
212 | |
---|
213 | #poly_exclusion_area1 = read_polygon(join(polygons_dir, 'exclusion_area1.csv')) |
---|
214 | #res_exclusion_area1 = 2000000*res_factor |
---|
215 | # |
---|
216 | #interior_regions = [[poly_inundated_area, res_inundated_area], |
---|
217 | # [poly_tsunami_approach_area, res_tsunami_approach_area], |
---|
218 | # [poly_bay_area, res_bay_area], |
---|
219 | # [poly_map_area, res_map_area]] |
---|
220 | #[poly_model_area, res_model_area], |
---|
221 | #[poly_exclusion_area1, res_exclusion_area1]] |
---|
222 | |
---|
223 | # FIXME (Ole): What if tags are wrong? |
---|
224 | |
---|
225 | #Large bounding poly |
---|
226 | #boundary_tags = {'back': [1, 2, 3, 4, 5], |
---|
227 | # 'side': [0, 6], |
---|
228 | # 'ocean': [7, 8, 9, 10, 11]} |
---|
229 | |
---|
230 | |
---|
# Small bounding polygon segment tags
# (counter-clockwise, starting in the upper right corner).
boundary_tags = {
    'tide': [0, 1, 2],
    'ocean': [3, 4, 5, 6],
}

# Very small bounding polygon (experimental boundary): segment 11 is the
# only ocean segment; every other segment is tidal.
boundary_tags_small = {
    'tide': [i for i in range(14) if i != 11],
    'ocean': [11],
}
---|
239 | |
---|
240 | |
---|
241 | |
---|
# Read interior region polygons and pair each with its scaled resolution.
# (FIXME: move this to new file: read_structures)

interior_regions = []    # Consider using dictionary for mesh interface.
for area_name, base_resolution in interior_regions_and_resolutions.items():
    file_name = join(polygons_dir, area_name + '.csv')

    # Single-argument print(...) behaves identically under Python 2 and 3,
    # unlike the old print-statement form used here previously.
    print('Reading polygon from ' + file_name)
    polygon = read_polygon(file_name)

    interior_regions.append([polygon, base_resolution * res_factor])


# Estimated minimum number of mesh triangles given these resolutions.
trigs_min = number_mesh_triangles(interior_regions,
                                  bounding_polygon,
                                  res_bounding_polygon)
---|
260 | |
---|
261 | |
---|
262 | |
---|
# Polygon used when resetting the initial condition (temporary measure).
poly_tsunami_approach_area = read_polygon(join(polygons_dir,
                                               'tsunami_approach_area.csv'))

# Onshore polygon for the initial condition.
onshore_polygon = read_polygon(join(topographies_in_dir,
                                    'initial_condition.txt'))
---|
271 | |
---|