"""Common filenames and locations for topographic data, meshes and outputs.
Also includes origin for slump scenario.

All filenames are given without extension
"""

# Standard library
import sys
from os import sep, environ, getenv, getcwd, umask
from os.path import expanduser, basename, join
from time import localtime, strftime, gmtime

# ANUGA
from anuga.utilities.polygon import (read_polygon, plot_polygons,
                                     polygon_area, is_inside_polygon,
                                     number_mesh_triangles)
from anuga.coordinate_transforms.redfearn import degminsec2decimal_degrees
from anuga.utilities.system_tools import get_user_name, get_host_name
codename = 'project.py'    # FIXME can be obtained automatically as __file__

# Root of all scenario data; requires the INUNDATIONHOME environment
# variable to be set (getenv returns None otherwise and join will fail).
home = join(getenv('INUNDATIONHOME'), 'data')    # Location of Data
user = get_user_name()
host = get_host_name()

# Needed when running using mpirun: mpirun doesn't inherit umask from
# .bashrc.  NOTE: was umask(002) (Python-2-only octal literal); 0o002 is
# the same value and valid in Python 2.6+ and Python 3.
umask(0o002)

# Making assumptions about the location of scenario data
state = 'western_australia'
scenario_name = 'shark_bay'
scenario = 'shark_bay_tsunami_scenario'

# Time stamp used to build unique output directory names.
# (Name 'time' intentionally kept for backward compatibility with other
# scripts that read project.time, even though it shadows the module name.)
time = strftime('%Y%m%d_%H%M%S', gmtime())    # gets time for new dir
build_time = time + '_build'
run_time = time + '_run'

# Static water level offset applied to the model [m].
#tide = -3.9
tide = 0.0
#tide = 3.6
# Maybe will try to make project a class to allow these parameters to be
# passed in.
alpha = 0.1          # interpolation smoothing parameter
friction = 0.01      # Manning friction coefficient
finaltime = 25000    # simulation end time [s]
starttime = 3600     # simulation start time [s]
setup = 'final'      # resolution preset: 'trial', 'basic' or 'final'
source = 'shark_bay'

# Resolution/timing presets: res_factor scales every mesh resolution
# defined below; time_thinning and yieldstep control boundary-input and
# output frequency.  (Originally Python 2 print statements and a chain of
# independent ifs; an unknown setup silently left these names undefined.)
if setup == 'trial':
    print('trial')
    res_factor = 10
    time_thinning = 48
    yieldstep = 240
elif setup == 'basic':
    print('basic')
    res_factor = 4
    time_thinning = 12
    yieldstep = 120
elif setup == 'final':
    print('final')
    res_factor = 1.1
    time_thinning = 4
    yieldstep = 60
else:
    # Fail fast instead of raising a NameError much later on.
    raise ValueError("Unknown setup value: %r" % setup)
# Suffix identifying this run in the output directory name.
#dir_comment='_'+setup+'_'+str(tide)+'_'+str(source)+'_'+str(user)+'_'+'nobdry'
dir_comment = '_' + setup + '_' + str(tide) + '_' + str(source) + '_' + str(user)

# elevation data filenames
ascii_grid_filenames = ['10m_dem_without_survey', '50m_dem_without_10m_dem']
point_filenames = ['field_survey_north', 'field_survey_south',
                   'clipped_bathymetry_final', 'coast_points_final']

# final topo name
combined_name = 'shark_bay_combined_elevation'
combined_small_name = 'shark_bay_combined_elevation_small'

anuga_dir = join(home, state, scenario, 'anuga')

topographies_in_dir = join(home, state, scenario,
                           'elevation_final', 'test_area')

topographies_dir = join(anuga_dir, 'topographies')    # Output dir for ANUGA

# Convert topo file locations into absolute pathnames.  Slice assignment
# keeps the mutation in place so the list objects retain their identity.
for name_list in [ascii_grid_filenames, point_filenames]:
    name_list[:] = [join(topographies_in_dir, name) for name in name_list]

# final topo files
combined_dir_name = join(topographies_dir, combined_name)
combined_small_dir_name = join(topographies_dir, combined_small_name)

mesh_dir = join(anuga_dir, 'meshes')
mesh_name = join(mesh_dir, scenario_name)

polygons_dir = join(anuga_dir, 'polygons')    # Created with ArcGIS (csv files)
tide_dir = join(anuga_dir, 'tide_data')
# Locations for boundary conditions.  Only the 'shark_bay' source is
# currently defined; NOTE(review): any other source value would leave
# boundary_dir/boundary_file_name undefined and raise a NameError below.
if source == 'shark_bay':
    boundary_file_name = 'shark_bay_3867_18052007'
    boundary_dir = join(anuga_dir, 'boundaries', 'shark_bay', '1_10000')

boundary_name = join(boundary_dir, boundary_file_name)

# Output locations.  Trailing separators are kept deliberately: downstream
# code concatenates filenames onto these strings directly.
output_dir = join(anuga_dir, 'outputs') + sep
output_build_time_dir = output_dir + build_time + sep
output_run_time_dir = output_dir + run_time + dir_comment + sep
output_run_time_dir_name = output_run_time_dir + scenario_name    # Used by post processing
# gauges (not used in this scenario; retained for reference)
#gauge_name = 'gauge_location_port_hedland.csv'
#gauge_name_test = 'gauge_checking_test.csv'
#gauges_dir = anuga_dir+'gauges'+sep
#gauges_dir_name = gauges_dir + gauge_name
#gauges_dir_name_test = gauges_dir + gauge_name_test

#tide_dir = anuga_dir+'tide_data'+sep

#buildings_filename = gauges_dir + 'pt_hedland_res.csv'
#buildings_filename_out = 'pt_hedland_res_modified.csv'
#buildings_filename_damage_out = 'pt_hedland_res_modified_damage.csv'
#tidal_filename = tide_dir + 'pt_hedland_tide.txt'
#community_filename = gauges_dir + 'CHINS_v2.csv'
#community_scenario = gauges_dir + 'community_pt_hedland.csv'

# clipping region to make DEM (pts file) from onshore data
#eastingmin = 594000
#eastingmax = 715000
#northingmin = 7720000
#northingmax = 7880000

# Geographic bounding box for ferret2sww, converted from
# degrees/minutes/seconds to decimal degrees.
south = degminsec2decimal_degrees(-20, 30, 0)
north = degminsec2decimal_degrees(-17, 10, 0)
west = degminsec2decimal_degrees(117, 0, 0)
east = degminsec2decimal_degrees(120, 0, 0)

# region to export (used from export_results.py)
#e_min_area = 648000 # Eastings min
#e_max_area = 675000
#n_min_area = 7745000
#n_max_area = 7761000

#export_region = [[e_min_area,n_min_area],[e_min_area,n_max_area],
#                 [e_max_area,n_max_area],[e_max_area,n_min_area]]
# Bounding polygon for the whole study area.
bounding_polygon = read_polygon(join(polygons_dir, 'boundary_extent.csv'))
res_bounding_polygon = 250000 * res_factor

# Interior regions: each is a (polygon, maximum triangle area) pair,
# listed from finest resolution to coarsest.  All resolutions scale with
# res_factor, chosen by the 'setup' preset above.
poly_inundated_area = read_polygon(join(polygons_dir, 'inundated_area.csv'))
res_inundated_area = 200 * res_factor

poly_tsunami_approach_area = read_polygon(join(polygons_dir,
                                               'tsunami_approach_area.csv'))
res_tsunami_approach_area = 500 * res_factor

poly_bay_area = read_polygon(join(polygons_dir, 'bay_area.csv'))
res_bay_area = 1000 * res_factor

poly_map_area = read_polygon(join(polygons_dir, 'map_area.csv'))
res_map_area = 5000 * res_factor

poly_model_area = read_polygon(join(polygons_dir, 'model_area.csv'))
res_model_area = 200000 * res_factor

poly_exclusion_area1 = read_polygon(join(polygons_dir, 'exclusion_area1.csv'))
res_exclusion_area1 = 2000000 * res_factor

interior_regions = [[poly_inundated_area, res_inundated_area],
                    [poly_tsunami_approach_area, res_tsunami_approach_area],
                    [poly_bay_area, res_bay_area],
                    [poly_map_area, res_map_area],
                    [poly_model_area, res_model_area],
                    [poly_exclusion_area1, res_exclusion_area1]]

# Estimate of how many triangles the mesh generator will produce.
trigs_min = number_mesh_triangles(interior_regions,
                                  bounding_polygon,
                                  res_bounding_polygon)

# Polygon delineating the onshore area used for the initial condition.
onshore_polygon = read_polygon(join(topographies_in_dir,
                                    'initial_condition.txt'))