source: anuga_work/production/shark_bay_2007/project.py @ 5578

Last change on this file since 5578 was 4885, checked in by ole, 17 years ago

Work on Shark Bay embayment

File size: 8.4 KB
Line 
1"""Common filenames and locations for topographic data, meshes and outputs.
2Also includes origin for slump scenario.
3
4All filenames are given without extension
5"""
6
7from os import sep, environ, getenv, getcwd, umask
8from os.path import expanduser, basename, join
9from anuga.utilities.polygon import read_polygon, plot_polygons, polygon_area, is_inside_polygon, number_mesh_triangles
10import sys
11from anuga.coordinate_transforms.redfearn import degminsec2decimal_degrees
12from time import localtime, strftime, gmtime
13from anuga.utilities.system_tools import get_user_name, get_host_name
14
15#codename = 'project.py' # FIXME can be obtained automatically as __file__
16
17home = join(getenv('INUNDATIONHOME'), 'data') # Location of Data   
18user = get_user_name()
19host = get_host_name()
20
21#needed when running using mpirun, mpirun doesn't inherit umask from .bashrc
22umask(002)
23
# Assumed layout of the scenario data
state = 'western_australia'
scenario_name = 'shark_bay'
scenario = 'shark_bay_tsunami_scenario'

# Timestamp used to build unique directory names for this run
time = strftime('%Y%m%d_%H%M%S', gmtime())
build_time = '%s_build' % time
run_time = '%s_run' % time

# Tidal level [m]
#tide = 0.0   # MSL
tide = 0.79   # HAT average between Denham and Carnarvon
#tide = -0.80 # Estimated LAT

# Forcing parameters for the frequency response study
amplitude = 0.5   # [m]
period = 1800     # [s]
41
#momentum_scale=50 # Experiment
momentum_scale = 1 # Experiment

# Model parameters.
# Maybe will try to make project a class to allow these parameters to be passed in.
alpha = 0.1        # Mesh data-fitting smoothing parameter
friction = 0.01    # Manning friction
finaltime = 40000  # Simulation end time [s]
starttime = 8000   # Action starts around 9000

setup = 'final'
#setup = 'trial'
source = 'shark_bay'

#boundary_event = 'july2006'
#boundary_event = '10000'
boundary_event = 'experimental'

# Resolution and runtime knobs derived from the chosen setup.
# An if/elif/else chain (rather than three independent ifs) makes an
# unrecognised setup fail immediately here, instead of leaving
# res_factor/time_thinning/yieldstep undefined and causing an obscure
# NameError much later.
if setup == 'trial':
    print('trial')
    res_factor = 10
    time_thinning = 48
    yieldstep = 240
elif setup == 'basic':
    print('basic')
    res_factor = 4
    time_thinning = 12
    yieldstep = 120
elif setup == 'final':
    print('final')
    res_factor = 1.0
    time_thinning = 1
    yieldstep = 10
else:
    raise Exception('Unknown setup: %s' % setup)
73
74
75
# Encode the key run parameters in the output directory name so every run
# is uniquely identifiable.
#dir_comment='_'+setup+'_'+str(tide)+'_'+str(source)+'_'+\
#             str(user)+'_'+boundary_event+'_X'+str(momentum_scale)+'_fixedbathy'

dir_comment = '_%s_%s_%s_%s_%s' % (setup, tide, source,
                                   user, boundary_event)

if boundary_event == 'experimental':
    # Also record the forcing amplitude and period for this study
    dir_comment += '_A%s_T%s' % (amplitude, period)
86
# Raw elevation data sets (filenames given without extension; they are
# converted to absolute paths further down in this module)
ascii_grid_filenames = [
    '10m_dem_without_survey',
    '50m_dem_without_10m_dem',
    'bathysteeppt',
    'bathyleft',
    'bathyright',
]
point_filenames = [
    'field_survey_north',
    'field_survey_south',
    'clipped_bathymetry_final',
    'coast_points_final',
]

# Names of the final combined topographies
combined_name = 'shark_bay_combined_elevation'
combined_small_name = combined_name + '_small'
96
97
# Directory layout under the ANUGA data area
anuga_dir = join(home, state, scenario, 'anuga')

# Input elevation data
topographies_in_dir = join(home, state, scenario,
                           'elevation_final', 'test_area')

# Output dir for ANUGA
topographies_dir = join(anuga_dir, 'topographies')

# Make the topo filenames absolute, replacing each list's contents in
# place (slice assignment keeps list identity for any held references)
for filename_list in [ascii_grid_filenames, point_filenames]:
    filename_list[:] = [join(topographies_in_dir, name)
                        for name in filename_list]

# Final (combined) topo files
combined_dir_name = join(topographies_dir, combined_name)
combined_small_dir_name = join(topographies_dir, combined_small_name)

# Mesh location
mesh_dir = join(anuga_dir, 'meshes')
mesh_name = join(mesh_dir, scenario_name)

polygons_dir = join(anuga_dir, 'polygons')  # Created with ArcGIS (csv files)
tide_dir = join(anuga_dir, 'tide_data')
121
122
# Locations for boundary conditions.
# The 'experimental' event uses generated forcing rather than a
# pre-computed boundary file, so no boundary_dir is needed for it.
if source == 'shark_bay':
    if boundary_event == '10000':
        boundary_file_name = 'shark_bay_3867_18052007'
        boundary_dir = join(anuga_dir, 'boundaries', 'shark_bay', '1_10000')
    elif boundary_event == 'july2006':
        boundary_file_name = 'july2006'
        boundary_dir = join(anuga_dir, 'boundaries',
                            'shark_bay', 'july_2006_event')
    elif boundary_event == 'experimental':
        pass
    else:
        # Fail here with a clear message rather than with a NameError on
        # boundary_dir a few lines further down.
        raise Exception('Unknown boundary event specified: %s'
                        % boundary_event)

if boundary_event != 'experimental':
    boundary_name = join(boundary_dir, boundary_file_name)
else:
    boundary_name = 'nil'  # No boundary file for the experimental case
140
# Output locations. Note the trailing separators are kept deliberately:
# downstream code concatenates further path components onto these strings.
output_dir = join(anuga_dir, 'outputs') + sep
output_build_time_dir = output_dir + build_time + sep
output_run_time_dir = output_dir + run_time + dir_comment + sep
output_run_time_dir_name = output_run_time_dir + scenario_name  # Used by post processing

# Gauges
gauge_name = 'gaugesv3.csv'
#gauge_name_test = 'gauge_checking_test.csv'
gauges_dir = anuga_dir + sep + 'gauges' + sep
gauges_dir_name = gauges_dir + gauge_name
#gauges_dir_name_test = gauges_dir + gauge_name_test

#tide_dir = anuga_dir+'tide_data'+sep

# Retired Pt Hedland inputs, kept for reference:
#buildings_filename = gauges_dir + 'pt_hedland_res.csv'
#buildings_filename_out = 'pt_hedland_res_modified.csv'
#buildings_filename_damage_out = 'pt_hedland_res_modified_damage.csv'
#tidal_filename = tide_dir + 'pt_hedland_tide.txt'
#community_filename = gauges_dir + 'CHINS_v2.csv'
#community_scenario = gauges_dir + 'community_pt_hedland.csv'
162
# Clipping region to make DEM (pts file) from onshore data
#eastingmin = 594000
#eastingmax = 715000
#northingmin = 7720000
#northingmax = 7880000

# Geographic extent for ferret2sww, in decimal degrees
south = degminsec2decimal_degrees(-20, 30, 0)
north = degminsec2decimal_degrees(-17, 10, 0)
west = degminsec2decimal_degrees(117, 0, 0)
east = degminsec2decimal_degrees(120, 0, 0)

# UTM region to export (used from export_results.py)
e_min_area = 713500   # Eastings min
e_max_area = 726350   # Eastings max
n_min_area = 7100890  # Northings min
n_max_area = 7111250  # Northings max

#export_region = [[e_min_area, n_min_area], [e_min_area, n_max_area],
#                 [e_max_area, n_max_area], [e_max_area, n_min_area]]
183                 
#FIXME (Ole): It is bad to read in polygons in project.py. If a file does not exist
#project.py cannot be imported

# Bounding polygon for the mesh, with its base resolution
# (scaled by res_factor chosen via 'setup' above)
bounding_polygon = read_polygon(join(polygons_dir, 'boundary_extent_small.csv'))
res_bounding_polygon = 500000*res_factor

# This is a restricted boundary used for experiments.
# Cannot use URS boundary on this!
small_bounding_polygon = read_polygon(join(polygons_dir, 'small_boundary.csv'))
# NOTE(review): this re-assigns res_bounding_polygon with an identical value;
# presumably a separate res_small_bounding_polygon was intended -- confirm.
res_bounding_polygon = 500000*res_factor
195
196
# Interior mesh regions mapped to their base resolutions (maximum
# triangle area); each is scaled by res_factor when the mesh regions
# are assembled below.
interior_regions_and_resolutions = dict(
    study_area=300000,
    bernier_dorre_islands=250000,
    map_area=10000,
    bay_area=4000,
    south_bank_buffer=2000,
    south_coast_polygon_1=1000,
    south_coast_polygon_2=1000,
    north_bay_coastline=1000,
    tsunami_approach_area=50,
    inundated_area=10,
    exclusion_1=10000000,
    exclusion_2=10000000,
    exclusion_3=10000000)
212
213
# Retired hand-assembled interior regions, kept for reference:
#poly_exclusion_area1 = read_polygon(join(polygons_dir, 'exclusion_area1.csv'))
#res_exclusion_area1 = 2000000*res_factor
#
#interior_regions = [[poly_inundated_area, res_inundated_area],
#                    [poly_tsunami_approach_area, res_tsunami_approach_area],
#                    [poly_bay_area, res_bay_area],
#                    [poly_map_area, res_map_area]]
                    #[poly_model_area, res_model_area],
                    #[poly_exclusion_area1, res_exclusion_area1]]

# FIXME (Ole): What if tags are wrong?

# Large bounding poly (no longer used)
#boundary_tags = {'back': [1, 2, 3, 4, 5],
#                 'side': [0, 6],
#                 'ocean': [7, 8, 9, 10, 11]}

# Small bounding poly, segment indices counter clockwise starting in the
# upper right corner
boundary_tags = dict(tide=[0, 1, 2],
                     ocean=[3, 4, 5, 6])

# Very small bounding poly (experimental boundary)
boundary_tags_small = dict(
    tide=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 13],
    ocean=[11])
240
241
242
# Read interior region polygons and attach their scaled resolutions
# (FIXME: move this to new file: read_structures).
# items() iterates key/value pairs in one pass instead of keys() plus a
# lookup per iteration.

interior_regions = [] # Consider using dictionary for mesh interface.
for area_name, base_resolution in interior_regions_and_resolutions.items():
    file_name = join(polygons_dir, area_name + '.csv')

    print('Reading polygon from ' + file_name)
    polygon = read_polygon(file_name)

    interior_regions.append([polygon, base_resolution*res_factor])


# Estimate of the number of mesh triangles implied by the regions above
trigs_min = number_mesh_triangles(interior_regions,
                                  bounding_polygon,
                                  res_bounding_polygon)
261
262
263
# Polygon used when resetting the initial condition (temporary measure)
poly_tsunami_approach_area = read_polygon(join(polygons_dir,
                                               'tsunami_approach_area.csv'))

# Onshore extent used when building the initial condition
onshore_polygon = read_polygon(
    join(topographies_in_dir, 'initial_condition.txt'))
272
Note: See TracBrowser for help on using the repository browser.