source: anuga_work/production/shark_bay_2007/project.py @ 4689

Last change on this file was 4689, checked in by ole, 17 years ago

Steep point back log

File size: 8.4 KB
[4505]1"""Common filenames and locations for topographic data, meshes and outputs.
2Also includes origin for slump scenario.
[4552]3
4All filenames are given without extension
[4505]5"""
6
[4552]7from os import sep, environ, getenv, getcwd, umask
8from os.path import expanduser, basename, join
[4505]9from anuga.utilities.polygon import read_polygon, plot_polygons, polygon_area, is_inside_polygon, number_mesh_triangles
10import sys
11from anuga.coordinate_transforms.redfearn import degminsec2decimal_degrees
12from time import localtime, strftime, gmtime
13from anuga.utilities.system_tools import get_user_name, get_host_name

codename = 'project.py' # FIXME: can be obtained automatically as __file__
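# A minimal sketch of the __file__ approach suggested by the FIXME above
# (assumes the module is imported normally; basename is already imported):
#codename = basename(__file__)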

home = join(getenv('INUNDATIONHOME'), 'data') # Location of data
user = get_user_name()
host = get_host_name()
# Needed when running under mpirun, which doesn't inherit umask from .bashrc
umask(002)
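
# Note that getenv returns None if INUNDATIONHOME is unset, which makes the
# join above fail with an obscure error. A sketch of a possible guard
# (the message wording is illustrative, not part of the original):
#if getenv('INUNDATIONHOME') is None:
#    raise Exception('Environment variable INUNDATIONHOME must be set')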

# Making assumptions about the location of scenario data
state = 'western_australia'
scenario_name = 'shark_bay'
scenario = 'shark_bay_tsunami_scenario'

# Time stamps for the output directories
time = strftime('%Y%m%d_%H%M%S', gmtime()) # Gets time for new dir
build_time = time + '_build'
run_time = time + '_run'
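# For example, a run started at 14:30:00 UTC on 22 May 2008 would give
# time == '20080522_143000' and run_time == '20080522_143000_run'.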

#tide = 0.0  # MSL
tide = 0.79  # HAT average between Denham and Carnarvon
#tide = -0.80 # Estimated LAT

# For frequency response study
amplitude = 0.5
period = 3000

#momentum_scale = 50 # Experiment
momentum_scale = 1 # Experiment

# Maybe make project a class so these parameters can be passed in.
alpha = 0.1
friction = 0.01
finaltime = 40000
starttime = 8000 # Action starts around 9000
setup = 'final'
#setup = 'trial'
source = 'shark_bay'

boundary_event = 'july2006'
#boundary_event = '10000'
#boundary_event = 'experimental'

if setup == 'trial':
    print 'trial'
    res_factor = 10
    time_thinning = 48
    yieldstep = 240
elif setup == 'basic':
    print 'basic'
    res_factor = 4
    time_thinning = 12
    yieldstep = 120
elif setup == 'final':
    print 'final'
    res_factor = 1.0
    time_thinning = 1
    yieldstep = 10
else:
    # Guard against res_factor etc. being undefined further down
    raise Exception('Unknown setup: %s' % setup)
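
# res_factor scales every mesh resolution defined below, so e.g. the
# 'trial' setup (res_factor = 10) allows triangles ten times larger in
# area than the 'final' setup, giving a much coarser, faster mesh.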


#dir_comment = '_'+setup+'_'+str(tide)+'_'+str(source)+'_'+\
#              str(user)+'_'+boundary_event+'_X'+str(momentum_scale)+'_fixedbathy'

dir_comment = '_'+setup+'_'+str(tide)+'_'+str(source)+'_'+\
              str(user)+'_'+boundary_event

if boundary_event == 'experimental':
    dir_comment += '_A'+str(amplitude)+'_T'+str(period)

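# With the settings above this gives, for example (user name will vary):
# dir_comment == '_final_0.79_shark_bay_<user>_july2006'
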
# Elevation data filenames
ascii_grid_filenames = ['10m_dem_without_survey', '50m_dem_without_10m_dem',
                        'bathysteeppt', 'bathyleft', 'bathyright']
point_filenames = ['field_survey_north', 'field_survey_south',
                   'clipped_bathymetry_final', 'coast_points_final']

# Final topo names
combined_name = 'shark_bay_combined_elevation'
combined_small_name = 'shark_bay_combined_elevation_small'


anuga_dir = join(home, state, scenario, 'anuga')

topographies_in_dir = join(home, state, scenario,
                           'elevation_final', 'test_area')

topographies_dir = join(anuga_dir, 'topographies') # Output dir for ANUGA

# Convert topo file locations into absolute pathnames
for filename_list in [ascii_grid_filenames, point_filenames]:
    for i, filename in enumerate(filename_list):
        filename_list[i] = join(topographies_in_dir, filename)

# Final topo files
combined_dir_name = join(topographies_dir, combined_name)
combined_small_dir_name = join(topographies_dir, combined_small_name)


mesh_dir = join(anuga_dir, 'meshes')
mesh_name = join(mesh_dir, scenario_name)


polygons_dir = join(anuga_dir, 'polygons') # Created with ArcGIS (csv files)
tide_dir = join(anuga_dir, 'tide_data')


# Locations for boundary conditions
if source == 'shark_bay':
    if boundary_event == '10000':
        boundary_file_name = 'shark_bay_3867_18052007'
        boundary_dir = join(anuga_dir, 'boundaries', 'shark_bay', '1_10000')
    elif boundary_event == 'july2006':
        boundary_file_name = 'july2006'
        boundary_dir = join(anuga_dir, 'boundaries',
                            'shark_bay', 'july_2006_event')
    elif boundary_event == 'experimental':
        pass # boundary_name is set to 'nil' below
    else:
        raise Exception('Unknown boundary event specified: %s' % boundary_event)

if boundary_event != 'experimental':
    boundary_name = join(boundary_dir, boundary_file_name)
else:
    boundary_name = 'nil'
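
# For the default july2006 event this resolves to (schematically):
# boundary_name == <anuga_dir>/boundaries/shark_bay/july_2006_event/july2006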

# Output locations
output_dir = join(anuga_dir, 'outputs') + sep
output_build_time_dir = output_dir + build_time + sep
output_run_time_dir = output_dir + run_time + dir_comment + sep
output_run_time_dir_name = output_run_time_dir + scenario_name # Used by post processing

# Gauges
gauge_name = 'gaugesv2.csv'
#gauge_name_test = 'gauge_checking_test.csv'
gauges_dir = anuga_dir + sep + 'gauges' + sep
gauges_dir_name = gauges_dir + gauge_name
#gauges_dir_name_test = gauges_dir + gauge_name_test

#tide_dir = anuga_dir+'tide_data'+sep

#buildings_filename = gauges_dir + 'pt_hedland_res.csv'
#buildings_filename_out = 'pt_hedland_res_modified.csv'
#buildings_filename_damage_out = 'pt_hedland_res_modified_damage.csv'
#tidal_filename = tide_dir + 'pt_hedland_tide.txt'
#community_filename = gauges_dir + 'CHINS_v2.csv'
#community_scenario = gauges_dir + 'community_pt_hedland.csv'

# Clipping region to make DEM (pts file) from onshore data
#eastingmin = 594000
#eastingmax = 715000
#northingmin = 7720000
#northingmax = 7880000

# For ferret2sww
south = degminsec2decimal_degrees(-20, 30, 0)
north = degminsec2decimal_degrees(-17, 10, 0)
west = degminsec2decimal_degrees(117, 00, 0)
east = degminsec2decimal_degrees(120, 00, 0)
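# degminsec2decimal_degrees applies the standard conversion
# degrees + minutes/60 + seconds/3600, with the sign of the degrees
# carried over to the result, e.g. south = -(20 + 30/60.0) = -20.5
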
# Region to export (used from export_results.py)
e_min_area = 713500   # Eastings min
e_max_area = 726350   # Eastings max
n_min_area = 7100890  # Northings min
n_max_area = 7111250  # Northings max

#export_region = [[e_min_area, n_min_area], [e_min_area, n_max_area],
#                 [e_max_area, n_max_area], [e_max_area, n_min_area]]

# FIXME (Ole): It is bad to read in polygons in project.py; if a file does
# not exist, project.py cannot be imported.

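# One possible way to address the FIXME above is to guard each read so a
# missing file gives a clear error instead of breaking the import; a
# minimal sketch (the helper name and message are ours, not ANUGA's):
#from os.path import exists
#
#def read_polygon_checked(filename):
#    if not exists(filename):
#        raise Exception('Polygon file %s not found' % filename)
#    return read_polygon(filename)
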
# Bounding polygon
bounding_polygon = read_polygon(join(polygons_dir, 'boundary_extent_small.csv'))
res_bounding_polygon = 500000*res_factor

# This is a restricted boundary used for experiments.
# Cannot use the URS boundary on this!
small_bounding_polygon = read_polygon(join(polygons_dir, 'small_boundary.csv'))


# Interior regions and their base resolutions
# (maximum triangle areas, scaled by res_factor below)
interior_regions_and_resolutions = {
    'study_area': 300000,
    'bernier_dorre_islands': 250000,
    'map_area': 10000,
    'bay_area': 4000,
    'south_bank_buffer': 2000,
    'south_coast_polygon_1': 1000,
    'south_coast_polygon_2': 1000,
    'north_bay_coastline': 1000,
    'tsunami_approach_area': 50,
    'inundated_area': 10,
    'exclusion_1': 10000000,
    'exclusion_2': 10000000,
    'exclusion_3': 10000000}


#poly_exclusion_area1 = read_polygon(join(polygons_dir, 'exclusion_area1.csv'))
#res_exclusion_area1 = 2000000*res_factor
#
#interior_regions = [[poly_inundated_area, res_inundated_area],
#                    [poly_tsunami_approach_area, res_tsunami_approach_area],
#                    [poly_bay_area, res_bay_area],
#                    [poly_map_area, res_map_area],
#                    [poly_model_area, res_model_area],
#                    [poly_exclusion_area1, res_exclusion_area1]]

# FIXME (Ole): What if tags are wrong?

# Large bounding poly
#boundary_tags = {'back': [1, 2, 3, 4, 5],
#                 'side': [0, 6],
#                 'ocean': [7, 8, 9, 10, 11]}


# Small bounding poly
# (counter clockwise starting in upper right corner)
boundary_tags = {'tide': [0, 1, 2],
                 'ocean': [3, 4, 5, 6]}
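# The integers are indices of the bounding polygon's segments (segment i
# runs from vertex i to vertex i+1), so each tag names a stretch of the
# boundary for later assignment of boundary conditions.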

# Very small bounding poly (experimental boundary)
boundary_tags_small = {'tide': [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 13],
                       'ocean': [11]}


# Read filenames etc (FIXME: move this to a new file: read_structures)

interior_regions = [] # Consider using a dictionary for the mesh interface.
for area_name, base_resolution in interior_regions_and_resolutions.items():
    file_name = join(polygons_dir, area_name + '.csv')

    print 'Reading polygon from', file_name
    polygon = read_polygon(file_name)

    interior_regions.append([polygon, base_resolution*res_factor])



# Estimate the minimum number of mesh triangles implied by these resolutions
trigs_min = number_mesh_triangles(interior_regions,
                                  bounding_polygon,
                                  res_bounding_polygon)

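# Sketch of how these structures are typically combined when building the
# mesh. create_mesh_from_regions is ANUGA's mesh interface; the exact
# import location and argument names may differ between ANUGA versions:
#from anuga.pmesh.mesh_interface import create_mesh_from_regions
#
#create_mesh_from_regions(bounding_polygon,
#                         boundary_tags=boundary_tags,
#                         maximum_triangle_area=res_bounding_polygon,
#                         filename=mesh_name + '.msh',
#                         interior_regions=interior_regions)
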
# For use when resetting the initial condition (this is temporary)
poly_tsunami_approach_area = read_polygon(
    join(polygons_dir, 'tsunami_approach_area.csv'))


onshore_polygon = read_polygon(join(topographies_in_dir,
                                    'initial_condition.txt'))