"""Common filenames and locations for elevation, meshes and outputs. This script is the heart of all scripts in the folder """ #------------------------------------------------------------------------------ # Import necessary modules #------------------------------------------------------------------------------ from os import sep, environ, getenv, getcwd from os.path import expanduser import sys from time import localtime, strftime, gmtime from anuga.utilities.polygon import read_polygon, plot_polygons, is_inside_polygon, number_mesh_triangles from anuga.utilities.system_tools import get_user_name, get_host_name from anuga.shallow_water.data_manager import urs2sts,create_sts_boundary from anuga.utilities.polygon import read_polygon, plot_polygons, polygon_area, is_inside_polygon #------------------------------------------------------------------------------ # Directory setup #------------------------------------------------------------------------------ # Note: INUNDATIONHOME is the inundation directory, not the data directory. home = getenv('INUNDATIONHOME') + sep +'data'+sep #Sandpit's parent diruser = get_user_name() muxhome = getenv('MUXHOME') user = get_user_name() host = get_host_name() # determines time for setting up output directories time = strftime('%Y%m%d_%H%M%S',localtime()) gtime = strftime('%Y%m%d_%H%M%S',gmtime()) build_time = time+'_build' run_time = time+'_run' #------------------------------------------------------------------------------ # Initial Conditions #------------------------------------------------------------------------------ # this section needs to be updated to reflect the modelled community. # Note, the user needs to set up the directory system accordingly state = 'australia_ph2' scenario_name = 'dampier_ph2' scenario = 'dampier' area = 'large_b10' #'small_b10' # Model specific parameters. One or all can be changed each time the # run_scenario script is executed tide = 0 #0.6 #event_number = 27283 alpha = 0.1 # smoothing parameter for mesh friction=0.01 # manning's friction coefficient starttime=0 finaltime=80000 # final time for simulation setup='final' # Final can be replaced with trial or basic. # Either will result in a coarser mesh that will allow a # faster, but less accurate, simulation. if setup =='trial': print'trial' res_factor=10 time_thinning=48 yieldstep=240 if setup =='basic': print'basic' res_factor=4 time_thinning=12 yieldstep=120 if setup =='final': print'final' res_factor=1 time_thinning=4 yieldstep=60 #------------------------------------------------------------------------------ # Output Filename #------------------------------------------------------------------------------ # Important to distinguish each run - ensure str(user) is included! # Note, the user is free to include as many parameters as desired dir_comment='_'+setup+'_'+str(tide)+'_250m_' + str(area)+'_'+str(user) #------------------------------------------------------------------------------ # Input Data #------------------------------------------------------------------------------ # elevation data used in build_perth.py # onshore data: format ascii grid with accompanying projection file onshore_name = 'grid_250m_2005' # gauges - used in get_timeseries.py gauge_name = 'gauges.csv' # BOUNDING POLYGON - used in build_boundary.py and run_perth.py respectively # NOTE: when files are put together the points must be in sequence - for ease go clockwise! 
#------------------------------------------------------------------------------
# Output Elevation Data
#------------------------------------------------------------------------------
# Output filename for elevation:
# this is a combination of all the data (utilised in build_boundary.py)
combined_name = scenario_name + '_combined_elevation_250m_' + area

#------------------------------------------------------------------------------
# Directory Structure
#------------------------------------------------------------------------------
anuga_dir = home + state + sep + scenario + sep + 'anuga' + sep
topographies_in_dir = home + state + sep + scenario + sep + 'elevation_final' + sep + 'points' + sep
topographies_dir = anuga_dir + 'topographies' + sep
polygons_dir = anuga_dir + 'polygons' + sep
tide_dir = anuga_dir + 'tide_data' + sep
boundaries_dir = anuga_dir + 'boundaries' + sep
output_dir = anuga_dir + 'outputs' + sep
gauges_dir = anuga_dir + 'gauges' + sep
meshes_dir = anuga_dir + 'meshes' + sep

#------------------------------------------------------------------------------
# Location of input and output data
#------------------------------------------------------------------------------
# where the input data sits
onshore_in_dir_name = topographies_in_dir + onshore_name

# where the output data sits
onshore_dir_name = topographies_dir + onshore_name

# where the combined elevation file sits
combined_dir_name = topographies_dir + combined_name

# where the mesh sits (created during run_perth.py)
meshes_dir_name = meshes_dir + scenario_name + '_' + area + '.msh'

# where the boundary ordering files sit (used within build_boundary.py)
#order_filename_dir = boundaries_dir + order_filename

# where the landward points of the boundary extent sit (used within run_perth.py)
#landward_dir = boundaries_dir + landward

# where the event sts files sit (created during build_boundary.py)
##boundaries_dir_event = boundaries_dir + str(event_number) + sep
##boundaries_dir_mux = muxhome

# where the output directories sit
output_build_time_dir = output_dir + build_time + dir_comment + sep   # used for build_perth.py
output_run_time_dir = output_dir + run_time + dir_comment + sep       # used for run_perth.py
output_run_time_dir_name = output_run_time_dir + scenario_name        # used by post processing

# where the gauge file sits
gauges_dir_name = gauges_dir + gauge_name                             # used for get_timeseries.py
##gauges_dir_name2 = gauges_dir + gauge_name2                         # used for get_timeseries.py

#------------------------------------------------------------------------------
# Interior region definitions
#------------------------------------------------------------------------------
# Initial bounding polygon for data clipping
poly_all = read_polygon(polygons_dir + 'poly_all_' + area + '.csv')
res_poly_all = 125000*res_factor

interior_regions = []

trigs_min = number_mesh_triangles(interior_regions, poly_all, res_poly_all)
print 'min estimated number of triangles', trigs_min
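#------------------------------------------------------------------------------
# Usage note (sketch)
#------------------------------------------------------------------------------
# A minimal sketch of how the other scripts in this folder pick up these
# shared names, assuming this module is saved as project.py alongside them
# (the filename is an assumption; build_perth.py, run_perth.py and
# get_timeseries.py are the scripts referred to in the comments above):
#
#     import project
#
#     elevation_file = project.combined_dir_name
#     mesh_file = project.meshes_dir_name
#     outputs = project.output_run_time_dir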