Changeset 5759


Ignore:
Timestamp:
Sep 12, 2008, 11:33:39 AM (16 years ago)
Author:
kristy
Message:

Cleaned up all of Perth Files

Location:
anuga_work/production/perth
Files:
2 added
4 deleted
5 edited

Legend:

Unmodified
Added
Removed
  • anuga_work/production/perth/build_perth.py

    r5753 r5759  
    77The scenario is defined by a triangular mesh created from project.polygon,
    88the elevation data and a simulated submarine landslide.
     9
     10The run_perth.py is reliant on the output of this script.
    911
    1012Ole Nielsen and Duncan Gray, GA - 2005 and Jane Sexton, Nick Bartzis, GA - 2006
     
    5456print"project.combined_dir_name",project.combined_dir_name
    5557
    56 # topography directory filenames
     58# input elevation directory filenames
    5759onshore_in_dir_name = project.onshore_in_dir_name
    5860coast_in_dir_name = project.coast_in_dir_name
     
    6668offshore_in_dir_name3 = project.offshore_in_dir_name3
    6769
    68 
     70# output elevation directory filenames
    6971onshore_dir_name = project.onshore_dir_name
    7072coast_dir_name = project.coast_dir_name
     
    8890#creates pts file for onshore DEM
    8991print "creates pts file for onshore DEM"
    90 dem2pts(onshore_dir_name,
    91 #        easting_min=project.eastingmin,
    92 #        easting_max=project.eastingmax,
    93 #        northing_min=project.northingmin,
    94 #        northing_max= project.northingmax,
    95         use_cache=True,
    96         verbose=True)
     92dem2pts(onshore_dir_name ,use_cache=True,verbose=True)
    9793
    9894#creates pts file for island DEM
     
    10298dem2pts(island_dir_name3, use_cache=True, verbose=True)
    10399
     100#-------------------------------------------------------------------------------
     101# Combine datasets into project.combined_dir_name
     102#-------------------------------------------------------------------------------
     103
    104104print'create Geospatial data1 objects from topographies'
    105105G1 = Geospatial_data(file_name = onshore_dir_name + '.pts')
    106106print'create Geospatial data2 objects from topographies'
    107 G2 = Geospatial_data(file_name = coast_in_dir_name + '.txt')
     107G2 = Geospatial_data(file_name = island_dir_name + '.pts')
    108108print'create Geospatial data3 objects from topographies'
    109 G3 = Geospatial_data(file_name = island_dir_name + '.pts')
     109G3 = Geospatial_data(file_name = island_dir_name1 + '.pts')
    110110print'create Geospatial data4 objects from topographies'
    111 G4 = Geospatial_data(file_name = island_dir_name1 + '.pts')
     111G4 = Geospatial_data(file_name = island_dir_name2 + '.pts')
    112112print'create Geospatial data5 objects from topographies'
    113 G5 = Geospatial_data(file_name = island_dir_name2 + '.pts')
     113G5 = Geospatial_data(file_name = island_dir_name3 + '.pts')
    114114print'create Geospatial data6 objects from topographies'
    115 G6 = Geospatial_data(file_name = island_dir_name3 + '.pts')
     115G_coast = Geospatial_data(file_name = coast_in_dir_name)
    116116print'create Geospatial data7 objects from topographies'
    117 G_off = Geospatial_data(file_name = offshore_in_dir_name + '.txt')
     117G_off = Geospatial_data(file_name = offshore_in_dir_name)
    118118print'create Geospatial data8 objects from topographies'
    119 G_off1 = Geospatial_data(file_name = offshore_in_dir_name1 + '.txt')
     119G_off1 = Geospatial_data(file_name = offshore_in_dir_name1)
    120120print'create Geospatial data9 objects from topographies'
    121 G_off2 = Geospatial_data(file_name = offshore_in_dir_name2 + '.txt')
     121G_off2 = Geospatial_data(file_name = offshore_in_dir_name2)
    122122print'create Geospatial data10 objects from topographies'
    123 G_off3 = Geospatial_data(file_name = offshore_in_dir_name3 + '.txt')
     123G_off3 = Geospatial_data(file_name = offshore_in_dir_name3)
    124124
    125125
    126126print'add all geospatial objects'
    127 G = G1 + G2 + G3 + G4 + G5 + G6 + G_off + G_off1 + G_off2 + G_off3
     127G = G1 + G2 + G3 + G4 + G5 + G_coast + G_off + G_off1 + G_off2 + G_off3
    128128
    129129print'clip combined geospatial object by bounding polygon'
     
    134134    mkdir (project.topographies_dir)
    135135G_clipped.export_points_file(project.combined_dir_name + '.pts')
    136 #G_clipped.export_points_file(project.combined_dir_name + '.xya') - use for comparision in ARC
     136#G_clipped.export_points_file(project.combined_dir_name + '.txt') #Use for comparison in ARC
    137137
    138138import sys
  • anuga_work/production/perth/export_results_all.py

    r5669 r5759  
     1"""
     2Generates ascii grids of nominated areas - need sww file created in run_perth.py
     3If producing a grid for the enitre extent cellsize should be greater than 30m
     4If producing grids for inundation area resolution should be greater than mesh (ie ~22m)
     5"""
     6
    17import project, os
    28import sys
     
    915#time_dir = '20080530_170833_run_final_0.6_exmouth_kvanputt'
    1016#time_dir = '20080815_103442_run_final_0.0_polyline_alpha0.1_kvanputt'
    11 time_dir = '20080815_103336_run_final_0.6_polyline_alpha0.1_kvanputt'
     17time_dir = '20080909_151438_run_final_0.0_polyline_alpha0.1_kvanputt'
    1218
    13 is_parallel = False
    14 #is_parallel = True
    15 if is_parallel == True: nodes = 4
     19#cellsize = 20
     20cellsize = 30
     21#timestep = 0
     22#area = ['Geordie', 'Sorrento', 'Fremantle', 'Rockingham']
     23area = ['All']
     24#var = [1,2] # Absolute momentum and depth
     25#var = [2] # depth
     26var = [0,4] #stage and elevation
    1627
    1728
    18 cellsize = 15
    19 #cellsize = 150
    20 #timestep = 0
    2129directory = project.output_dir
    2230name1 = directory+time_dir+sep+project.scenario_name
    23 name2 = directory+time_dir+sep+'perth_time_39900'+sep+project.scenario_name+'_time_39900_0'
    24 #name2 = directory+time_dir+sep+project.scenario_name+'_time_39900_0'
     31name2 = directory+time_dir+sep+'sww2'+sep+project.scenario_name+'_time_39900_0'
     32
    2533names = [name1, name2]
    2634for name in names:
    2735
    28     area = ['Geordie', 'Sorrento', 'Fremantle', 'Rockingham']
    29 
    3036    for which_area in area:
     37        if which_area == 'All':
     38       
    3139        if which_area == 'Geordie':
    3240            easting_min = project.xminGeordie
     
    5361            northing_max = project.ymaxRockingham
    5462
    55         var = [2] # momentum and depth
    56         #var = [2] # depth
    57         #var = [0,4]
    58 
     63         
    5964        for which_var in var:
    6065            if which_var == 0:  # Stage
     
    7984                quantityname = 'elevation'  #Elevation
    8085
    81             if is_parallel == True:
    82             #    print 'is_parallel',is_parallel
    83                 for i in range(0,nodes):
    84                     namei = name + '_P%d_%d' %(i,nodes)
    85                     outnamei = outname + '_P%d_%d' %(i,nodes)
    86                     print 'start sww2dem for sww file %d' %(i)
    87                     sww2dem(namei, basename_out = outnamei,
    88                                 quantity = quantityname,
    89                                 timestep = timestep,
    90                                 cellsize = cellsize,     
    91                                 easting_min = project_grad.e_min_area,
    92                                 easting_max = project_grad.e_max_area,
    93                                 northing_min = project_grad.n_min_area,
    94                                 northing_max = project_grad.n_max_area,       
    95                                 reduction = max,
    96                                 verbose = True,
    97                                 format = 'asc')
     86            if which_area == 'All'
     87                print 'start sww2dem',which_area
     88                sww2dem(name, basename_out = outname,
     89                            quantity = quantityname,
     90                            #timestep = timestep,
     91                            cellsize = cellsize,     
     92                            reduction = max,
     93                            verbose = True,
     94                            format = 'asc')
     95
    9896            else:
    9997                print 'start sww2dem',which_area, easting_min
  • anuga_work/production/perth/get_timeseries.py

    r5702 r5759  
    11"""
    22Generate time series of nominated "gauges" read from project.gauge_filename. This
    3 is done by first running sww2csv_gauges on two different directories to make
    4 'csv' files. Then running csv2timeseries_graphs detailing the two directories
    5 containing the csv file and produces one set of graphs in the 'output_dir' containing
     3is done by running sww2csv_gauges to make 'csv' files in the 'output_dir' containing
    64the details at the gauges for both these sww files.
    7 
    8 Note, this script will only work if pylab is installed on the platform
    9 
     5Can run different sww files at the same time
     6make sure if there is a second sww file that it is placed into a folder called sww2
     7Can run different gauges at the same time - ie testing boundary index point
    108"""
    119
    1210from os import sep
    1311from anuga.abstract_2d_finite_volumes.util import sww2csv_gauges,csv2timeseries_graphs
    14 
    1512import project
    1613
     14timestamp1='20080815_103442_run_final_0.0_polyline_alpha0.1_kvanputt'
     15timestamp2='20080815_103336_run_final_0.6_polyline_alpha0.1_kvanputt'
    1716
    18 #timestamp='20080724_121200_run_trial_0.6_polyline_alpha0.1_kvanputt'
    19 #timestamp='20080724_161830_run_final_0.6_polyline_alpha0.1_kvanputt'
    20 timestamp='20080815_103442_run_final_0.0_polyline_alpha0.1_kvanputt'
    21 #timestamp='20080815_103336_run_final_0.6_polyline_alpha0.1_kvanputt'
     17timestamps = [timestamp1, timestamp2]
     18for timestamp in timestamps:
    2219
    23 filename1=project.output_dir+timestamp+sep+project.scenario_name+'.sww'
    24 filename2=project.output_dir+timestamp+sep+'perth_time_39900'+sep+project.scenario_name+'_time_39900_0.sww'
     20    filename1=project.output_dir+timestamp+sep+project.scenario_name+'.sww'
     21    filename2=project.output_dir+timestamp+sep+'sww2'+sep+project.scenario_name+'_time_39900_0.sww'
    2522
     23    filenames = [filename1, filename2]
     24    for filename in filenames:
    2625
    27 
    28 filenames = [filename1, filename2]
    29 for filename in filenames:
    30 
    31     sww2csv_gauges(filename,
    32                     project.gauges_dir_name,
    33                     #project.gauges_dir_name2,
    34                     quantities = ['stage','speed','depth','elevation'],
    35                     verbose=True)
    36                
     26        gauges = [project.gauges_dir_name, project.gauges_dir_name2]
     27        for gauge in gauges:
     28           
     29            sww2csv_gauges(filename,gauge,
     30                            quantities = ['stage','speed','depth','elevation'],
     31                            verbose=True)
     32                       
    3733
    3834   
  • anuga_work/production/perth/project.py

    r5754 r5759  
    1 # -*- coding: cp1252 -*-
    21"""Common filenames and locations for topographic data, meshes and outputs.
     2Simplified and Commented by Kristy Van Putten
    33"""
     4#------------------------------------------------------------------------------
     5# Import necessary modules
     6#------------------------------------------------------------------------------
    47
    58from os import sep, environ, getenv, getcwd
     
    811from time import localtime, strftime, gmtime
    912from anuga.utilities.polygon import read_polygon, plot_polygons, is_inside_polygon, number_mesh_triangles
    10 #from anuga.coordinate_transforms.redfearn import degminsec2decimal_degrees, convert_points_from_latlon_to_utm
    1113from anuga.utilities.system_tools import get_user_name, get_host_name
    1214from anuga.shallow_water.data_manager import urs2sts,create_sts_boundary
    1315from anuga.utilities.polygon import read_polygon, plot_polygons, polygon_area, is_inside_polygon
    1416
    15 # file and system info
    16 #---------------------------------
    17 #codename = 'project.py'
     17#------------------------------------------------------------------------------
     18# Directory setup
     19#------------------------------------------------------------------------------
     20##Note: INUNDATIONHOME is the inundation directory, not the data directory.
    1821
    1922home = getenv('INUNDATIONHOME') + sep +'data'+sep #Sandpit's parent diruser = get_user_name()
     
    2225host = get_host_name()
    2326
    24 # INUNDATIONHOME is the inundation directory, not the data directory.
    25 
    26 #time stuff
     27##time stuff
    2728time = strftime('%Y%m%d_%H%M%S',localtime()) #gets time for new dir
    2829gtime = strftime('%Y%m%d_%H%M%S',gmtime()) #gets time for new dir
     
    3132print 'gtime: ', gtime
    3233
    33 #Making assumptions about the location of scenario data
     34#------------------------------------------------------------------------------
     35# Initial Conditions
     36#------------------------------------------------------------------------------
     37
     38##Changed to reflect the modeled community (also sets up the directory system)
    3439state = 'western_australia'
    35 #scenario_name = 'perth_44unitsources'
    3640scenario_name = 'perth'
    3741scenario = 'perth_tsunami_scenario'
    38 
    39 
    40 tide = 0.0 #0.6
    41 
     42##One or all can be changed each time the run_scenario script is executed
     43tide = 0.6
     44event_number = 27255
     45#event_number = 27283
    4246alpha = 0.1
    4347friction=0.01
    4448starttime=0
    4549finaltime=80000
    46 export_cellsize=25
    47 setup='final'
    48 source='polyline'
     50
     51setup='final'  ## Can replace with trial or basic, this action will thin the mesh
     52               ## so that the run script will take less time (hence less acurate)
    4953
    5054if setup =='trial':
     
    6468    yieldstep=60
    6569
    66 dir_comment='_'+setup+'_'+str(tide)+'_'+str(source)+'_'+ 'alpha' +str(alpha)+'_'+str(user)
    67 
    68 # onshore data provided by WA DLI
    69 onshore_name = 'perth_dli_ext' # original
    70 #island
    71 island_name = 'rott_dli_ext' # original
     70#------------------------------------------------------------------------------
     71# Output Filename
     72#------------------------------------------------------------------------------
     73##Important to distinguish each run
     74dir_comment='_'+setup+'_'+str(tide)+'_'+str(event_number)+'_'+ 'alpha' +str(alpha)+'_'+str(user)
     75
     76#------------------------------------------------------------------------------
     77# INPUT DATA
     78#------------------------------------------------------------------------------
     79
     80##ELEVATION DATA## - used in build_perth.py
     81## onshore data: format ascii grid with accompanying projection file
     82onshore_name = 'perth_dli_ext'
     83## island: format ascii grid with accompanying projection file
     84island_name = 'rott_dli_ext'
    7285island_name1 = 'gard_dli_ext'
    7386island_name2 = 'carnac_island_dli_ext'
    7487island_name3 = 'penguin_dli_ext'
    75 
    76 # AHO + DPI data + colin French coastline
    77 coast_name = 'coastline_perthP'
    78 offshore_name = 'Perth'
    79 offshore_name1 = 'Perth_Chart'
    80 offshore_name2 = 'Fremantle_north'
    81 offshore_name3 = 'port_swanriver'
    82 
    83 
    84 
    85 #final topo name
     88## coastline: format x,y,elevation (with title)
     89coast_name = 'coastline_perthP.txt'
     90## bathymetry: format x,y,elevation (with title)
     91offshore_name = 'Perth.txt'
     92offshore_name1 = 'Perth_Chart.txt'
     93offshore_name2 = 'Fremantle_north.txt'
     94offshore_name3 = 'port_swanriver.txt'
     95
     96##GAUGES## - used in get_timeseries.py
     97gauge_name = 'perth.csv'
     98gauge_name2 = 'thinned_MGA50.csv'
     99
     100##BOUNDING POLYGON## -used in build_boundary.py and run_perth.py respectively
     101#NOTE: when files are put together must be in sequence - for ease go clockwise!
     102#Check the run_perth.py for boundary_tags
     103##thinned ordering file from Hazard Map: format index,latitude,longitude (with title)
     104order_filename = 'thinned_boundary_ordering.txt'
     105##landward bounding points
     106landward = 'landward_bounding_polygon.txt'
     107
     108#------------------------------------------------------------------------------
     109# OUTPUT ELEVATION DATA
     110#------------------------------------------------------------------------------
     111##Output filename for elevation
     112## its a combination of all the data put together (utilisied in build_boundary)
    86113combined_name ='perth_combined_elevation'
    87114combined_smaller_name = 'perth_combined_elevation_smaller'
    88115
     116#------------------------------------------------------------------------------
     117# Directory Structure
     118#------------------------------------------------------------------------------
    89119anuga_dir = home+state+sep+scenario+sep+'anuga'+sep
    90 
    91120topographies_in_dir = home+state+sep+scenario+sep+'elevation_final'+sep+'points'+sep
    92121topographies_dir = anuga_dir+'topographies'+sep
    93 
    94 # input topo file location
     122polygons_dir = anuga_dir+'polygons'+sep
     123tide_dir = anuga_dir+'tide_data'+sep
     124boundaries_dir = anuga_dir+'boundaries'+ sep
     125output_dir = anuga_dir+'outputs'+sep
     126gauges_dir = anuga_dir+'gauges'+sep
     127meshes_dir = anuga_dir+'meshes'+sep
     128
     129#------------------------------------------------------------------------------
     130# Location of input and output Data
     131#------------------------------------------------------------------------------
     132##where the input data sits
    95133onshore_in_dir_name = topographies_in_dir + onshore_name
    96134island_in_dir_name = topographies_in_dir + island_name
     
    98136island_in_dir_name2 = topographies_in_dir + island_name2
    99137island_in_dir_name3 = topographies_in_dir + island_name3
    100 
    101138coast_in_dir_name = topographies_in_dir + coast_name
    102139offshore_in_dir_name = topographies_in_dir + offshore_name
     
    105142offshore_in_dir_name3 = topographies_in_dir + offshore_name3
    106143
     144##where the output data sits
    107145onshore_dir_name = topographies_dir + onshore_name
    108146island_dir_name = topographies_dir + island_name
     
    110148island_dir_name2 = topographies_dir + island_name2
    111149island_dir_name3 = topographies_dir + island_name3
    112 
    113150coast_dir_name = topographies_dir + coast_name
    114151offshore_dir_name = topographies_dir + offshore_name
     
    117154offshore_dir_name3 = topographies_dir + offshore_name3
    118155
    119 #final topo files
     156##where the combined file sits
    120157combined_dir_name = topographies_dir + combined_name
    121 #combined_time_dir_name = topographies_time_dir + combined_name
    122158combined_smaller_name_dir = topographies_dir + combined_smaller_name
    123 #combined_time_dir_final_name = topographies_time_dir + combined_final_name
    124 
    125 meshes_dir = anuga_dir+'meshes'+sep
    126 meshes_dir_name = meshes_dir + scenario_name
    127 
    128 polygons_dir = anuga_dir+'polygons'+sep
    129 tide_dir = anuga_dir+'tide_data'+sep
    130 
    131 
    132 #boundaries_source = '1'
    133    
    134 if source=='polyline':
    135     boundaries_name = 'perth_3103_28052008' #polyline gun
    136     boundaries_in_dir = anuga_dir+'boundaries'+sep+'urs'+sep+'polyline'+sep+'1_10000'+sep
    137 
    138 if source=='test':
    139     boundaries_name = 'other' #polyline
    140     boundaries_in_dir = anuga_dir+'boundaries'+sep
    141 
    142 
    143 #boundaries locations
    144 boundaries_in_dir_name = boundaries_in_dir + boundaries_name
    145 boundaries_dir = anuga_dir+'boundaries'+sep
    146 boundaries_dir_name = boundaries_dir + scenario_name # what it creates???
     159
     160##where the mesh sits (this is created during the run_perth.py)
     161meshes_dir_name = meshes_dir + scenario_name+'.msh'
     162
     163#where the boundary order files sit (this is used within build_boundary.py)
     164order_filename_dir = boundaries_dir + order_filename
     165
     166#where the landward points of boundary extent sit (this is used within run_perth.py)
     167landward_dir = boundaries_dir + landward
     168
     169#where the event sts files sits (this is created during the build_boundary.py)
     170boundaries_dir_event = boundaries_dir + str(event_number) + sep
    147171boundaries_dir_mux = muxhome
    148172
    149 #output locations
    150 output_dir = anuga_dir+'outputs'+sep
    151 output_build_time_dir = anuga_dir+'outputs'+sep+build_time+dir_comment+sep
    152 output_run_time_dir = anuga_dir+'outputs'+sep+run_time+dir_comment+sep
     173#where the directory of the output filename sits
     174output_build_time_dir = output_dir+build_time+dir_comment+sep #used for build_perth.py
     175output_run_time_dir = output_dir+run_time+dir_comment+sep #used for run_perth.py
    153176output_run_time_dir_name = output_run_time_dir + scenario_name  #Used by post processing
    154177
    155 vertex_filename = output_run_time_dir + 'mesh_vertex.csv'
    156 
    157 #gauges
    158 gauge_name = 'perth.csv'
    159 gauge_name2 = 'thinned_MGA50.csv'
    160 
    161 
    162 gauges_dir = home+sep+state+sep+scenario+sep+'anuga'+sep+'gauges'+sep
    163 beach_gauges = gauges_dir + 'beach_gauges.csv'
    164 gauges_dir_name = gauges_dir + gauge_name
    165 gauges_dir_name2 = gauges_dir + gauge_name2
    166 
    167 buildings_filename = gauges_dir + 'Perth_resA.csv'
    168 buildings_filename_out = 'Perth_res_Project_modified.csv'
    169 
    170 ###############################
     178#where the directory of the gauges sit
     179gauges_dir_name = gauges_dir + gauge_name #used for get_timeseries.py
     180gauges_dir_name2 = gauges_dir + gauge_name2 #used for get_timeseries.py
     181
     182#------------------------------------------------------------------------------
    171183# Interior region definitions
    172 ###############################
     184#------------------------------------------------------------------------------
     185
     186#Land, to set the stage/water to be offcoast only
     187poly_mainland = read_polygon(polygons_dir+'initial_condition.csv')
    173188
    174189#Initial bounding polygon for data clipping
     
    176191res_poly_all = 100000*res_factor
    177192
    178 #Polygon designed by 20m contours, or 3km from the coastline
    179 poly_internal_20_3 = read_polygon(polygons_dir+'internal_h20mORd3km.csv')
    180 res_internal_20_3 = 25000*res_factor
    181 
    182 #Polygon designed to cut out the rottnest island land.
    183 poly_rottnest_in = read_polygon(polygons_dir+'rottnest_internal.csv')
    184 res_rottnest_in = 100000*res_factor
    185 
    186 #Polygon designed to incorporate Garden Island and sand bank infront of Rockingham
    187 poly_garden_rockingham = read_polygon(polygons_dir+'garden_rockingham.csv')
    188 res_garden_rockingham = 1000*res_factor
    189 
    190 #Polygon designed to incorporate coastline of rottnest
    191 poly_rottnest_ex = read_polygon(polygons_dir+'rottnest_external.csv')
    192 res_rottnest_ex = 1000*res_factor
    193 
    194 #Polygon designed to incorporate perth and Fremantle CBD
    195 poly_cbd = read_polygon(polygons_dir+'CBD_coastal.csv')
    196 res_cbd = 500*res_factor
    197 
    198 #Polygon designed to incorporate rockingham and penguin island
    199 poly_rockingham = read_polygon(polygons_dir+'rockingham_penguin.csv')
    200 res_rockingham = 500*res_factor
    201 
    202 #Polygon designed to incorporate bottom of Garden Island for image verification
    203 poly_garden = read_polygon(polygons_dir+'garden.csv')
    204 res_garden = 500*res_factor
    205 
    206 poly_geordie_bay = read_polygon(polygons_dir+'geordie_bay.csv')
    207 res_geordie_bay = 500*res_factor
    208 
    209 poly_sorrento_gauge = read_polygon(polygons_dir+'sorrento_gauge.csv')
    210 res_sorrento_gauge = 500*res_factor
    211 
     193#Area of Interest 1 (Fremantle)
     194poly_aoi1 = read_polygon(polygons_dir+'CBD_coastal.csv')
     195res_aoi1 = 500*res_factor
     196
     197#Area of Interest 2 (Rockingham)
     198poly_aoi2 = read_polygon(polygons_dir+'rockingham_penguin.csv')
     199res_aoi2 = 500*res_factor
     200
     201#Area of Interest 2 (garden Island)
     202poly_aoi2a = read_polygon(polygons_dir+'garden.csv')
     203res_aoi2a= 500*res_factor
     204
     205#Area of Interest 3 (geordie bay - record of tsunami impact)
     206poly_aoi3 = read_polygon(polygons_dir+'geordie_bay.csv')
     207res_aoi3 = 500*res_factor
     208
     209#Area of Interest 4 (sorrento - record of tsunami impact)
     210poly_aoi4 = read_polygon(polygons_dir+'sorrento_gauge.csv')
     211res_aoi4 = 500*res_factor
     212
     213#Area of Significance 1 (Garden Island and sand bank in front of Rockingham)
     214poly_aos1 = read_polygon(polygons_dir+'garden_rockingham.csv')
     215res_aos1 = 1000*res_factor
     216
     217#Area of Significance 2 (incorporate coastline of rottnest)
     218poly_aos2 = read_polygon(polygons_dir+'rottnest_external.csv')
     219res_aos2 = 1000*res_factor
     220
     221#Refined areas
    212222#Polygon designed to incorporate Dredge Area from Fremantle to
    213223#Rockingham the steep incline was making the mesh go to 0
    214 poly_dredge = read_polygon(polygons_dir+'DredgeArea.csv')
    215 res_dredge = 1000*res_factor
    216 
    217 #assert zone == refzone
    218 
    219 interior_regions = [[poly_internal_20_3,res_internal_20_3],[poly_cbd,res_cbd]
    220                      ,[poly_garden_rockingham,res_garden_rockingham]
    221                      ,[poly_rockingham,res_rockingham],[poly_geordie_bay,res_geordie_bay]
    222                      ,[poly_sorrento_gauge,res_sorrento_gauge],[poly_rottnest_in, res_rottnest_in]
    223                      ,[poly_rottnest_ex, res_rottnest_ex], [poly_garden, res_garden]
    224                      ,[poly_dredge, res_dredge]]
     224poly_aos3 = read_polygon(polygons_dir+'DredgeArea.csv')
     225res_aos3 = 1000*res_factor
     226
     227#Shallow water 1
     228poly_sw1 = read_polygon(polygons_dir+'internal_h20mORd3km.csv')
     229res_sw1 = 25000*res_factor
     230
     231#Deep water (land of Rottnest, ANUGA does not do donuts!)
     232poly_dw1 = read_polygon(polygons_dir+'rottnest_internal.csv')
     233res_dw1 = 100000*res_factor
     234
     235# Combined all regions, must check that all are included!
     236interior_regions = [[poly_aoi1,res_aoi1],[poly_aoi2,res_aoi2]
     237                     ,[poly_aoi2a,res_aoi2a],[poly_aoi3,res_aoi3]
     238                     ,[poly_aoi4,res_aoi4],[poly_aos1,res_aos1]
     239                     ,[poly_aos2,res_aos2],[poly_aos3,res_aos3]
     240                     ,[poly_sw1,res_sw1], [poly_dw1,res_dw1]]
    225241
    226242   
    227243trigs_min = number_mesh_triangles(interior_regions, poly_all, res_poly_all)
    228 print 'min number triangles', trigs_min
     244print 'min estimated number of triangles', trigs_min
    229245   
    230246
    231 poly_mainland = read_polygon(polygons_dir+'initial_condition.csv')
    232 
    233 ###################################################################
     247#------------------------------------------------------------------------------
    234248# Clipping regions for export to asc and regions for clipping data
    235 ###################################################################
    236 
     249#------------------------------------------------------------------------------
    237250
    238251#Geordie Bay extract ascii grid
  • anuga_work/production/perth/run_perth.py

    r5669 r5759  
    1 """Script for running tsunami inundation scenario for Dampier, WA, Australia.
     1"""Script for running tsunami inundation scenario for Perth, WA, Australia.
    22
    33Source data such as elevation and boundary data is assumed to be available in
     
    66
    77The scenario is defined by a triangular mesh created from project.polygon,
    8 the elevation data and a simulated tsunami generated with URS code.
    9 
    10 Ole Nielsen and Duncan Gray, GA - 2005 and Jane Sexton, Nick Bartzis, GA - 2006
     8the elevation data is compiled into a pts file through build_perth.py
     9and a simulated tsunami is generated through an sts file from build_boundary.py
     10
     11Ole Nielsen and Duncan Gray, GA - 2005, Jane Sexton, Nick Bartzis, GA - 2006
     12Ole Nielsen, Jane Sexton and Kristy Van Putten - 2008
    1113"""
    1214
     
    3234from Numeric import allclose
    3335from anuga.shallow_water.data_manager import export_grid, create_sts_boundary
    34 
    3536from anuga.pmesh.mesh_interface import create_mesh_from_regions
    3637from anuga.shallow_water.data_manager import start_screen_catcher, copy_code_files,store_parameters
    37 #from anuga_parallel.parallel_api import distribute, numprocs, myid, barrier
    3838from anuga_parallel.parallel_abstraction import get_processor_name
    3939from anuga.caching import myhash
     
    4242from anuga.utilities.polygon import read_polygon, plot_polygons, polygon_area, is_inside_polygon
    4343from anuga.geospatial_data.geospatial_data import find_optimal_smoothing_parameter
    44 
     44from polygon import Polygon_function
     45   
    4546# Application specific imports
    46 import project                 # Definition of file names and polygons
     47import project  # Definition of file names and polygons
    4748numprocs = 1
    4849myid = 0
     
    5051def run_model(**kwargs):
    5152   
    52 
    5353    #------------------------------------------------------------------------------
    5454    # Copy scripts to time stamped output directory and capture screen
     
    5858
    5959    #copy script must be before screen_catcher
    60     #print kwargs
    6160
    6261    print 'output_dir',kwargs['output_dir']
    63     if myid == 0:
    64         copy_code_files(kwargs['output_dir'],__file__,
    65                  dirname(project.__file__)+sep+ project.__name__+'.py' )
    66 
    67         store_parameters(**kwargs)
    68 
    69    # barrier()
     62   
     63    copy_code_files(kwargs['output_dir'],__file__,
     64             dirname(project.__file__)+sep+ project.__name__+'.py' )
     65
     66    store_parameters(**kwargs)
    7067
    7168    start_screen_catcher(kwargs['output_dir'], myid, numprocs)
    7269
    7370    print "Processor Name:",get_processor_name()
    74 
    7571   
    7672    #-----------------------------------------------------------------------
     
    7975
    8076    # Read in boundary from ordered sts file
    81     urs_bounding_polygon=create_sts_boundary(os.path.join(project.boundaries_dir,project.scenario_name))
     77    urs_bounding_polygon=create_sts_boundary(os.path.join(project.boundaries_dir_event,project.scenario_name))
    8278
    8379    # Reading the landward defined points, this incorporates the original clipping
    8480    # polygon minus the 100m contour
    85     landward_bounding_polygon = read_polygon(project.polygons_dir+'landward_bounding_polygon.txt')
     81    landward_bounding_polygon = read_polygon(project.landward_dir)
    8682
    8783    # Combine sts polyline with landward points
     
    9086    # counting segments
    9187    N = len(urs_bounding_polygon)-1
    92     boundary_tags={'back': [N+1,N+2,N+3], 'side': [N,N+4],'ocean': range(N)}
     88
     89    #boundary tags refer to project.landward: 4 points give 5 segments, starting at N
     90    boundary_tags={'back': [N+1,N+2,N+3], 'side': [N,N+4], 'ocean': range(N)}
    9391
    9492   
    9593    #--------------------------------------------------------------------------
    96     # Create the triangular mesh based on overall clipping polygon with a
    97     # tagged
     94    # Create the triangular mesh based on overall clipping polygon with a tagged
    9895    # boundary and interior regions defined in project.py along with
    9996    # resolutions (maximal area of per triangle) for each polygon
     
    10299    #IMPORTANT don't cache create_mesh_from_region and Domain(mesh....) as it
    103100    # causes problems with the ability to cache set_quantity, which takes a lot of time
    104     if myid == 0:
    105    
    106         print 'start create mesh from regions'
    107 
    108         create_mesh_from_regions(bounding_polygon,
    109                              boundary_tags=boundary_tags,
    110                              maximum_triangle_area=project.res_poly_all,
    111                              interior_regions=project.interior_regions,
    112                              filename=project.meshes_dir_name+'.msh',
    113                              use_cache=True,
    114                              verbose=True)
    115    # barrier()
    116 
    117 ##        covariance_value,alpha = find_optimal_smoothing_parameter (data_file= kwargs['elevation_file'],
    118 ##                                alpha_list=[0.001, 0.01, 0.1, 0.15, 0.2, 0.25, 0.3, 0.4, 0.5],
    119 ##                                mesh_file = project.meshes_dir_name+'.msh')
    120 ##        print 'optimal alpha', covariance_value,alpha       
    121 
     101       
     102    print 'start create mesh from regions'
     103
     104    create_mesh_from_regions(bounding_polygon,
     105                         boundary_tags=boundary_tags,
     106                         maximum_triangle_area=project.res_poly_all,
     107                         interior_regions=project.interior_regions,
     108                         filename=project.meshes_dir_name,
     109                         use_cache=True,
     110                         verbose=True)
     111   
    122112    #-------------------------------------------------------------------------
    123113    # Setup computational domain
     
    125115    print 'Setup computational domain'
    126116
    127     domain = Domain(project.meshes_dir_name+'.msh', use_cache=False, verbose=True)
     117    domain = Domain(project.meshes_dir_name, use_cache=False, verbose=True)
    128118    print 'memory usage before del domain',mem_usage()
    129119       
     
    137127    # Setup initial conditions
    138128    #-------------------------------------------------------------------------
    139     if myid == 0:
    140 
    141         print 'Setup initial conditions'
    142 
    143         from polygon import Polygon_function
    144         #following sets the stage/water to be offcoast only
    145         IC = Polygon_function( [(project.poly_mainland, 0)], default = kwargs['tide'],
    146                                  geo_reference = domain.geo_reference)
    147         domain.set_quantity('stage', IC)
    148         #domain.set_quantity('stage',kwargs['tide'] )
    149         domain.set_quantity('friction', kwargs['friction'])
    150        
    151         print 'Start Set quantity',kwargs['elevation_file']
    152 
    153         domain.set_quantity('elevation',
    154                             filename = kwargs['elevation_file'],
    155                             use_cache = False,
    156                             verbose = True,
    157                             alpha = kwargs['alpha'])
    158         print 'Finished Set quantity'
    159     #barrier()
    160 
     129   print 'Setup initial conditions'
     130
     131    #following sets the stage/water to be offcoast only
     132    IC = Polygon_function( [(project.poly_mainland, 0)], default = kwargs['tide'],
     133                             geo_reference = domain.geo_reference)
     134    domain.set_quantity('stage', IC)
     135    #domain.set_quantity('stage',kwargs['tide'] )
     136    domain.set_quantity('friction', kwargs['friction'])
     137   
     138    print 'Start Set quantity',kwargs['elevation_file']
     139
     140    domain.set_quantity('elevation',
     141                        filename = kwargs['elevation_file'],
     142                        use_cache = False,
     143                        verbose = True,
     144                        alpha = kwargs['alpha'])
     145    print 'Finished Set quantity'
     146
     147##   #------------------------------------------------------
     148##    # Distribute domain to implement parallelism !!!
    161149##    #------------------------------------------------------
    162 ##    # Create x,y,z file of mesh vertex!!!
    163 ##    #------------------------------------------------------
    164 ##        coord = domain.get_vertex_coordinates()
    165 ##        depth = domain.get_quantity('elevation')
    166 ##       
    167 ##        # Write vertex coordinates to file
    168 ##        filename=project.vertex_filename
    169 ##        fid=open(filename,'w')
    170 ##        fid.write('x (m), y (m), z(m)\n')
    171 ##        for i in range(len(coord)):
    172 ##            pt=coord[i]
    173 ##            x=pt[0]
    174 ##            y=pt[1]
    175 ##            z=depth[i]
    176 ##            fid.write('%.6f,%.6f,%.6f\n' %(x, y, z))
    177150##
    178 
    179     #------------------------------------------------------
    180     # Distribute domain to implement parallelism !!!
    181     #------------------------------------------------------
    182 
    183     if numprocs > 1:
    184         domain=distribute(domain)
     151##    if numprocs > 1:
     152##        domain=distribute(domain)
    185153
    186154    #------------------------------------------------------
     
    188156    #------------------------------------------------------
    189157    print 'domain id', id(domain)
    190     domain.set_name(kwargs['aa_scenario_name'])
     158    domain.set_name(kwargs['scenario_name'])
    191159    domain.set_datadir(kwargs['output_dir'])
    192160    domain.set_default_order(2) # Apply second order scheme
     
    203171    print 'domain id', id(domain)
    204172   
    205     boundary_urs_out=project.boundaries_dir_name
     173    boundary_urs_out=project.boundaries_dir_event + sep + project.scenario_name
    206174
    207175    Br = Reflective_boundary(domain)
     
    244212    #kwargs 'completed' must be added to write the final parameters to file
    245213    kwargs['completed']=str(time.time()-t0)
    246    
    247     if myid==0:
    248         store_parameters(**kwargs)
    249    # barrier
    250    
     214     
     215    store_parameters(**kwargs)
     216     
    251217    print 'memory usage before del domain1',mem_usage()
    252218   
     
    256222   
    257223    kwargs={}
    258     kwargs['est_num_trigs']=project.trigs_min
    259     kwargs['num_cpu']=numprocs
    260     kwargs['host']=project.host
    261     kwargs['res_factor']=project.res_factor
    262     kwargs['starttime']=project.starttime
    263     kwargs['yieldstep']=project.yieldstep
    264224    kwargs['finaltime']=project.finaltime
    265    
    266225    kwargs['output_dir']=project.output_run_time_dir
    267226    kwargs['elevation_file']=project.combined_dir_name+'.pts'
    268     kwargs['boundary_file']=project.boundaries_in_dir_name + '.sww'
    269     kwargs['file_name']=project.home+'detail.csv'
    270     kwargs['aa_scenario_name']=project.scenario_name
    271     kwargs['ab_time']=project.time
    272     kwargs['res_factor']= project.res_factor
     227    kwargs['scenario_name']=project.scenario_name
    273228    kwargs['tide']=project.tide
    274     kwargs['user']=project.user
    275229    kwargs['alpha'] = project.alpha
    276230    kwargs['friction']=project.friction
    277     kwargs['time_thinning'] = project.time_thinning
    278     kwargs['dir_comment']=project.dir_comment
    279     kwargs['export_cellsize']=project.export_cellsize
    280    
     231    #kwargs['est_num_trigs']=project.trigs_min
     232    #kwargs['num_cpu']=numprocs
     233    #kwargs['host']=project.host
     234    #kwargs['res_factor']=project.res_factor
     235    #kwargs['starttime']=project.starttime
     236    #kwargs['yieldstep']=project.yieldstep
     237    #kwargs['file_name']=project.home+'detail.csv'
     238    #kwargs['ab_time']=project.time
     239    #kwargs['res_factor']= project.res_factor
     240    #kwargs['user']=project.user
     241    #kwargs['time_thinning'] = project.time_thinning
     242    #kwargs['dir_comment']=project.dir_comment
     243     
    281244
    282245    run_model(**kwargs)
    283246     
    284     #barrier
     247   
Note: See TracChangeset for help on using the changeset viewer.