Changeset 5789

Timestamp: Sep 25, 2008, 3:14:42 PM
Author:    kristy
Message:   Updated all scripts to coincide with Perth format
Location:  anuga_work/production/geraldton
Files:     5 added, 1 deleted, 5 edited
  • anuga_work/production/geraldton/build_geraldton.py

    r5751 → r5789

    - """Script for running tsunami inundation scenario for Dampier, WA, Australia.
    + """
    + Script for building the elevation data to run a tsunami inundation scenario
    + for geraldton, WA, Australia.

    - Source data such as elevation and boundary data is assumed to be available in
    - directories specified by project.py
    - The output sww file is stored in project.output_time_dir
    + Input: elevation data from project.py
    + Output: pts file stored in project.topographies_dir
    + The run_geraldton.py is reliant on the output of this script.

    - The scenario is defined by a triangular mesh created from project.polygon,
    - the elevation data and a simulated submarine landslide.
    -
    - Ole Nielsen and Duncan Gray, GA - 2005 and Jane Sexton, Nick Bartzis, GA - 2006
      """

    …
      # Related major packages
      from anuga.shallow_water import Domain
    - from anuga.shallow_water.data_manager import convert_dem_from_ascii2netcdf, dem2pts,start_screen_catcher, copy_code_files
    + from anuga.shallow_water.data_manager import convert_dem_from_ascii2netcdf, dem2pts
      from anuga.geospatial_data.geospatial_data import *
      from anuga.shallow_water.data_manager import start_screen_catcher, copy_code_files
    …
      start_screen_catcher(project.output_build_time_dir)

    - print 'USER: ', project.user, project.host
    + print 'USER:    ', project.user

      #-------------------------------------------------------------------------------
    …
      # Fine pts file to be clipped to area of interest
      #-------------------------------------------------------------------------------
    + print "project.poly_all", project.poly_all
    + print "project.combined_dir_name", project.combined_dir_name

    - # topography directory filenames
    + # input elevation directory filenames
      onshore_in_dir_name = project.onshore_in_dir_name
      coast_in_dir_name = project.coast_in_dir_name
    …
      offshore_in_dir_name3 = project.offshore_in_dir_name3

    + # output elevation directory filenames
      onshore_dir_name = project.onshore_dir_name
      coast_dir_name = project.coast_dir_name
    …

      # creates DEM from asc data
    - print "creates DEMs from ascii data"
    + print "creates DEMs from asc data"
      convert_dem_from_ascii2netcdf(onshore_in_dir_name, basename_out=onshore_dir_name, use_cache=True, verbose=True)
      convert_dem_from_ascii2netcdf(island_in_dir_name, basename_out=island_dir_name, use_cache=True, verbose=True)

    - #creates pts file for onshore DEM
    - dem2pts(onshore_dir_name, use_cache=True, verbose=True)
    + # creates pts file for onshore DEM
    + print "creates pts file for onshore DEM"
    + dem2pts(onshore_dir_name ,use_cache=True,verbose=True)

    - #creates pts file for island DEM
    + # creates pts file for island DEM
      dem2pts(island_dir_name, use_cache=True, verbose=True)

    + # create onshore pts files
      print'create Geospatial data1 objects from topographies'
    - G1 = Geospatial_data(file_name = onshore_dir_name + '.pts',verbose=True)
    - G2 = Geospatial_data(file_name = coast_in_dir_name + '.txt',verbose=True)
    - G3 = Geospatial_data(file_name = island_dir_name + '.pts',verbose=True)
    - print'create Geospatial data1 objects from bathymetry'
    - G_off = Geospatial_data(file_name = offshore_in_dir_name + '.txt',verbose=True)
    - G_off1 = Geospatial_data(file_name = offshore_in_dir_name1 + '.txt',verbose=True)
    - G_off2 = Geospatial_data(file_name = offshore_in_dir_name2 + '.txt',verbose=True)
    - G_off3 = Geospatial_data(file_name = offshore_in_dir_name3 + '.txt',verbose=True)
    - print'finished reading in files'
    + G1 = Geospatial_data(file_name = onshore_dir_name + '.pts')
    + print'create Geospatial data2 objects from topographies'
    + G2 = Geospatial_data(file_name = island_dir_name + '.pts')

    - G_off1_clip = G_off1.clip(project.poly_CBD_1km, verbose=True)
    + # create coastal and offshore pts files
    + print'create Geospatial data6 objects from topographies'
    + G_coast = Geospatial_data(file_name = coast_in_dir_name)
    + print'create Geospatial data7 objects from topographies'
    + G_off = Geospatial_data(file_name = offshore_in_dir_name)
    + print'create Geospatial data8 objects from topographies'
    + G_off1 = Geospatial_data(file_name = offshore_in_dir_name1)
    + print'create Geospatial data9 objects from topographies'
    + G_off2 = Geospatial_data(file_name = offshore_in_dir_name2)
    + print'create Geospatial data10 objects from topographies'
    + G_off3 = Geospatial_data(file_name = offshore_in_dir_name3)
    +
    + #-------------------------------------------------------------------------------
    + # Combine, clip and export dataset
    + #-------------------------------------------------------------------------------
    +
    + print 'clipping port data to area of significance'
    + G_off2_clip = G_off2.clip(project.poly_CBD_1km, verbose=True)
    + print 'add all bathymetry files'
    + G_off = G_off1 + G_off2_clip + G_off3 + G_off4
    + print 'clipping data to coast'
    + G_off_clip = G_off.clip(project.poly_ocean, verbose=True)
    + G1_clip = G1.clip(project.poly_mainland, verbose=True)

      print'add all geospatial objects'
    - G = G1 + G2 + G3 + G_off + G_off1_clip + G_off2 + G_off3
    + G = G1_clip + G2 + G3 + G_off_clip

      print'clip combined geospatial object by bounding polygon'
    - G_clip = G.clip(project.poly_all, verbose=True)
    + G_clipped = G.clip(project.poly_all)

      print'export combined DEM file'
      if access(project.topographies_dir,F_OK) == 0:
          mkdir (project.topographies_dir)
    - G_clip.export_points_file(project.combined_dir_name + '.pts')
    + G_clipped.export_points_file(project.combined_dir_name + '.pts')
    + #G_clipped.export_points_file(project.combined_dir_name + '.txt') #Use for comparision in ARC

    - import sys
    - sys.exit()
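    The rewritten build script follows the Perth-style pattern: read each elevation source into a Geospatial_data object, clip onshore and offshore data to separate polygons, add the objects together, then export a single pts file. A minimal sketch of that pattern is below; the file names and polygons are illustrative placeholders, not the production Geraldton inputs.

    # Sketch only: the combine-clip-export pattern used by build_geraldton.py.
    # File names and polygons are placeholders; the real ones come from project.py.
    from anuga.geospatial_data.geospatial_data import Geospatial_data

    # Placeholder clipping polygons (lists of [x, y] points)
    poly_land  = [[0.0, 0.0], [5000.0, 0.0], [5000.0, 5000.0], [0.0, 5000.0]]
    poly_ocean = [[5000.0, 0.0], [10000.0, 0.0], [10000.0, 5000.0], [5000.0, 5000.0]]
    poly_all   = [[0.0, 0.0], [10000.0, 0.0], [10000.0, 5000.0], [0.0, 5000.0]]

    # Read each source: pts files from dem2pts, txt files of x,y,elevation points
    G_onshore  = Geospatial_data(file_name='onshore_dem.pts')
    G_offshore = Geospatial_data(file_name='bathymetry.txt')

    # Clip land data to the land polygon and bathymetry to the ocean polygon
    G_onshore_clip  = G_onshore.clip(poly_land, verbose=True)
    G_offshore_clip = G_offshore.clip(poly_ocean, verbose=True)

    # Geospatial_data objects support '+' to concatenate point sets
    G = G_onshore_clip + G_offshore_clip

    # Trim to the overall study area and export the combined pts file
    G_clipped = G.clip(poly_all)
    G_clipped.export_points_file('combined_elevation.pts')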
  • anuga_work/production/geraldton/export_results.py

    r5752 → r5789

    - time_dir = '20080908_173226_run_final_0_elevation_alpha0.1_kvanputt'
    + time_dir = '20080911_094915_run_final_0.6_27255_alpha0.1_kvanputt'
    + #time_dir = '20080911_100609_run_final_0.6_27283_alpha0.1_kvanputt'

      cellsize = 25
      #cellsize = 150
    - timestep = 0
    + #timestep = 0
      directory = project.output_dir
      name = directory+sep+time_dir+sep+project.scenario_name
    + #var = [0,4]
    + var = [2] # depth and Speed
    + #var = [2,3] # elevation, depth and Speed

      is_parallel = False
    …

    - var = [0,4]
    - #var = [2,3] # depth and Speed
    - #var = [2,3] # elevation, depth and Speed
    + name1 = directory+time_dir+sep+project.scenario_name
    + name2 = directory+time_dir+sep+'sww2'+sep+project.scenario_name+'_time_41700_0' #need to get assistance on how to make this into anything

    + names = [name1, name2]

    - for which_var in var:
    -     if which_var == 0:  # Stage
    -         outname = name + '_stage'
    -         quantityname = 'stage'
    + for name in names:

    -     if which_var == 1:  # Absolute Momentum
    -         outname = name + '_momentum'
    -         quantityname = '(xmomentum**2 + ymomentum**2)**0.5'
    +     for which_var in var:
    +         if which_var == 0:  # Stage
    +             outname = name + '_stage'
    +             quantityname = 'stage'

    -     if which_var == 2:  # Depth
    -         outname = name + '_depth'
    -         quantityname = 'stage-elevation'
    +         if which_var == 1:  # Absolute Momentum
    +             outname = name + '_momentum'
    +             quantityname = '(xmomentum**2 + ymomentum**2)**0.5'

    -     if which_var == 3:  # Speed
    -         outname = name + '_speed'
    -         quantityname = '(xmomentum**2 + ymomentum**2)**0.5/(stage-elevation+1.e-6/(stage-elevation))'  #Speed
    +         if which_var == 2:  # Depth
    +             outname = name + '_depth'
    +             quantityname = 'stage-elevation'

    -     if which_var == 4:  # Elevation
    -         outname = name + '_elevation'
    -         quantityname = 'elevation'  #Elevation
    +         if which_var == 3:  # Speed
    +             outname = name + '_speed'
    +             quantityname = '(xmomentum**2 + ymomentum**2)**0.5/(stage-elevation+1.e-6/(stage-elevation))'  #Speed

    -     if is_parallel == True:
    -     #    print 'is_parallel',is_parallel
    -         for i in range(0,nodes):
    -             namei = name + '_P%d_%d' %(i,nodes)
    -             outnamei = outname + '_P%d_%d' %(i,nodes)
    -             print 'start sww2dem for sww file %d' %(i)
    -             sww2dem(namei, basename_out = outnamei,
    +         if which_var == 4:  # Elevation
    +             outname = name + '_elevation'
    +             quantityname = 'elevation'  #Elevation
    +
    +         if is_parallel == True:
    +         #    print 'is_parallel',is_parallel
    +             for i in range(0,nodes):
    +                 namei = name + '_P%d_%d' %(i,nodes)
    +                 outnamei = outname + '_P%d_%d' %(i,nodes)
    +                 print 'start sww2dem for sww file %d' %(i)
    +                 sww2dem(namei, basename_out = outnamei,
    +                             quantity = quantityname,
    +                             #timestep = timestep,
    +                             cellsize = cellsize,
    +                             easting_min = project_grad.e_min_area,
    +                             easting_max = project_grad.e_max_area,
    +                             northing_min = project_grad.n_min_area,
    +                             northing_max = project_grad.n_max_area,
    +                             reduction = max,
    +                             verbose = True,
    +                             format = 'asc')
    +         else:
    +             print 'start sww2dem'
    +             sww2dem(name, basename_out = outname,
                              quantity = quantityname,
                              #timestep = timestep,
    -                         cellsize = cellsize,
    -                         easting_min = project_grad.e_min_area,
    -                         easting_max = project_grad.e_max_area,
    -                         northing_min = project_grad.n_min_area,
    -                         northing_max = project_grad.n_max_area,
    +                         cellsize = cellsize,
    +                         number_of_decimal_places = 4,
                              reduction = max,
                              verbose = True,
                              format = 'asc')
    -     else:
    -         print 'start sww2dem'
    -         sww2dem(name, basename_out = outname,
    -                     quantity = quantityname,
    -                     timestep = timestep,
    -                     cellsize = cellsize,
    -                     number_of_decimal_places = 4,
    -                     reduction = max,
    -                     verbose = True,
    -                     format = 'asc')
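    The export script's job is unchanged: for each requested quantity it calls ANUGA's sww2dem to collapse an sww file into an ascii grid, taking the maximum over all timesteps. A minimal sketch of one such call, using only the keywords that appear in the diff and a placeholder basename:

    # Sketch only: export maximum water depth from scenario.sww as an ascii grid.
    # 'scenario' is a placeholder sww basename.
    from anuga.shallow_water.data_manager import sww2dem

    sww2dem('scenario',                         # reads scenario.sww
            basename_out='scenario_depth',      # writes scenario_depth.asc
            quantity='stage-elevation',         # depth = stage - elevation
            cellsize=25,                        # grid cell size in metres
            number_of_decimal_places=4,
            reduction=max,                      # maximum over all stored timesteps
            verbose=True,
            format='asc')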
  • anuga_work/production/geraldton/get_gauges.py

    r5751 → r5789

          maxname = 'max_sts_stage.csv'
    -     fid_max = open(project.boundaries_dir+sep+maxname,'w')
    -     s = 'x, y, max_stage \n'
    +     fid_max = open(project.boundaries_dir_event+sep+maxname,'w')
    +     s = 'index, x, y, max_stage \n'
          fid_max.write(s)
          for j in range(len(x)):
    …
              fid_max.write(s)

    -         fid_sts = open(project.boundaries_dir+sep+basename+'_'+ str(index)+'.csv', 'w')
    +         fid_sts = open(project.boundaries_dir_event+sep+basename+'_'+ str(index)+'.csv', 'w')
              s = 'time, stage, xmomentum, ymomentum \n'
              fid_sts.write(s)
    …
          return quantities,elevation,time

    - quantities,elevation,time=get_sts_gauge_data(os.path.join(project.boundaries_dir,project.scenario_name),verbose=False)
    + quantities,elevation,time=get_sts_gauge_data(os.path.join(project.boundaries_dir_event,project.scenario_name),verbose=False)

      print len(elevation), len(quantities['stage'][0,:])
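    The only functional change here is that the gauge CSVs now land in the per-event boundaries folder and the max-stage file gains an index column. A minimal sketch of the path convention, with a placeholder base directory and event number in place of the project.py values:

    # Sketch only: per-event output path used by the updated gauge script.
    # The base directory and event number are placeholders mirroring project.py.
    import os
    from os import sep

    boundaries_dir = 'boundaries' + sep            # project.boundaries_dir in the real script
    event_number = 27283                           # project.event_number
    boundaries_dir_event = boundaries_dir + str(event_number) + sep   # e.g. boundaries/27283/

    if not os.path.exists(boundaries_dir_event):
        os.makedirs(boundaries_dir_event)

    fid_max = open(boundaries_dir_event + 'max_sts_stage.csv', 'w')
    fid_max.write('index, x, y, max_stage \n')     # new header carries the gauge index
    fid_max.close()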
  • anuga_work/production/geraldton/project.py

    r5751 → r5789

    - # -*- coding: cp1252 -*-
    - """Common filenames and locations for topographic data, meshes and outputs.
    + """Common filenames and locations for elevation, meshes and outputs.
    + This script is the heart of all scripts in the folder
      """
    -
    - from os import sep, environ, getenv, getcwd ,umask
    + #------------------------------------------------------------------------------
    + # Import necessary modules
    + #------------------------------------------------------------------------------
    +
    + from os import sep, environ, getenv, getcwd
      from os.path import expanduser
      import sys
      from time import localtime, strftime, gmtime
      from anuga.utilities.polygon import read_polygon, plot_polygons, is_inside_polygon, number_mesh_triangles
    - #from anuga.coordinate_transforms.redfearn import degminsec2decimal_degrees, convert_points_from_latlon_to_utm
      from anuga.utilities.system_tools import get_user_name, get_host_name
      from anuga.shallow_water.data_manager import urs2sts,create_sts_boundary
      from anuga.utilities.polygon import read_polygon, plot_polygons, polygon_area, is_inside_polygon

    - # file and system info
    - #---------------------------------
    -
    - home = getenv('INUNDATIONHOME') + sep +'data'+sep #Sandpit's parent dir
    + #------------------------------------------------------------------------------
    + # Directory setup
    + #------------------------------------------------------------------------------
    + # Note: INUNDATIONHOME is the inundation directory, not the data directory.
    +
    + home = getenv('INUNDATIONHOME') + sep +'data'+sep #Sandpit's parent dir
      muxhome = getenv('MUXHOME')
      user = get_user_name()
      host = get_host_name()

    - # INUNDATIONHOME is the inundation directory, not the data directory.
    -
    - #time stuff
    - time = strftime('%Y%m%d_%H%M%S',localtime()) #gets time for new dir
    - gtime = strftime('%Y%m%d_%H%M%S',gmtime()) #gets time for new dir
    + # determines time for setting up output directories
    + time = strftime('%Y%m%d_%H%M%S',localtime())
    + gtime = strftime('%Y%m%d_%H%M%S',gmtime())
      build_time = time+'_build'
      run_time = time+'_run'
    - print 'gtime: ', gtime
    -
    - #Making assumptions about the location of scenario data
    +
    + #------------------------------------------------------------------------------
    + # Initial Conditions
    + #------------------------------------------------------------------------------
    +
    + # this section needs to be updated to reflect the modelled community.
    + # Note, the user needs to set up the directory system accordingly
      state = 'western_australia'
      scenario_name = 'geraldton'
      scenario = 'geraldton_tsunami_scenario'

    -
    - tide = 0 #0.75 #??? must check!!!
    -
    - alpha = 0.1
    - friction=0.01
    - starttime=0
    - finaltime=1000
    - export_cellsize=25
    - setup='final'
    - source='elevation'
    -
    + # Model specific parameters. One or all can be changed each time the
    + # run_scenario script is executed
    + tide = 0                #0.6
    + #event_number = 27255   # linked to hazard map
    + event_number = 27283
    + alpha = 0.1             # smoothing parameter for mesh
    + friction=0.01           # manning's friction coefficient
    + starttime=0
    + finaltime=80000         # final time for simulation
    +
    + setup='final'  # Final can be replaced with trial or basic.
    +                # Either will result in a coarser mesh that will allow a
    +                # faster, but less accurate, simulation.

      if setup =='trial':
    …
          yieldstep=60

    - dir_comment='_'+setup+'_'+str(tide)+'_'+str(source)+'_'+ 'alpha' +str(alpha)+'_'+str(user)
    -
    -
    - # onshore data provided by WA DLI
    - onshore_name = 'landgate_dem_clip' # original
    - island_name = 'dted_srtm_islands'
    -
    - # AHO + DPI data
    - coast_name = 'XYcoastline_KVP'
    - offshore_name = 'Geraldton_bathymetry'
    - offshore_name1 = 'DPI_Data'
    - offshore_name2 = 'grid250'
    - offshore_name3 = 'Top_bathymetry'
    -
    -
    - #final topo name
    + #------------------------------------------------------------------------------
    + # Output Filename
    + #------------------------------------------------------------------------------
    + # Important to distinguish each run - ensure str(user) is included!
    + # Note, the user is free to include as many parameters as desired
    + dir_comment='_'+setup+'_'+str(tide)+'_'+str(event_number)+'_'+ 'alpha' +str(alpha)+'_'+str(user)
    +
    + #------------------------------------------------------------------------------
    + # Input Data
    + #------------------------------------------------------------------------------
    +
    + # elevation data used in build_geraldton.py
    + # onshore data: format ascii grid with accompanying projection file
    + onshore_name = 'landgate_dem_clip'
    + # island: format ascii grid with accompanying projection file
    + island_name = 'dted_srtm_islands'
    + # coastline: format x,y,elevation (with title)
    + coast_name = 'XYcoastline_KVP.txt'
    + # bathymetry: format x,y,elevation (with title)
    + offshore_name = 'Geraldton_bathymetry.txt'
    + offshore_name1 = 'DPI_Data.txt'
    + offshore_name2 = 'grid250.txt'
    + offshore_name3 = 'Top_bathymetry.txt'
    +
    + # gauges - used in get_timeseries.py
    + gauge_name = 'geraldton.csv'
    + gauge_name2 = 'thinned_MGA50.csv'
    +
    + # BOUNDING POLYGON - used in build_boundary.py and run_geraldton.py respectively
    + # NOTE: when files are put together the points must be in sequence - for ease go clockwise!
    + # Check the run_geraldton.py for boundary_tags
    + # thinned ordering file from Hazard Map: format is index,latitude,longitude (with title)
    + order_filename = 'thinned_boundary_ordering.txt'
    + #landward bounding points
    + landward = 'landward_bounding_polygon.txt'
    +
    + #------------------------------------------------------------------------------
    + # Output Elevation Data
    + #------------------------------------------------------------------------------
    + # Output filename for elevation
    + # this is a combination of all the data (utilisied in build_boundary)
      combined_name ='geraldton_combined_elevation'
    - combined_name_small = 'geraldton_combined_elevation_small'
    -
    + combined_smaller_name = 'geraldton_combined_elevation_smaller'
    +
    + #------------------------------------------------------------------------------
    + # Directory Structure
    + #------------------------------------------------------------------------------
      anuga_dir = home+state+sep+scenario+sep+'anuga'+sep
    -
      topographies_in_dir = home+state+sep+scenario+sep+'elevation_final'+sep+'points'+sep
    - topographies_dir = home+state+sep+scenario+sep+'anuga'+sep+'topographies'+sep
    -
    - ##input topo file location
    + topographies_dir = anuga_dir+'topographies'+sep
    + polygons_dir = anuga_dir+'polygons'+sep
    + tide_dir = anuga_dir+'tide_data'+sep
    + boundaries_dir = anuga_dir+'boundaries'+ sep
    + output_dir = anuga_dir+'outputs'+sep
    + gauges_dir = anuga_dir+'gauges'+sep
    + meshes_dir = anuga_dir+'meshes'+sep
    +
    + #------------------------------------------------------------------------------
    + # Location of input and output data
    + #------------------------------------------------------------------------------
    + # where the input data sits
      onshore_in_dir_name = topographies_in_dir + onshore_name
      island_in_dir_name = topographies_in_dir + island_name
    -
      coast_in_dir_name = topographies_in_dir + coast_name
      offshore_in_dir_name = topographies_in_dir + offshore_name
    …
      offshore_in_dir_name3 = topographies_in_dir + offshore_name3

    - ##output topo file location
    + # where the output data sits
      onshore_dir_name = topographies_dir + onshore_name
      island_dir_name = topographies_dir + island_name
    …
      offshore_dir_name3 = topographies_dir + offshore_name3

    - #final topo files
    + # where the combined elevation file sits
      combined_dir_name = topographies_dir + combined_name
    - combined_dir_name_small = topographies_dir + combined_name_small
    -
    - meshes_dir = anuga_dir+'meshes'+sep
    - meshes_dir_name = meshes_dir + scenario_name
    -
    - polygons_dir = anuga_dir+'polygons'+sep
    - tide_dir = anuga_dir+'tide_data'+sep
    -
    - #boundaries_source = '1'
    -
    - #boundaries locations
    - boundaries_dir = anuga_dir+'boundaries'+sep
    - boundaries_dir_name = boundaries_dir + scenario_name
    + combined_smaller_name_dir = topographies_dir + combined_smaller_name
    +
    + # where the mesh sits (this is created during the run_geraldton.py)
    + meshes_dir_name = meshes_dir + scenario_name+'.msh'
    +
    + # where the boundary ordering files sit (this is used within build_boundary.py)
    + order_filename_dir = boundaries_dir + order_filename
    +
    + # where the landward points of boundary extent sit (this is used within run_geraldton.py)
    + landward_dir = boundaries_dir + landward
    +
    + # where the event sts files sits (this is created during the build_boundary.py)
    + boundaries_dir_event = boundaries_dir + str(event_number) + sep
      boundaries_dir_mux = muxhome

    - #output locations
    - output_dir = anuga_dir+'outputs'+sep
    - output_build_time_dir = anuga_dir+'outputs'+sep+build_time+dir_comment+sep
    - output_run_time_dir = anuga_dir+'outputs'+sep+run_time+dir_comment+sep
    + # where the directory of the output filename sits
    + output_build_time_dir = output_dir+build_time+dir_comment+sep   #used for build_geraldton.py
    + output_run_time_dir = output_dir+run_time+dir_comment+sep       #used for run_geraldton.py
      output_run_time_dir_name = output_run_time_dir + scenario_name  #Used by post processing

    - vertex_filename = output_run_time_dir + 'mesh_vertex.csv'
    -
    - #gauges
    - gauge_name = '???.csv'
    - gauges_dir = home+state+sep+scenario+sep+'anuga'+sep+'gauges'+sep
    - gauges_dir_name = gauges_dir + gauge_name
    -
    - buildings_filename = gauges_dir + 'Busselton_res_Project.csv'
    - buildings_filename_out = 'Busselton_res_Project_modified.csv'
    -
    - community_filename = gauges_dir +''
    - community_broome = gauges_dir + ''
    -
    -
    - ###############################
    + #w here the directory of the gauges sit
    + gauges_dir_name = gauges_dir + gauge_name       #used for get_timeseries.py
    + gauges_dir_name2 = gauges_dir + gauge_name2     #used for get_timeseries.py
    +
    + #------------------------------------------------------------------------------
      # Interior region definitions
    - ###############################
    -
    - # bounding polygon for study area
    + #------------------------------------------------------------------------------
    +
    + #Land, to set the initial stage/water to be offcoast only
    + #Land and Ocean to clip data
    + poly_mainland=read_polygon(polygons_dir +'land_initial_condition.csv')
    + poly_ocean=read_polygon(polygons_dir +'ocean_initial_condition.csv')
    +
    + # Initial bounding polygon for data clipping
      poly_all = read_polygon(polygons_dir+'poly_all.csv')
      res_poly_all = 100000*res_factor

    - poly_neg20_pos20 = read_polygon(polygons_dir+'neg20_pos20.csv')
    - res_neg20_pos20 = 25000*res_factor
    -
    - poly_CBD_1km= read_polygon(polygons_dir+'CBD_1km.csv')
    - res_CBD_1km = 800*res_factor
    -
    - poly_cbd = read_polygon(polygons_dir+'CBD_500m.csv')
    - res_cbd = 500*res_factor
    -
    - poly_wallabi = read_polygon(polygons_dir+'island_wallabi_poly.csv')
    - res_wallabi = 5000*res_factor
    -
    - poly_dingiville = read_polygon(polygons_dir+'island_dingiville_poly.csv')
    - res_dingiville = 5000*res_factor
    -
    - poly_pelsaert = read_polygon(polygons_dir+'island_pelsaert_poly.csv')
    - res_pelsaert = 5000*res_factor
    -
    -
    - #plot_polygons([polyAll,poly_broome1,poly_broome2,poly_broome3],figname='boundingpoly2',verbose=False)
    -
    - interior_regions = [[poly_neg20_pos20,res_neg20_pos20],[poly_CBD_1km,res_CBD_1km],
    -                     [poly_cbd,res_cbd],[poly_wallabi,res_wallabi],
    -                     [poly_dingiville,res_dingiville],[poly_pelsaert, res_pelsaert]]
    -
    + # Area of Interest 1 (Geraldton)
    + poly_aoi1 = read_polygon(polygons_dir+'CBD_500m.csv')
    + res_aoi1 = 500*res_factor
    +
    + # Area of Significance 1 (Geraldton)
    + poly_aos1 = read_polygon(polygons_dir+'CBD_1km.csv')
    + res_aos1 = 800*res_factor
    +
    + # Area of Significance 2 (island1)
    + poly_aos2 = read_polygon(polygons_dir+'island_wallabi_poly.csv')
    + res_aos2 = 5000*res_factor
    +
    + # Area of Significance 3 (island2)
    + poly_aos3 = read_polygon(polygons_dir+'island_dingiville_poly.csv')
    + res_aos3 = 5000*res_factor
    +
    + # Area of Significance 4 (island3)
    + poly_aos4 = read_polygon(polygons_dir+'island_pelsaert_poly.csv')
    + res_aos4 = 5000*res_factor
    +
    + # Shallow water 1
    + poly_sw1 = read_polygon(polygons_dir+'neg20_pos20.csv')
    + res_sw1 = 25000*res_factor
    +
    + # Combined all regions, must check that all are included!
    + interior_regions = [[poly_aoi1,res_aoi1],[poly_aos1,res_aos1]
    +                      ,[poly_aos2,res_aos2],[poly_aos3,res_aos3]
    +                      ,[poly_aos4,res_aos4],[poly_sw1,res_sw1]]
    +
      trigs_min = number_mesh_triangles(interior_regions, poly_all, res_poly_all)
    - print 'min number triangles', trigs_min
    -
    - ##For no input boundary file
    - #boundary_tags={'back': [2,3,4,5], 'side': [0,1,6], 'ocean': [7,8,9]}
    -
    - poly_mainland=read_polygon(topographies_in_dir +'initial_condition.csv')
    -
    -
    -
    - ###################################################################
    + print 'min estimated number of triangles', trigs_min
    +
    + #------------------------------------------------------------------------------
      # Clipping regions for export to asc and regions for clipping data
    - ###################################################################
    -
    + # Final inundation maps should only be created in regions of the finest mesh
    + #------------------------------------------------------------------------------

      # Geraldton CBD extract ascii grid
    …
      yminCBD = 6811500
      ymaxCBD = 6816400
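    The restructured project.py names each mesh refinement by area of interest or significance and feeds the polygon/resolution pairs into ANUGA's triangle estimate. A minimal sketch of that calculation with small placeholder polygons standing in for the Geraldton CSV files:

    # Sketch only: estimating mesh size from interior regions, as project.py does.
    # The polygons are placeholder squares, not the real Geraldton polygons.
    from anuga.utilities.polygon import number_mesh_triangles

    res_factor = 1

    # Overall bounding polygon with a coarse maximum triangle area
    poly_all = [[0.0, 0.0], [10000.0, 0.0], [10000.0, 10000.0], [0.0, 10000.0]]
    res_poly_all = 100000*res_factor

    # One finer area of interest nested inside it
    poly_aoi1 = [[4000.0, 4000.0], [6000.0, 4000.0], [6000.0, 6000.0], [4000.0, 6000.0]]
    res_aoi1 = 500*res_factor

    interior_regions = [[poly_aoi1, res_aoi1]]

    trigs_min = number_mesh_triangles(interior_regions, poly_all, res_poly_all)
    print 'min estimated number of triangles', trigs_min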
  • anuga_work/production/geraldton/run_geraldton.py

    r5751 → r5789

    - """Script for running tsunami inundation scenario for Dampier, WA, Australia.
    -
    - Source data such as elevation and boundary data is assumed to be available in
    - directories specified by project.py
    - The output sww file is stored in project.output_run_time_dir
    + """Script for running a tsunami inundation scenario for geraldton, WA, Australia.

      The scenario is defined by a triangular mesh created from project.polygon,
    - the elevation data and a simulated tsunami generated with URS code.
    -
    - Ole Nielsen and Duncan Gray, GA - 2005 and Jane Sexton, Nick Bartzis, GA - 2006
    + the elevation data is compiled into a pts file through build_geraldton.py
    + and a simulated tsunami is generated through an sts file from build_boundary.py.
    +
    + Input: sts file (build_boundary.py for respective event)
    +        pts file (build_geraldton.py)
    +        information from project file
    + Outputs: sww file stored in project.output_run_time_dir
    + The export_results_all.py and get_timeseries.py is reliant
    + on the outputs of this script
    +
    + Ole Nielsen and Duncan Gray, GA - 2005, Jane Sexton, Nick Bartzis, GA - 2006
    + Ole Nielsen, Jane Sexton and Kristy Van Putten - 2008
      """

    …
      from Numeric import allclose
      from anuga.shallow_water.data_manager import export_grid, create_sts_boundary
      from anuga.pmesh.mesh_interface import create_mesh_from_regions
      from anuga.shallow_water.data_manager import start_screen_catcher, copy_code_files,store_parameters
    - #from anuga_parallel.parallel_api import distribute, numprocs, myid, barrier
      from anuga_parallel.parallel_abstraction import get_processor_name
      from anuga.caching import myhash
    …
      from anuga.utilities.polygon import read_polygon, plot_polygons, polygon_area, is_inside_polygon
      from anuga.geospatial_data.geospatial_data import find_optimal_smoothing_parameter
    - from Scientific.IO.NetCDF import NetCDFFile
    -
    + from polygon import Polygon_function
    +
      # Application specific imports
    - import project                 # Definition of file names and polygons
    + import project  # Definition of file names and polygons
      numprocs = 1
      myid = 0
    …
      def run_model(**kwargs):

          #------------------------------------------------------------------------------
          # Copy scripts to time stamped output directory and capture screen
    …

          #copy script must be before screen_catcher
    -     #print kwargs

          print 'output_dir',kwargs['output_dir']
    -     if myid == 0:
    -         copy_code_files(kwargs['output_dir'],__file__,
    -                  dirname(project.__file__)+sep+ project.__name__+'.py' )
    -
    -         store_parameters(**kwargs)
    -
    -    # barrier()
    +
    +     copy_code_files(kwargs['output_dir'],__file__,
    +              dirname(project.__file__)+sep+ project.__name__+'.py' )
    +
    +     store_parameters(**kwargs)

          start_screen_catcher(kwargs['output_dir'], myid, numprocs)

          print "Processor Name:",get_processor_name()

          #-----------------------------------------------------------------------
    …

          # Read in boundary from ordered sts file
    -     urs_bounding_polygon=create_sts_boundary(os.path.join(project.boundaries_dir,project.scenario_name))
    +     urs_bounding_polygon=create_sts_boundary(os.path.join(project.boundaries_dir_event,project.scenario_name))

          # Reading the landward defined points, this incorporates the original clipping
          # polygon minus the 100m contour
    -     landward_bounding_polygon = read_polygon(project.boundaries_dir+'landward_boundary_polygon.txt')
    +     landward_bounding_polygon = read_polygon(project.landward_dir)

          # Combine sts polyline with landward points
    …
          # counting segments
          N = len(urs_bounding_polygon)-1
    -     boundary_tags={'back': [N+2,N+3], 'side': [N,N+1,N+4],'ocean': range(N)}
    -
    -     print 'boundary tags',boundary_tags
    +
    +     # boundary tags refer to project.landward 4 points equals 5 segments start at N
    +     boundary_tags={'back': [N+2,N+3], 'side': [N,N+1,N+4], 'ocean': range(N)}

          #--------------------------------------------------------------------------
    -     # Create the triangular mesh based on overall clipping polygon with a
    -     # tagged
    +     # Create the triangular mesh based on overall clipping polygon with a tagged
          # boundary and interior regions defined in project.py along with
          # resolutions (maximal area of per triangle) for each polygon
          #--------------------------------------------------------------------------

    -     #IMPORTANT don't cache create_mesh_from_region and Domain(mesh....) as it
    +     # IMPORTANT don't cache create_mesh_from_region and Domain(mesh....) as it
          # causes problems with the ability to cache set quantity which takes alot of times
    -     if myid == 0:
    -
    -         print 'start create mesh from regions'
    -
    -         create_mesh_from_regions(bounding_polygon,
    -                              boundary_tags=boundary_tags,
    -                              maximum_triangle_area=project.res_poly_all,
    -                              interior_regions=project.interior_regions,
    -                              filename=project.meshes_dir_name+'.msh',
    -                              use_cache=True,
    -                              verbose=True)
    -    # barrier()
    -
    - ##        covariance_value,alpha = find_optimal_smoothing_parameter (data_file= kwargs['elevation_file'],
    - ##                                alpha_list=[0.001, 0.01, 0.1, 0.15, 0.2, 0.25, 0.3, 0.4, 0.5],
    - ##                                mesh_file = project.meshes_dir_name+'.msh')
    - ##        print 'optimal alpha', covariance_value,alpha
    -
    +
    +     print 'start create mesh from regions'
    +
    +     create_mesh_from_regions(bounding_polygon,
    +                          boundary_tags=boundary_tags,
    +                          maximum_triangle_area=project.res_poly_all,
    +                          interior_regions=project.interior_regions,
    +                          filename=project.meshes_dir_name,
    +                          use_cache=True,
    +                          verbose=True)
    +
          #-------------------------------------------------------------------------
          # Setup computational domain
    …
          print 'Setup computational domain'

    -     domain = Domain(project.meshes_dir_name+'.msh', use_cache=False, verbose=True)
    +     domain = Domain(project.meshes_dir_name, use_cache=False, verbose=True)
          print 'memory usage before del domain',mem_usage()
    …
          # Setup initial conditions
          #-------------------------------------------------------------------------
    -     if myid == 0:
    -
    -         print 'Setup initial conditions'
    -
    -         from polygon import Polygon_function
    -         #following sets the stage/water to be offcoast only
    -         IC = Polygon_function( [(project.poly_mainland, 0)], default = kwargs['tide'],
    -                                  geo_reference = domain.geo_reference)
    -         domain.set_quantity('stage', IC)
    -         #domain.set_quantity('stage',kwargs['tide'] )
    -         domain.set_quantity('friction', kwargs['friction'])
    -
    -         print 'Start Set quantity',kwargs['elevation_file']
    -
    -         domain.set_quantity('elevation',
    -                             filename = kwargs['elevation_file'],
    -                             use_cache = False,
    -                             verbose = True,
    -                             alpha = kwargs['alpha'])
    -         print 'Finished Set quantity'
    -     #barrier()
    -
    +     print 'Setup initial conditions'
    +
    +     # sets the initial stage in the offcoast region only
    +     IC = Polygon_function( [(project.poly_mainland, 0)], default = kwargs['tide'],
    +                              geo_reference = domain.geo_reference)
    +     domain.set_quantity('stage', IC)
    +     #domain.set_quantity('stage',kwargs['tide'] )
    +     domain.set_quantity('friction', kwargs['friction'])
    +
    +     print 'Start Set quantity',kwargs['elevation_file']
    +
    +     domain.set_quantity('elevation',
    +                         filename = kwargs['elevation_file'],
    +                         use_cache = False,
    +                         verbose = True,
    +                         alpha = kwargs['alpha'])
    +     print 'Finished Set quantity'
    +
    + ##   #------------------------------------------------------
    + ##    # Distribute domain to implement parallelism !!!
      ##    #------------------------------------------------------
    - ##    # Create x,y,z file of mesh vertex!!!
    - ##    #------------------------------------------------------
    - ##        coord = domain.get_vertex_coordinates()
    - ##        depth = domain.get_quantity('elevation')
    - ##
    - ##        # Write vertex coordinates to file
    - ##        filename=project.vertex_filename
    - ##        fid=open(filename,'w')
    - ##        fid.write('x (m), y (m), z(m)\n')
    - ##        for i in range(len(coord)):
    - ##            pt=coord[i]
    - ##            x=pt[0]
    - ##            y=pt[1]
    - ##            z=depth[i]
    - ##            fid.write('%.6f,%.6f,%.6f\n' %(x, y, z))
      ##
    -
    -     #------------------------------------------------------
    -     # Distribute domain to implement parallelism !!!
    -     #------------------------------------------------------
    -
    -     if numprocs > 1:
    -         domain=distribute(domain)
    + ##    if numprocs > 1:
    + ##        domain=distribute(domain)

          #------------------------------------------------------
    …
          #------------------------------------------------------
          print 'domain id', id(domain)
    -     domain.set_name(kwargs['aa_scenario_name'])
    +     domain.set_name(kwargs['scenario_name'])
          domain.set_datadir(kwargs['output_dir'])
    -     domain.set_default_order(2) # Apply second order scheme
    -     domain.set_minimum_storable_height(0.01) # Don't store anything less than 1cm
    +     domain.set_default_order(2)                 # Apply second order scheme
    +     domain.set_minimum_storable_height(0.01)    # Don't store anything less than 1cm
          domain.set_store_vertices_uniquely(False)
          domain.set_quantities_to_be_stored(['stage', 'xmomentum', 'ymomentum'])
    …
          print 'domain id', id(domain)

    -     boundary_urs_out=project.boundaries_dir_name
    +     boundary_urs_out=project.boundaries_dir_event + sep + project.scenario_name

          Br = Reflective_boundary(domain)
    …
              domain.write_boundary_statistics(tags = 'ocean')

    +     # these outputs should be checked with the resultant inundation map
          x, y = domain.get_maximum_inundation_location()
          q = domain.get_maximum_inundation_elevation()
          print 'Maximum runup observed at (%.2f, %.2f) with elevation %.2f' %(x,y,q)

    -     print 'That took %.2f seconds' %(time.time()-t0)
    +     print 'Simulation took %.2f seconds' %(time.time()-t0)

          #kwargs 'completed' must be added to write the final parameters to file
          kwargs['completed']=str(time.time()-t0)
    -
    -     if myid==0:
    -         store_parameters(**kwargs)
    -    # barrier
    +
    +     store_parameters(**kwargs)

          print 'memory usage before del domain1',mem_usage()
    …

          kwargs={}
    -     kwargs['est_num_trigs']=project.trigs_min
    -     kwargs['num_cpu']=numprocs
    -     kwargs['host']=project.host
    -     kwargs['res_factor']=project.res_factor
    -     kwargs['starttime']=project.starttime
    -     kwargs['yieldstep']=project.yieldstep
          kwargs['finaltime']=project.finaltime
    -
          kwargs['output_dir']=project.output_run_time_dir
          kwargs['elevation_file']=project.combined_dir_name+'.pts'
    -     kwargs['file_name']=project.home+'detail.csv'
    -     kwargs['aa_scenario_name']=project.scenario_name
    -     kwargs['ab_time']=project.time
    -     kwargs['res_factor']= project.res_factor
    +     kwargs['scenario_name']=project.scenario_name
          kwargs['tide']=project.tide
    -     kwargs['user']=project.user
          kwargs['alpha'] = project.alpha
          kwargs['friction']=project.friction
    -     kwargs['time_thinning'] = project.time_thinning
    -     kwargs['dir_comment']=project.dir_comment
    -     kwargs['export_cellsize']=project.export_cellsize
    -
    +     #kwargs['num_cpu']=numprocs
    +     #kwargs['host']=project.host
    +     #kwargs['starttime']=project.starttime
    +     #kwargs['yieldstep']=project.yieldstep
    +     #kwargs['user']=project.user
    +     #kwargs['time_thinning'] = project.time_thinning
    +
      run_model(**kwargs)
    -        #barrier
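    For orientation, the run script now follows a straight-line mesh-then-domain sequence (the old if myid == 0 blocks and the explicit distribute call are gone). A minimal sketch of that sequence with placeholder polygons, tags, parameters and file names rather than the production Geraldton inputs:

    # Sketch only: build a tagged mesh, load it into a Domain and set initial quantities.
    # The polygon, tags, parameters and file names are illustrative placeholders.
    from anuga.pmesh.mesh_interface import create_mesh_from_regions
    from anuga.shallow_water import Domain

    bounding_polygon = [[0.0, 0.0], [10000.0, 0.0], [10000.0, 10000.0], [0.0, 10000.0]]
    boundary_tags = {'back': [2], 'side': [1, 3], 'ocean': [0]}   # one tag per segment index

    create_mesh_from_regions(bounding_polygon,
                             boundary_tags=boundary_tags,
                             maximum_triangle_area=100000,
                             filename='example.msh',
                             use_cache=True,
                             verbose=True)

    domain = Domain('example.msh', use_cache=False, verbose=True)
    domain.set_name('example_scenario')          # sww output will be example_scenario.sww
    domain.set_default_order(2)                  # second order scheme
    domain.set_minimum_storable_height(0.01)     # don't store depths below 1 cm

    domain.set_quantity('stage', 0.0)            # flat initial water level (tide)
    domain.set_quantity('friction', 0.01)        # Manning's n
    # In the real script elevation comes from the combined pts file with smoothing alpha:
    # domain.set_quantity('elevation', filename=project.combined_dir_name+'.pts', alpha=0.1)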