Changeset 7678


Timestamp: Apr 9, 2010, 3:16:39 PM
Author: fountain
Message:

Updates to the Bunbury storm surge model

Location: branches/anuga_1_1/anuga_work/production/bunbury_storm_surge_2009
Files: 2 added, 5 edited

  • branches/anuga_1_1/anuga_work/production/bunbury_storm_surge_2009/Arc_asc2raster_GDA94z50.py

    r7613 → r7678

     scenario_dir="\\\\nas2\\gemd\\georisk_models\\inundation\\data\\western_australia\\bunbury_storm_surge_scenario_2009\\"
    +#scenario_dir = "C:\\WorkSpace\\data\\bunbury_storm_surge_scenario_2009\\"
     output_dir = "anuga\\outputs\\"

    -time_dir1 = '20100103_102505_run_storm_surge_final_0_alby_coarse_lfountai'
    +time_dir1 = '20100311_181323_run_storm_surge_final_0_alby_coarse_fountl' #No storm gate
    +time_dir2 = '20100312_132550_run_storm_surge_final_0_alby_coarse_fountl' #Closed storm gate


    -time_dirs = [time_dir1] ##, time_dir2, time_dir3, time_dir4, time_dir5,
    +time_dirs = [time_dir1, time_dir2] ##, time_dir2, time_dir3, time_dir4, time_dir5,
                  ##time_dir6, time_dir7, time_dir8, time_dir9, time_dir10]

    ...
         folder =  scenario_dir + output_dir + time_dir + '\\'
         raster_gbd = folder + 'raster.gdb'
    -##    elevation_gdb = folder + 'elevation.gdb'
    -    #contour = raster_gbd + '\\contour_dep'
    -    land = scenario_dir + "ArcGIS\\data.gdb\\initial_conditions_10m"
    -##    ocean = scenario_dir + "map_work\\Perth.gdb\\Outlines\\initial_condition_ocean"
    +    land = scenario_dir + "ArcGIS\\data.gdb\\initial_conditions_10m_clip"
    +    ocean = scenario_dir + "ArcGIS\\data.gdb\\initial_conditions_ocean"

         print 'Process: Create File GDB'
    -##    gp.createFileGDB_management(folder, "elevation")
    -##    gp.CreateFileGDB_management(folder, "raster")
    +    # gp.CreateFileGDB_management(folder, "raster")

         gp.Workspace = raster_gbd
    -##    gp.Workspace = elevation_gdb

         print gp.Workspace
    ...
         #replication dictionary
         replicate = (('gold_coast', ''), ('_time_29220_0', 'b'), ('_time_58440_0', 'c'),
    -                 ('_time_28860_0', 'b'), ('_time_57720_0', 'c'),
    -                 ('_', ''), ('max','M_'), ('depth','_dep_'),
    +                 ('_time_28860_0', 'b'), ('_time_57720_0', 'c'), ('storm_gate', 'gate'),
    +                 ('_', ''), ('max','M_'), ('depth','_dep_'), ('albycoarse', ''),
                      ('speed', '_spe_'), ('elevation', '_ele_'), ('stage','_stage'))

         generate_filename = []
    -##    input_ascii = glob.glob(folder + '*elevation*.asc')
    -    input_ascii = glob.glob(folder + '*depth*.asc')
    +    # input_ascii = glob.glob(folder + '*elevation*.asc')
    +    # input_ascii = glob.glob(folder + '*speed*.asc')
    +    # input_ascii = glob.glob(folder + '*depth*.asc')
    +    input_ascii = glob.glob(folder + '*stage*.asc')
    +
         print time_dir

    ...
         print 'Process: Extract by Mask'
    -        gp.ExtractByMask_sa(output_DEM, land, output_extract)
    +        # gp.ExtractByMask_sa(output_DEM, land, output_extract)
    +        # gp.ExtractByMask_sa(output_DEM, ocean, output_extract)
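The replicate tuple above only lists the substitution pairs; how they are applied lies outside the hunk. A minimal sketch, assuming the script applies them as sequential string replacements when turning each input .asc name into a short raster name (the shorten helper below is hypothetical, for illustration only):

    # Hypothetical helper, not part of the changeset: apply the (old, new)
    # replicate pairs in order to an ASCII grid filename.
    def shorten(name, replicate):
        for old, new in replicate:
            name = name.replace(old, new)
        return name
    # e.g. every '_time_28860_0' would become 'b' and 'stage' would become '_stage'.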
  • branches/anuga_1_1/anuga_work/production/bunbury_storm_surge_2009/build_elevation.py

    r7658 → r7678

     G9 = geospatial_data[keylist[8]]
     G10 = geospatial_data[keylist[9]]
    -#G11 = geospatial_data[keylist[10]]
    +G11 = geospatial_data[keylist[10]]
    +G12 = geospatial_data[keylist[11]]
    +G13 = geospatial_data[keylist[12]]

    ...
     #~ print 'G10', G10.attributes

    -G = G1 + G2 + G3 + G4 + G5 + G6 + G7 + G8 + G9 + G10 #+ G11
    +G = G1 + G2 + G3 + G4 + G5 + G6 + G7 + G8 + G9 + G10 + G11 + G12 + G13

     # G = None
    ...
     print 'Export combined DEM file'
    -G.export_points_file(project.combined_elevation + '.pts')
    +G.export_points_file(project.combined_elevation + '_GEMS.pts')

     print 'Do txt version too'
    ...
             #~ g.export_points_file(join(project.topographies_folder, (str(keylist[i]) +'_export.txt')))

    -G.export_points_file(project.combined_elevation + '.txt')
    +G.export_points_file(project.combined_elevation + '_GEMS.txt')

     print 'Export individual text files'
    ...
     G1.export_points_file(join(project.topographies_folder, keylist[0] +'_export.txt'))
     G2.export_points_file(join(project.topographies_folder, keylist[1] +'_export.txt'))
    -G3.export_points_file(join(project.topographies_folder, keylist[2] +'_export.txt'))
    -G4.export_points_file(join(project.topographies_folder, keylist[3] +'_export.txt'))
    -G5.export_points_file(join(project.topographies_folder, keylist[4] +'_export.txt'))
    -G6.export_points_file(join(project.topographies_folder, keylist[5] +'_export.txt'))
    -G7.export_points_file(join(project.topographies_folder, keylist[6] +'_export.txt'))
    -G8.export_points_file(join(project.topographies_folder, keylist[7] +'_export.txt'))
    -G9.export_points_file(join(project.topographies_folder, keylist[8] +'_export.txt'))
    -G10.export_points_file(join(project.topographies_folder, keylist[9] +'_export.txt'))
    -#G11.export_points_file(join(project.topographies_folder, keylist[10] +'_export.txt'))
    +G3.export_points_file(join(project.topographies_folder, keylist[2][:-4] +'_export.txt'))
    +G4.export_points_file(join(project.topographies_folder, keylist[3][:-4] +'_export.txt'))
    +G5.export_points_file(join(project.topographies_folder, keylist[4][:-4] +'_export.txt'))
    +G6.export_points_file(join(project.topographies_folder, keylist[5][:-4] +'_export.txt'))
    +G7.export_points_file(join(project.topographies_folder, keylist[6][:-4] +'_export.txt'))
    +G8.export_points_file(join(project.topographies_folder, keylist[7][:-4] +'_export.txt'))
    +G9.export_points_file(join(project.topographies_folder, keylist[8][:-4] +'_export.txt'))
    +G10.export_points_file(join(project.topographies_folder, keylist[9][:-4] +'_export.txt'))
    +G11.export_points_file(join(project.topographies_folder, keylist[10][:-4] +'_export.txt'))
    +G12.export_points_file(join(project.topographies_folder, keylist[11][:-4] +'_export.txt'))
    +G13.export_points_file(join(project.topographies_folder, keylist[12][:-4] +'_export.txt'))
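The change from keylist[n] to keylist[n][:-4] trims the last four characters of each key. A quick illustration, assuming the keys are the point filenames from project.py (e.g. 'Busselton_Chart.txt'), so the slice strips the '.txt'/'.csv' extension before '_export.txt' is appended:

    key = 'Busselton_Chart.txt'        # assumed example key
    print key + '_export.txt'          # old form: Busselton_Chart.txt_export.txt
    print key[:-4] + '_export.txt'     # new form: Busselton_Chart_export.txt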
  • branches/anuga_1_1/anuga_work/production/bunbury_storm_surge_2009/export_results_max.py

    r7613 → r7678

     directory = project.output_folder
    -time_dir1 = '20100103_102505_run_storm_surge_final_0_alby_coarse_lfountai'
    +time_dir1 = '20100311_181323_run_storm_surge_final_0_alby_coarse_fountl' #No storm gate
    +#time_dir2 = '20100312_132550_run_storm_surge_final_0_alby_coarse_fountl' #Closed storm gate

    -time_dirs = [time_dir1]
    +time_dirs = [time_dir1] #, time_dir2]

     # sww filename extensions ie. hobart_time_17640_0.sww, input into list 17640
    ...
     #Modify the cellsize value to set the size of the raster you require
     #Take into account mesh size when aplying this paramater
    -cellsize = 20 #250
    +cellsize = 0.5 #10

     #Now set the timestep at which you want the raster generated.
     #Either set the actual timestep required or use 'None' to indicate that
     #you want the maximum values in the raster over all timesteps
    -timestep = 0
    +timestep = None

     # Set the special areas of interest.  If none, do: area=['All']
    ...
     #area = ['South', 'NW', 'Hobart']
    -area = ['All']
    +area =  ['Storm_gate'] #['All'] #

     # one or more key strings from var_equations below
    -var = ['elevation']
    +var = ['depth', 'stage', 'speed']

     ######
    ...
                     sww2dem(name, basename_out = outname,
                                 quantity = quantityname,
    -                            timestep = timestep,
    +                            reduction = timestep,
                                 cellsize = cellsize,
                                 easting_min = easting_min,
    ...
                                 northing_min = northing_min,
                                 northing_max = northing_max,
    -                            reduction = max,
                                 verbose = True,
                                 format = 'asc')
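For reference, the sww2dem call now takes its reduction from the timestep variable set at the top of the script, where None (per the script's own comment) requests the maximum over all timesteps; the old call passed timestep plus a separate reduction = max. A hedged sketch of the new call with the Storm_gate extents added to project.py in this changeset filled in; the easting_max keyword and any mapping of None to max happen in context lines not shown in the hunk, so treat those details as assumptions:

    sww2dem(name, basename_out = outname,
                quantity = quantityname,
                reduction = timestep,          # timestep = None -> maxima over all timesteps
                cellsize = 0.5,
                easting_min = 373515,  easting_max = 373582,    # xminStorm_gate / xmaxStorm_gate
                northing_min = 6312314, northing_max = 6312358, # yminStorm_gate / ymaxStorm_gate
                verbose = True,
                format = 'asc')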
  • branches/anuga_1_1/anuga_work/production/bunbury_storm_surge_2009/project.py

    r7658 → r7678

     friction = 0.01           # manning's friction coefficient
     starttime = 86400          # start time for simulation - equivalent to 0000h 4 April 1978
    -finaltime = 172800         # final time for simulation - 24 hours for TC Alby
    +duration = 86400
    +#finaltime = 172800         # final time for simulation - 24 hours for TC Alby

     setup = 'storm_surge_final'      # This can be one of four values
    ...
     # Format for point is x,y,elevation (with header)
     point_filenames = ['DPI.txt',                           # Bathymetry data from DPI
    -                   'Busselton_Chart_Clip_ss.txt',       # Clipped from Busselton_Chart - see Busselton Tsunami Scenario 2009
    -                   'Busselton_NavyFinal_Clip_ss.txt',   # Clipped from Busselton_NavyFinal - see Busselton Tsunami Scenario 2009
    -                   'DPI5U1A02_01a_edited_v2.txt',       # Bathymetric LiDAR from DPI - split into manageable pieces and edited so
    -                   'DPI5U1A02_01b_edited_v2.txt',       # depths below 0 m are negative, and all soundings on land (ie positive)
    -                   'DPI5U1A02_01c_edited_v2.txt',       # are removed as these are not corrected to "bare earth".
    +                   'Busselton_Chart.txt',               # see Busselton Tsunami Scenario 2009
    +                   'Busselton_NavyFinal.txt',           # see Busselton Tsunami Scenario 2009
    +                   'Busselton_250m.txt',                # see Busselton Tsunami Scenario 2009
    +                   'DPI5U1A02_01a_edited_v2.txt',       # Bathymetric LiDAR from DPI - split into manageable pieces and edited so
    +                   'DPI5U1A02_01b_edited_v2.txt',       # depths below 0 m are negative, and all soundings on land (ie positive)
    +                   'DPI5U1A02_01c_edited_v2.txt',       # are removed as these are not corrected to "bare earth".
                        'DPI5U1A02_01d_edited_v2.txt',
    -                   'DPI5U1A02_01e_edited_v2.txt']
    -#                   'Leschenault_TIN.txt']               # TIN created over the Leschenault Estuary and Inlet]
    +                   'DPI5U1A02_01e_edited_v2.txt',
    +                   'Bunbury_2010_shoalest.csv',         # Multibeam infill for areas not captured in the Bathy LiDAR
    +                   # 'bu0506hy_cut.csv',                # Hydro survey of Leschenault Inlet and Estuary, extra colums cut
    +                   'Leschenault_TIN.txt']               # TIN created over the Leschenault Estuary and Inlet]

     # BOUNDING POLYGON - for data clipping and estimate of triangles in mesh
     # Used in build_elevation.py & run_model.py
     # Format for points easting,northing (no header)
    -bounding_polygon_filename = 'bounding_polygon_ss.csv'
    +bounding_polygon_filename = 'bounding_polygon_GEMS.csv' #'bounding_polygon_ss.csv'
     bounding_polygon_maxarea = 50000
    +

     # INTERIOR REGIONS -  for designing the mesh
    ...
     # Format for points easting,northing (no header)
     interior_regions_data = [['intermediate.csv', 2500],
    -                         ['area_of_interest.csv', 100],
    -                         ['storm_gate_area.csv', 1],
    -                         ['stormgates.csv', 1]]
    +                         #['aoi_small.csv',200],
    +                         ['area_of_interest.csv', 200],
    +                         ['storm_gate_area.csv', 20]]
    +                         # ['stormgates.csv', 1]]

     # LAND - used to set the initial stage/water to be offcoast only
    ...
     land_initial_conditions_filename = [['initial_conditions.csv', 0]]

    -# GEMS order filename
    -# Format is index,northing, easting, elevation (without header)
    +# GEMS order filename - should be in same direction as landward boundary points ie clockwise or anti-clockwise
    +# Format is northing, easting (without header)
     gems_order_filename = 'gems_boundary_order_thinned.csv'
    ...
     # Format is as for a building file to be read by csv2building_polygons,
     # easting, northing, id, floors (with header)
    -storm_gate_filename = 'storm_gates.csv'
    +storm_gate_filename = 'storm_gates_large.csv' #'storm_gates.csv'

     # GAUGES - for creating timeseries at a specific point
    ...
     # Landward bounding points
    -# Format easting,northing (no header)
    +# Format easting,northing (no header) - should be in same direction as GEMS order filename ie clockwise or anti-clockwise
     landward_boundary_filename = 'landward_boundary.csv'
    ...
     ##mux_input = join(event_folder, mux_input_filename)

    -
    +# Areas for export of results
    +# Used in export_results_max.py
    +
    +# Storm gate area:
    +xminStorm_gate=373515
    +xmaxStorm_gate=373582
    +yminStorm_gate=6312314
    +ymaxStorm_gate=6312358
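The switch from finaltime to duration here (and in run_model.py below) keeps the same 24-hour TC Alby window, since the run still begins at starttime. A quick check using the values in the hunk above:

    starttime = 86400              # 0000h 4 April 1978 (unchanged)
    duration  = 86400              # introduced by this changeset
    print starttime + duration     # 172800, the value finaltime previously held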
  • branches/anuga_1_1/anuga_work/production/bunbury_storm_surge_2009/run_model.py

    r7658 → r7678

     from anuga.interface import Field_boundary
     from anuga.interface import create_sts_boundary
    -##from anuga.interface import csv2building_polygons
    +from anuga.interface import csv2building_polygons
     from file_length import file_length

    ...
     ##from anuga.shallow_water.data_manager import urs2sts
     from anuga.utilities.polygon import read_polygon, Polygon_function
    +from anuga.utilities.system_tools import get_revision_number

     # Application specific imports
    ...
     start_screen_catcher(project.output_run, 0, 1)

    +print 'SVN revision number: ', get_revision_number()
    +
     #-------------------------------------------------------------------------------
     # Create the computational domain based on overall clipping polygon with
    ...

     print 'Create computational domain'
    -
    -### Create the STS file
    -##print 'project.mux_data_folder=%s' % project.mux_data_folder
    -##if not os.path.exists(project.event_sts + '.sts'):
    -##    bub.build_urs_boundary(project.mux_input_filename, project.event_sts)
    -
    -### Read in boundary from ordered sts file
    -##gems_boundary = create_sts_boundary(project.event_sts)

     #reading the GEMS boundary points
    ...
     # Add storm surge gate
     #--------------------------
    -storm_gate_polygon, storm_gate_height = csv2building_polygons(project.storm_gate, floor_height=4)
    +storm_gate_polygon, storm_gate_height = csv2building_polygons(project.storm_gate, floor_height=4.4)

     gate = []
    ...
     t0 = time.time()
     for t in domain.evolve(yieldstep=project.yieldstep,
    -                       finaltime=project.finaltime,
    +                       duration=project.duration,
                            skip_initial_step=False):
         print domain.timestepping_statistics()
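The re-enabled csv2building_polygons import feeds the storm-gate hunk above, where the crest level passed as floor_height rises from 4 to 4.4 m. A minimal sketch of how the returned values appear to be consumed, assuming they are dictionaries keyed by gate id and that the gate list simply collects the polygons, as the following context line suggests (the actual use of gate lies outside this hunk):

    # Hedged sketch, not the committed code: gather the storm-gate polygons.
    storm_gate_polygon, storm_gate_height = csv2building_polygons(project.storm_gate,
                                                                  floor_height=4.4)
    gate = []
    for gate_id, polygon in storm_gate_polygon.iteritems():   # assumed dict structure
        gate.append(polygon)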