source: production/sydney_2006/run_sydney_smf_polyingo.py @ 3347

Last change on this file since 3347 was 3190, checked in by sexton, 18 years ago

MOST and ANUGA comparisons and updates

File size: 12.0 KB
RevLine 
[2487]1"""Script for running a tsunami inundation scenario for Sydney, NSW, Australia.
2
3Source data such as elevation and boundary data is assumed to be available in
4directories specified by project.py
5The output sww file is stored in project.outputdir
6
7The scenario is defined by a triangular mesh created from project.polygon,
8the elevation data and a simulated submarine landslide.
9
10Ole Nielsen and Duncan Gray, GA - 2005 and Adrian Hitchman and Jane Sexton, GA - 2006
11"""
12
13
14#-------------------------------------------------------------------------------# Import necessary modules
15#-------------------------------------------------------------------------------
16
17# Standard modules
18import os
19import time
20
21# Related major packages
[3190]22from pyvolution.shallow_water import Domain, Reflective_boundary, Time_boundary, Dirichlet_boundary
[2487]23from pyvolution.data_manager import convert_dem_from_ascii2netcdf, dem2pts
24from pyvolution.combine_pts import combine_rectangular_points_files
25from pyvolution.pmesh2domain import pmesh_to_domain_instance
26from pyvolution.quantity import Quantity
27from geospatial_data import *
[3190]28from utilities.polygon import inside_polygon
29from Numeric import allclose
[2487]30
31# Application specific imports
32import project                 # Definition of file names and polygons
33from smf import slump_tsunami  # Function for submarine mudslide
34
35
36#-------------------------------------------------------------------------------
37# Preparation of topographic data
38#
39# Convert ASC 2 DEM 2 PTS using source data and store result in source data
40# Do for coarse and fine data
41# Fine pts file to be clipped to area of interest
42#-------------------------------------------------------------------------------
43# filenames
# Source DEM filenames defined in project.py
coarsedemname = project.coarsedemname
finedemname = project.finedemname


# Clip window (eastings/northings from project.py) applied when
# extracting points from each DEM.
clip = {'easting_min': project.eastingmin,
        'easting_max': project.eastingmax,
        'northing_min': project.northingmin,
        'northing_max': project.northingmax}

# Convert each ASCII grid to a NetCDF DEM, then to a clipped points file.
# Coarse data first, then fine data, as before.
for demname in (coarsedemname, finedemname):
    convert_dem_from_ascii2netcdf(demname, use_cache=True, verbose=True)
    dem2pts(demname,
            use_cache=True,
            verbose=True,
            **clip)


# combining the coarse and fine points files into one
combine_rectangular_points_files(project.finedemname + '.pts',
                                 project.coarsedemname + '.pts',
                                 project.combineddemname + '.pts')
74
75#-------------------------------------------------------------------------------                                 
76# Create the triangular mesh based on overall clipping polygon with a tagged
77# boundary and interior regions defined in project.py along with
78# resolutions (maximal area of per triangle) for each polygon
79#-------------------------------------------------------------------------------
80#from pmesh.create_mesh import create_mesh_from_regions
81from pmesh.mesh_interface import create_mesh_from_regions
82
# Output mesh filename (pmesh .msh format), named per project.py.
meshname = project.meshname4+'.msh'

# Mesh resolutions: maximal area per triangle allowed inside each
# interior polygon -- smaller value gives a finer mesh.
interior_res1 = 1000
interior_res2 = 1315
[2487]87
88# Read in files containing polygon points (generated by GIS)
89# and construct list of interior polygons into interior_regions.
90
def get_polygon_from_file(filename):
    """Read a GIS-exported CSV file of polygon vertices.

    The first line is assumed to be a header and is skipped.  Each
    subsequent line is comma separated, with the x (easting) and y
    (northing) coordinates taken from the second and third fields;
    the first field is ignored.

    Returns a list of [x, y] vertex pairs (floats).
    """
    fid = open(filename)
    try:
        # try/finally so the handle is released even if reading fails
        # (the original leaked the handle on error).
        lines = fid.readlines()
    finally:
        fid.close()

    # Skip the header row, keep fields 1 and 2 of each data row.
    polygon = []
    for line in lines[1:]:
        fields = line.split(',')
        polygon.append([float(fields[1]), float(fields[2])])

    return polygon
106
[3190]107num_polygons = 4
[2487]108fileext = '.csv'
109filename = project.polygonptsfile
110
111interior_regions = []
[3190]112bounding_polygon = project.diffpolygonall
113count = 0
[2487]114for p in range(3, num_polygons+1):
115    thefilename = filename + str(p) + fileext
116    print 'reading in polygon points', thefilename
117    interior_polygon = get_polygon_from_file(thefilename)
118    interior_regions.append([interior_polygon, interior_res1])
[3190]119    n = len(interior_polygon)
120    # check interior polygon falls in bounding polygon
121    if len(inside_polygon(interior_polygon, bounding_polygon,
122                   closed = True, verbose = False)) <> len(interior_polygon):
123        print 'WARNING: interior polygon %d is outside bounding polygon' %(p)
124        count += 1
125    # check for duplicate points in interior polygon
126   
[2487]127print 'number of interior polygons: ', len(interior_regions)
[3190]128if count == 0: print 'interior polygons OK'
[2487]129
130# original
131#interior_regions = []
132#interior_regions = [[project.harbour_polygon_2, interior_res1],
[2732]133#                    [project.botanybay_polygon_2, interior_res1]]
[2487]134
[2732]135# finer set
136#interior_regions = [[project.newpoly1, interior_res1],
137#                    #[project.parrariver, interior_res1], #remove for second run
138#                    [project.south1, interior_res1],
139#                    [project.finepolymanly, interior_res2],
140#                    [project.finepolyquay, interior_res2]]
141
[3190]142
[2487]143#FIXME: Fix caching of this one once the mesh_interface is ready
144from caching import cache
145
[3190]146
[2732]147#_ = cache(create_mesh_from_regions,
148#          project.diffpolygonall,
149#          {'boundary_tags': {'bottom': [0],
150#                             'right1': [1], 'right0': [2],
151#                             'right2': [3], 'top': [4], 'left1': [5],
152#                             'left2': [6], 'left3': [7]},
[3190]153#           'maximum_triangle_area': 150000,
[2732]154#           'filename': meshname,           
155#           'interior_regions': interior_regions},
156#          verbose = True)
157
[3190]158# for demo construction
# Build (or retrieve from cache) the demo mesh over project.demopoly,
# tagging the four sides and refining inside each interior region.
mesh_args = {'boundary_tags': {'bottom': [0], 'right': [1],
                               'top': [2], 'left': [3]},
             'maximum_triangle_area': 100000,
             'filename': meshname,
             'interior_regions': interior_regions}
_ = cache(create_mesh_from_regions, project.demopoly, mesh_args,
          verbose = True)
167
168#mesh_geo_reference:
169#-------------------------------------------------------------------------------                                 
170# Setup computational domain
171#-------------------------------------------------------------------------------                                 
172
# Build the shallow-water Domain from the cached mesh file.
domain = pmesh_to_domain_instance(meshname, Domain,
                                  use_cache = True,
                                  verbose = True)

print 'Number of triangles = ', len(domain)
print 'The extent is ', domain.get_extent()
print domain.statistics()

# Output goes to project.outputdir as an sww file (see module docstring);
# only the listed quantities are stored.
domain.set_name(project.basename4)
domain.set_datadir(project.outputdir)
domain.set_quantities_to_be_stored(['stage', 'xmomentum', 'ymomentum'])
184
185
186#-------------------------------------------------------------------------------
187# Set up scenario (tsunami_source is a callable object used with set_quantity)
188#-------------------------------------------------------------------------------
189
# Callable initial-wave field from a simulated submarine slump, applied
# later as a perturbation of 'stage'.  Parameters are presumably in
# metres / radians per smf.slump_tsunami -- TODO confirm units.
tsunami_source = slump_tsunami(length=30000.0,
                               depth=400.0,
                               slope=6.0,
                               thickness=176.0,
                               radius=3330,
                               dphi=0.23,
                               x0=project.slump_origin[0],
                               y0=project.slump_origin[1],
                               alpha=0.0,
                               domain=domain)
200
201#-------------------------------------------------------------------------------                                 
202# Setup initial conditions
203#-------------------------------------------------------------------------------
204
# NOTE(review): G is constructed but not used below -- possibly a
# leftover from testing; confirm before removing.
G = Geospatial_data(project.test_pts, project.test_elev) #points, attributes
domain.set_quantity('stage', 0) #tsunami_source)
domain.set_quantity('friction', 0.0) # supplied by Benfield, initial value 0.03
# Elevation is fitted from the coarse points file only; the combined
# coarse+fine file prepared above is not used here -- TODO confirm.
domain.set_quantity('elevation', 
                    filename = project.coarsedemname + '.pts',
                    use_cache = True,
                    verbose = True)
212
[2732]213from pyvolution.least_squares import fit_to_mesh_file, DEFAULT_ALPHA
[2487]214
[2732]215#Add elevation data to the mesh
216#fit_to_mesh_file(mesh_file, point_file, mesh_output_file, DEFAULT_ALPHA,
217#                 verbose=True, expand_search=True, precrop=True)
218#pointname = project.coarsedemname + '.pts'
219#mesh_elevname = project.meshelevname
220#cache(fit_to_mesh_file,(meshname,
221#                 pointname,
222#                 mesh_elevname,
223#                 DEFAULT_ALPHA),
224#      {'verbose': True,
225#       'expand_search': True,
226#       'precrop': True}
227#      ,dependencies = [meshname, pointname]
228#      #,evaluate = True     
229#      ,verbose = False
230#      )
[2487]231
232#-------------------------------------------------------------------------------                                 
233# Setup boundary conditions (all reflective)
234#-------------------------------------------------------------------------------
235
print 'Available boundary tags', domain.get_boundary_tags()

# Br is only referenced by the commented-out full-domain configuration
# below; the active demo setup uses Bd everywhere.
Br = Reflective_boundary(domain)
# Dirichlet boundary held at [0, 0, 0] -- presumably stage, xmomentum,
# ymomentum (matches the stored quantities) -- confirm.
Bd = Dirichlet_boundary([0, 0, 0])

# for demo: all four tagged sides use the zero Dirichlet boundary
domain.set_boundary( {'bottom': Bd, 'right': Bd, 'left': Bd, 'top': Bd} )
#domain.set_boundary( {'bottom': Br, 'right1': Br, 'right0': Br,
#                      'right2': Br, 'top': Br, 'left1': Br,
#                      'left2': Br, 'left3': Br} )
246
247
248#-------------------------------------------------------------------------------                                 
249# Evolve system through time
250#-------------------------------------------------------------------------------
251
252import time
253t0 = time.time()
254thisfile = project.integraltimeseries+'.csv'
255fid = open(thisfile, 'w')
256   
[3190]257for t in domain.evolve(yieldstep = 10, finaltime = 600):
[2487]258    domain.write_time()
259    domain.write_boundary_statistics(tags = 'bottom')
[2732]260    stagestep = domain.get_quantity('stage') 
261    s = '%.2f, %.2f\n' %(t, stagestep.get_integral())
[2487]262    fid.write(s)
[3190]263    if allclose(t, 30):
264        slump = Quantity(domain)
265        slump.set_values(tsunami_source)
266        domain.set_quantity('stage', slump + stagestep)
[2487]267
[3190]268# save every two minutes leading up to interesting period
269for t in domain.evolve(yieldstep = 120, finaltime = 660, # steps
270                       skip_initial_step = True): 
271    domain.write_time()
272    domain.write_boundary_statistics(tags = 'bottom')
273    # calculate integral
274    thisstagestep = domain.get_quantity('stage') 
275    s = '%.2f, %.2f\n' %(t, thisstagestep.get_integral())
276    fid.write(s)
277   
278# save every thirty secs during interesting period
279for t in domain.evolve(yieldstep = 30, finaltime = 3500, # steps
280                       skip_initial_step = True):
281    domain.write_time()
282    domain.write_boundary_statistics(tags = 'bottom') #quantities = 'stage')
283    # calculate integral
284    thisstagestep = domain.get_quantity('stage') 
285    s = '%.2f, %.2f\n' %(t, thisstagestep.get_integral())
286    fid.write(s)
287
288# save every two mins for next 5000 secs
289for t in domain.evolve(yieldstep = 120, finaltime = 10000, # about 42 steps
290                       skip_initial_step = True):
291    domain.write_time()
292    domain.write_boundary_statistics(tags = 'bottom') #quantities = 'stage')
293    # calculate integral
294    thisstagestep = domain.get_quantity('stage') 
295    s = '%.2f, %.2f\n' %(t, thisstagestep.get_integral())
296    fid.write(s)
297   
298# save every half hour to end of simulation   
299for t in domain.evolve(yieldstep = 1800, finaltime = 10*60*60, # 14 steps
300                       skip_initial_step = True):
301    domain.write_time()
302    domain.write_boundary_statistics(tags = 'bottom') #quantities = 'stage'
303    # calculate integral
304    thisstagestep = domain.get_quantity('stage') 
305    s = '%.2f, %.2f\n' %(t, thisstagestep.get_integral())
306    fid.write(s)
307
[2487]308print 'That took %.2f seconds' %(time.time()-t0)
309
310
311
Note: See TracBrowser for help on using the repository browser.