source: production/onslow_2006/run_onslow_new.py @ 3514

Last change on this file since 3514 was 3514, checked in by duncan, 18 years ago

Hi all,
I'm doing a change in the anuga structure, moving the code to

\anuga_core\source\anuga

After you have done an svn update, the PYTHONPATH has to be changed to;
PYTHONPATH = anuga_core/source/

This is part of the changes required to make installation of anuga quicker and to reduce the size of our sandpits.

If any imports are broken, try fixing them, for example by adding anuga. to them. If this seems to have really broken things, email/phone me.

Cheers
Duncan
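
For example, an import that previously read (a minimal sketch assuming the old flat layout):

    from pyvolution.shallow_water import Domain

would, once PYTHONPATH points at anuga_core/source/, become:

    from anuga.pyvolution.shallow_water import Domain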

File size: 8.7 KB
1"""Script for running a tsunami inundation scenario for Onslow, WA, Australia.
2
3Source data such as elevation and boundary data is assumed to be available in
4directories specified by project.py
5The output sww file is stored in project.outputdir
6
7The scenario is defined by a triangular mesh created from project.polygon,
8the elevation data and a simulated submarine landslide.
9
10Ole Nielsen and Duncan Gray, GA - 2005 and Nick Bartzis, GA - 2006
11"""
12
13
#-------------------------------------------------------------------------------
# Import necessary modules
#-------------------------------------------------------------------------------

# Standard modules
import os
import time

# Related major packages
from anuga.pyvolution.shallow_water import Domain, Reflective_boundary, \
                            Dirichlet_boundary, Time_boundary, File_boundary
from anuga.pyvolution.data_manager import convert_dem_from_ascii2netcdf, dem2pts
from anuga.pyvolution.combine_pts import combine_rectangular_points_files
from anuga.pyvolution.pmesh2domain import pmesh_to_domain_instance
from anuga.geospatial_data.geospatial_data import add_points_files

# Application specific imports
import project                 # Definition of file names and polygons
from smf import slump_tsunami  # Function for submarine mudslide

from shutil import copy
from os import mkdir, access, F_OK

from anuga.geospatial_data.geospatial_data import *

#-------------------------------------------------------------------------------
# Preparation of topographic data
#
# Convert ASC 2 DEM 2 PTS using source data and store result in source data
# Do for coarse and fine data
# Fine pts file to be clipped to area of interest
#-------------------------------------------------------------------------------

# filenames
coarsedemname = project.coarsedemname

onshore_dem_name = project.onshore_dem_name

offshore_points = project.offshore_dem_name

meshname = project.meshname+'.msh'

source_dir = project.boundarydir

# creates copy of code in output dir
if access(project.outputdir,F_OK) == 0 :
    mkdir (project.outputdir)
copy (project.codedirname, project.outputdir + project.codename)
copy (project.codedir + 'run_onslow.py', project.outputdir + 'run_onslow.py')


'''
# coarse data
convert_dem_from_ascii2netcdf(coarsedemname, use_cache=True, verbose=True)
dem2pts(coarsedemname, use_cache=True, verbose=True)


# fine data (clipping the points file to smaller area)
convert_dem_from_ascii2netcdf(onshore_dem_name, use_cache=True, verbose=True)
dem2pts(onshore_dem_name,
        easting_min=project.eastingmin,
        easting_max=project.eastingmax,
        northing_min=project.northingmin,
        northing_max= project.northingmax,
        use_cache=True,
        verbose=True)

        '''
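# Combine the offshore .xya points with the clipped onshore .pts file into a
# single points file (project.combined_dem_name + '.pts'), which is used
# further below to set the elevation quantity.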
print 'create G1'
G1 = Geospatial_data(file_name = project.offshore_dem_name + '.xya')

print 'create G2'
G2 = Geospatial_data(file_name = project.onshore_dem_name + '.pts')

print 'add G1+G2'
G = G1 + G2

print 'export G'
G.new_export_points_file(project.combined_dem_name + '.pts')

'''
add_points_files(
                  project.offshore_dem_name + '.xya',
                  project.onshore_dem_name + '.pts',
                  project.combined_dem_name + '.pts')
'''
#-------------------------------------------------------------------------------
# Create the triangular mesh based on overall clipping polygon with a tagged
# boundary and interior regions defined in project.py along with
# resolutions (maximal area per triangle) for each polygon
#-------------------------------------------------------------------------------

from pmesh.mesh_interface import create_mesh_from_regions

# original
interior_res = 5000
interior_regions = [[project.poly_onslow, interior_res],
                    [project.poly_thevenard, interior_res],
                    [project.poly_direction, interior_res]]
                    #[project.testpoly, interior_res]]
print 'number of interior regions', len(interior_regions)

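# cache() wraps create_mesh_from_regions so the (potentially slow) mesh
# generation is only redone when its arguments change; otherwise a previously
# computed mesh is reused.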
from caching import cache
_ = cache(create_mesh_from_regions,
          project.polyAll,
          {'boundary_tags': {'top': [0], 'topleft': [1],
                             'left': [2], 'bottom': [3],
                             'bottomright': [4], 'topright': [5]},
           'maximum_triangle_area': 100000,
           'filename': meshname,
           'interior_regions': interior_regions},
          verbose = True)


#-------------------------------------------------------------------------------
# Setup computational domain
#-------------------------------------------------------------------------------

domain = pmesh_to_domain_instance(meshname, Domain,
                                  use_cache = True,
                                  verbose = True)

print 'Number of triangles = ', len(domain)
print 'The extent is ', domain.get_extent()
print domain.statistics()

domain.set_name(project.basename)
domain.set_datadir(project.outputdir)
domain.set_quantities_to_be_stored(['stage'])


#-------------------------------------------------------------------------------
# Set up scenario (tsunami_source is a callable object used with set_quantity)
#-------------------------------------------------------------------------------
'''
tsunami_source = slump_tsunami(length=30000.0,
                               depth=400.0,
                               slope=6.0,
                               thickness=176.0,
                               radius=3330,
                               dphi=0.23,
                               x0=project.slump_origin[0],
                               y0=project.slump_origin[1],
                               alpha=0.0,
                               domain=domain)

'''
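# Note: the slump source above is commented out; in this run the tsunami
# signal enters the model through the file boundary (Bf) built from the
# ferret2sww output further below.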
#-------------------------------------------------------------------------------
# Setup initial conditions
#-------------------------------------------------------------------------------

tide = 0.

domain.set_quantity('stage', tide)
domain.set_quantity('friction', 0.0)
print 'Setting elevation from file', project.combined_dem_name + '.pts'
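# Fit the elevation quantity from the combined points file; alpha controls
# the smoothing of the fit (0.1 here).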
domain.set_quantity('elevation',
#                    0.
#                    filename = project.onshore_dem_name + '.pts',
                    filename = project.combined_dem_name + '.pts',
#                    filename = project.offshore_dem_name + '.pts',
                    use_cache = False,
                    verbose = True,
                    alpha = 0.1
                    )
print 'Finished setting elevation'

#-------------------------------------------------------------------------------
# Setup boundary conditions (file boundary offshore, reflective elsewhere)
#-------------------------------------------------------------------------------

from anuga.pyvolution.data_manager import ferret2sww

south = project.south
north = project.north
west = project.west
east = project.east

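# ferret2sww converts the NetCDF boundary data (project.boundary_basename)
# into an sww file over the given lat/lon extent; that sww file drives the
# File_boundary (Bf) defined below.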
cache(ferret2sww,
      (source_dir + project.boundary_basename,
       source_dir + project.boundary_basename),
      {'verbose': True,
# note didn't work with the below
#       'minlat': south - 1,
#       'maxlat': north + 1,
#       'minlon': west - 1,
#       'maxlon': east + 1,
       'minlat': south,
       'maxlat': north,
       'minlon': west,
       'maxlon': east,
#       'origin': project.mesh_origin,
       'origin': domain.geo_reference.get_origin(),
       'mean_stage': tide,
       'zscale': 1,                 #Enhance tsunami
       'fail_on_NaN': False,
       'inverted_bathymetry': True},
      #evaluate = True,
       verbose = True)


print 'Available boundary tags', domain.get_boundary_tags()

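# Boundary objects:
#   Bf - boundary values read from the sww file produced by ferret2sww
#   Br - reflective (solid wall) boundary
#   Bd - Dirichlet boundary at the background tide level (defined but not applied)
#   Bw - square-wave time boundary (defined but not applied)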
Bf = File_boundary(source_dir + project.boundary_basename + '.sww',
                    domain, verbose = True)
Br = Reflective_boundary(domain)
Bd = Dirichlet_boundary([tide,0,0])


# 7 min square wave starting at 1 min, 6m high
Bw = Time_boundary(domain = domain,
                   f=lambda t: [(60<t<480)*6, 0, 0])

domain.set_boundary( {'top': Bf, 'topleft': Bf,
                             'left': Br, 'bottom': Br,
                             'bottomright': Br, 'topright': Bf} )


#-------------------------------------------------------------------------------
# Evolve system through time
#-------------------------------------------------------------------------------
import time
t0 = time.time()

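# Evolve in yield steps of 50 s up to a final time of 100 s, reporting the
# timestep and the boundary statistics for the 'top' tag at each yield step.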
for t in domain.evolve(yieldstep = 50, finaltime = 100):
    domain.write_time()
    domain.write_boundary_statistics(tags = 'top')

print 'That took %.2f seconds' %(time.time()-t0)

print 'finished'