Changeset 3788 for anuga_work/production/broome_2006/project.py
Timestamp:
Oct 16, 2006, 2:12:04 PM (17 years ago)
File:
1 edited
Legend:
Unmodified
Added
Removed
anuga_work/production/broome_2006/project.py
r3769 r3788 1 # -*- coding: cp1252 -*- 1 2 """Common filenames and locations for topographic data, meshes and outputs. 2 3 """ 3 4 4 5 from os import sep, environ, getenv, getcwd 5 from os.path import expanduser, basename 6 #from anuga.utilities.polygon import read_polygon 6 from os.path import expanduser 7 7 import sys 8 from anuga.pmesh.create_mesh import convert_from_latlon_to_utm 9 from anuga.coordinate_transforms.redfearn import degminsec2decimal_degrees 10 from time import localtime, strftime 11 from anuga.geospatial_data.geospatial_data import * 8 from time import localtime, strftime, gmtime 9 from anuga.utilities.polygon import read_polygon, plot_polygons, polygon_area, is_inside_polygon 10 from anuga.coordinate_transforms.redfearn import degminsec2decimal_degrees, convert_points_from_latlon_to_utm 11 12 if sys.platform == 'win32': 13 home = getenv('INUNDATIONHOME') 14 user = getenv('USERPROFILE') 15 16 else: 17 home = getenv('INUNDATIONHOME', sep+'d'+sep+'xrd'+sep+'gem'+sep+'2'+sep+'ramp'+sep+'risk_assessment_methods_project'+sep+'inundation') 18 user = getenv('LOGNAME') 19 print 'USER:', user 20 21 # INUNDATIONHOME is the inundation directory, not the data directory. 
22 home += sep +'data' 12 23 13 24 #Making assumptions about the location of scenario data … … 15 26 scenario_dir_name = 'broome_tsunami_scenario_2006' 16 27 17 # all data to be delivered by National Mapping 18 # onshore data from 30m DTED level 2 19 onshore_name_dted = 'broome_onshore_30m_dted' 20 onshore_name_dli = 'broome_onshore_20m_dli' 28 # onshore data provided by WA DLI 29 onshore_name = '' # original 21 30 22 # offshore data from GA digitised charts23 offshore_name 1 = 'broome_offshore_points'31 # offshore data provided by WA DPI 32 offshore_name_dpi1 = '' 24 33 25 # offshore data from AHO fairsheets26 offshore_name 2 = 'broome_offshore_points_fairsheet'34 # AHO data 35 offshore_name1 = 'xy100003760' 27 36 28 # coastline developed from aerial photography and 1.5m DLI contour29 coast_name = ' broome_coastline_points'37 # developed by NM&I 38 coast_name = 'coastline_points' 30 39 31 boundary_basename = 'SU-AU _clip'40 boundary_basename = 'SU-AU' # Mw ? 32 41 33 42 #swollen/ all data output … … 35 44 codename = 'project.py' 36 45 37 if sys.platform == 'win32':38 home = getenv('INUNDATIONHOME') #Sandpit's parent dir39 user = getenv('USERPROFILE')40 else:41 # update to perlite 242 home = getenv('INUNDATIONHOME', sep+'d'+sep+'cit'+sep+'2'+sep+'cit'+sep+'inundation'+sep+'data')43 user = getenv('LOGNAME')44 45 # INUNDATIONHOME is the inundation directory, not the data directory.46 home += sep +'data'47 48 46 #Derive subdirectories and filenames 49 time = strftime('%Y%m%d_%H%M%S',localtime()) #gets time for new dir 50 outputtimedir = home+sep+state+sep+scenario_dir_name+sep+'anuga'+sep+'outputs'+sep+time+sep 51 47 local_time = strftime('%Y%m%d_%H%M%S',gmtime()) 52 48 meshdir = home+sep+state+sep+scenario_dir_name+sep+'anuga'+sep+'meshes'+sep 53 49 datadir = home+sep+state+sep+scenario_dir_name+sep+'anuga'+sep+'topographies'+sep 54 50 gaugedir = home+sep+state+sep+scenario_dir_name+sep+'anuga'+sep+'gauges'+sep 55 51 polygondir = 
home+sep+state+sep+scenario_dir_name+sep+'anuga'+sep+'polygons'+sep 56 boundarydir = home+sep+s tate+sep+scenario_dir_name+sep+'anuga'+sep+'boundaries'+sep52 boundarydir = home+sep+sep+state+sep+scenario_dir_name+sep+'anuga'+sep+'boundaries'+sep 57 53 outputdir = home+sep+state+sep+scenario_dir_name+sep+'anuga'+sep+'outputs'+sep 58 tidedir = home+sep+state+sep+scenario_dir_name+sep+'anuga'+sep+'tide_data'+sep 54 outputtimedir = outputdir + local_time + sep 55 polygondir = home+sep+state+sep+scenario_dir_name+sep+'anuga'+sep+'polygons'+sep 59 56 60 gauge_filename = gaugedir + 'gauge_location_broome.csv' 61 buildings_filename = gaugedir + 'broome_res.csv' 62 buildings_filename_out = 'broome_res_modified.csv' 63 buildings_filename_damage_out = 'broome_res_modified_damage.csv' 64 community_filename = gaugedir + 'CHINS_v2.csv' 65 community_scenario = gaugedir + 'community_pt_hedland.csv' 66 tidal_filename = tidedir + 'pt_hedland_tide.txt' 57 gauge_filename = gaugedir + 'broome_gauges.csv' 67 58 68 meshname = meshdir + basename 69 onshore_dem_name = datadir + onshore_name_dli 70 offshore_dem_name1 = datadir + offshore_name1 71 offshore_dem_name2 = datadir + offshore_name2 59 codedir = getcwd()+sep 60 codedirname = codedir + 'project.py' 61 meshname = outputtimedir + 'mesh_' + basename 62 63 # Necessary if using point datasets, rather than grid 64 onshore_dem_name = datadir + onshore_name 65 offshore_dem_name_local1 = datadir + offshore_name_dpi1 66 offshore_dem_name_aho1 = datadir + offshore_name1 67 72 68 coast_dem_name = datadir + coast_name 73 combined_dem_name = datadir + 'broome_combined_elevation'74 outputname = outputtimedir + basename #Used by post processing75 69 76 # for ferret2sww 77 south = degminsec2decimal_degrees(-20,30,0) 78 north = degminsec2decimal_degrees(-17,10,0) 79 west = degminsec2decimal_degrees(117,00,0) 80 east = degminsec2decimal_degrees(120,00,0) 70 combined_dem_name = datadir + 'broome_combined_elevation' 81 71 82 # region to export (used 
from export_results.py) 83 e_min_area = 648000 84 e_max_area = 675000 85 n_min_area = 7745000 86 n_max_area = 7761000 72 ############################### 73 # Domain definitions 74 ############################### 87 75 88 export_region = [[e_min_area,n_min_area],[e_min_area,n_max_area],[e_max_area,n_max_area],[e_max_area,n_min_area]] 89 90 refzone = 50 76 # bounding box for clipping MOST/URS output (much bigger than study area) 77 south = degminsec2decimal_degrees(-19,0,0) 78 north = degminsec2decimal_degrees(-17,15,0) 79 west = degminsec2decimal_degrees(120,0,0) 80 east = degminsec2decimal_degrees(122,30,0) 91 81 92 from anuga.coordinate_transforms.redfearn import redfearn 93 # boundary up to 50 m contour 94 lat1_50 = degminsec2decimal_degrees(-19,20,0) 95 lat2_50 = degminsec2decimal_degrees(-19,30,0) 96 lat3_50 = degminsec2decimal_degrees(-19,45,0) 97 lon1_50 = degminsec2decimal_degrees(119,05,0) 98 lon2_50 = degminsec2decimal_degrees(118,20,0) 99 lon3_50 = degminsec2decimal_degrees(117,45,0) 100 z, easting, northing = redfearn(lat1_50, lon1_50) 101 d0_50 = [easting, northing] 102 z, easting, northing = redfearn(lat2_50, lon2_50) 103 d1_50 = [easting, northing] 104 z, easting, northing= redfearn(lat3_50, lon3_50) 105 d2_50 = [easting, northing] 82 d0 = [south, west] 83 d1 = [south, east] 84 d2 = [north, east] 85 d3 = [north, west] 86 poly_bc, zone = convert_points_from_latlon_to_utm([d0, d1, d2, d3]) 87 refzone = zone 106 88 107 d4_50 = [285000, 7585000] 108 d6_50 = [330000, 7605000] 109 #bounding_poly50 = [p0_50, p1_50, p2_50, d6_50, d5, d4_50] 89 # bounding polygon for study area 90 polyAll = read_polygon(polygondir+'extent.csv') 110 91 111 d0 = [763852.0, 7934358.0] 112 d1 = [710987.0, 7925797.0] 113 d2 = [658264.0, 7926314.0] 114 d3 = [552686.0, 7871580.0] 115 #d4 = [604415.81, 7733013.56] 116 d4 = [638000.0, 7733013.56] 117 #d5 = [656561.15, 7732615.11] 118 d5 = [662000.0, 7732615.11] 119 #d6 = [708940.32, 7750510.33] 120 d6 = [690000.0, 7740510.33] 121 
#polyAll = [d0, d1, d2, d3, d4, d5, d6] 122 #polyAll = [d0_50, d1_50, d2_50, d4, d5, d6] 123 # from Hamish 124 h0=[629262.17, 7747205.47] 125 h1=[552686.00, 7871579.99] #d3 126 h2=[658264.00, 7926314.00] #d2 127 h3=[710986.99, 7925796.99] #d1 128 h4=[763851.99, 7934357.99] #d0 129 h5=[701485.21, 7770656.86] 130 h6=[698273.75, 7762227.38] 131 h7=[698194.23, 7762018.65] 132 h8=[691627.41, 7744781.98] 133 h9=[679220.75, 7743604.59] 134 h10=[653512.59, 7740528.56] 135 h11=[634777.71, 7738247.17] 136 h12=[629443.86, 7746910.37] 137 h13=[629396.84, 7746986.75] 138 h14=[629352.32, 7747059.06] 139 h15=[629276.24, 7747182.63] 140 h16=[629262.17, 7747205.47] #repeat of h0 141 # using Hamish's new bounding polygon 142 #polyAll = [d0_50, d1_50, d2_50, h16,h15,h14,h13,h12,h11,h10,h9,h8,h7,h6,h5] 143 polyAll = [d0_50, d1_50, d2_50, h16,h11,h8,h6, h5] 92 # plot bounding polygon and make sure BC info surrounds it 93 plot_polygons([polyAll, poly_bc],'boundingpoly',verbose=False) 94 print 'Area of bounding polygon', polygon_area(polyAll)/1000000.0 144 95 145 #Interior region - Broome centre 146 i0 = [668000, 7757000] 147 i1 = [659000, 7755000] 148 i2 = [660000, 7749000] 149 i3 = [667000, 7746000] 150 i4 = [678000, 7751000] 96 ################################################################### 97 # Clipping regions for export to asc and regions for clipping data 98 ################################################################### 151 99 152 poly_broome = [i0, i1, i2, i3, i4] 100 # clipping 20m onshore data set 101 #eastingmin = 520000 102 #eastingmax = 536000 103 #northingmin = 5245000 104 #northingmax = 5260000 153 105 154 #Are there other significant features? 
155 j0 = [670000, 7760000] 156 j1 = [633000, 7745000] 157 j2 = [665000, 7743000] 158 j3 = [690000, 7755000] 106 ############################### 107 # Interior region definitions 108 ############################### 159 109 160 poly_region = [j0, j1, j2, j3] 110 # broome digitized polygons 111 poly_broome1 = read_polygon(polygondir+'Broome_Local_Polygon.csv') 112 poly_broome2 = read_polygon(polygondir+'Broome_Close2.csv') 113 poly_broome3 = read_polygon(polygondir+'Broome_Coast.csv') 114 poly_broome4 = read_polygon(polygondir+'Cable_Beach.csv') 115 116 plot_polygons([polyAll,poly_broome1,poly_broome2,poly_broome3,poly_broome4],'boundingpoly2',verbose=False) 117 print 'Area of local polygon', polygon_area(poly_broome1)/1000000.0 118 print 'Area of close polygon', polygon_area(poly_broome2)/1000000.0 119 print 'Area of coastal polygon', polygon_area(poly_broome3)/1000000.0 120 print 'Area of cable beach polygon', polygon_area(poly_broome4)/1000000.0 121 122 for i in poly_broome3: 123 v = is_inside_polygon(i,poly_broome1, verbose=False) 124 if v == False: print v
Note: See TracChangeset for help on using the changeset viewer.