source: anuga_work/production/new_south_wales/batemans_bay/compare_inundation_areas.py @ 7572

Last change on this file since 7572 was 7572, checked in by jgriffin, 14 years ago

Now calculates differences in area and writes to file

File size: 11.1 KB
1"""# ---------------------------------------------------------------------------
2 elevation_model.py
3 Created on: Tue Nov 17 2009 02:11:08
4 Created by: Jonathan Griffin
5 This script compares calculated inundate areas for run-up solutions from analystical models
6 and ANUGA modelling.
7
8
9# ---------------------------------------------------------------------------
10"""
# Import system modules
import sys, string, os, arcgisscripting
from os.path import join
from collections import defaultdict
import csv


# Create the Geoprocessor object
gp = arcgisscripting.create(9.3)

# Check out any necessary licenses
gp.CheckOutExtension("spatial")

# Load required toolboxes...
gp.AddToolbox("C:/Program Files/ArcGIS/ArcToolbox/Toolboxes/Spatial Analyst Tools.tbx")
gp.AddToolbox("C:/Program Files/ArcGIS/ArcToolbox/Toolboxes/Conversion Tools.tbx")
gp.AddToolbox("C:/Program Files/ArcGIS/ArcToolbox/Toolboxes/Data Management Tools.tbx")
gp.AddToolbox("C:/Program Files/ArcGIS/ArcToolbox/Toolboxes/Analysis Tools.tbx")

gp.overwriteoutput = 1

filePath = "N:\\georisk_models\\inundation\\data\\new_south_wales\\batemans_bay_tsunami_scenario_2009\\anuga\\outputs\\"
boundary_path = "N:\\georisk_models\\inundation\\data\\new_south_wales\\batemans_bay_tsunami_scenario_2009\\anuga\\boundaries\\"
boundary_file_name = 'wave_energy_summary.csv'
out_file_name = 'area_comparisons.csv'
#figure = 'area_comparisons.png'

boundary_file = join(boundary_path, boundary_file_name)
out_file = join(boundary_path, out_file_name)
#figure_path = join(boundary_path, 'figures', figure)
plot = True

event_dict = {'Event1_MSL':58346,
##              'Event1_HAT':58346,
              'Event2_MSL':51204,
##              'Event2_HAT':51204,
              'Event3_MSL':58284,
##              'Event3_HAT':58284,
              'Puysegur_200yr':58129,
              'Puysegur_500yr':58115,
              'Puysegur_1000yr':58226,
              'Puysegur_5000yr':58286,
              'Puysegur_15000yr':58272,
              'New_Hebrides_200yr':51077,
              'New_Hebrides_500yr':51378,
              'New_Hebrides_1000yr':51347,
              'New_Hebrides_2000yr':51292,
              'New_Hebrides_5000yr':51424}
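
# Note on event_dict (a description of how it is used below, not new logic):
# the keys are the ANUGA output directory names under filePath and the values
# are the run IDs that appear in the gauge-file paths listed in
# wave_energy_summary.csv; each line of that file is matched to an event by
# comparing its run ID against these values.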

# Dictionaries for storing area comparison
area_comp = defaultdict(list)
area_percent = defaultdict(list)

boundaries = open(boundary_file, 'r')
for line in boundaries.readlines():
    line_split = line.split(',')
    if line_split[0] == 'filename':
        continue
    gauge_file = line_split[0]
    event_id = gauge_file.split('/')[10]
    crest_integrated_energy = line_split[1]
    instantaneous_energy = line_split[2]
    run_up_madsen = line_split[3][0:5]
    event_name = False
    for key, value in event_dict.iteritems():
        if value == int(event_id):
            event_name = key
    if event_name is False:
        continue
    raster_path = join(filePath, event_name, 'raster.gdb')

    print event_id
    print gauge_file
    #break

    ##run_up_madsen = 6.0
    run_up_height = str(run_up_madsen)
    # Keep only the first character of the run-up height (e.g. '6' from
    # '6.02') for naming the intermediate rasters and feature classes.
    run_up_name = run_up_height.replace('.', '_')[0]

    ####################################################################################
    ##    Extract areas for analytical solution
    ####################################################################################
    # Local variables...
    elevation = filePath+"elevation\\elevation.gdb\\elevation"
    elevation_extract = filePath+"elevation\\elevation.gdb\\elevation_extract"
    inundation_area1 = "N:\\georisk_models\\inundation\\data\\new_south_wales\\batemans_bay_tsunami_scenario_2009\\anuga\\polygons\\polygons.gdb\\inundation_area1"
    elevation_reclass = filePath+"elevation\\elevation.gdb\\elevation_"+run_up_name+"_reclass"
    elevation_poly = filePath+"elevation\\features.gdb\\elevation_"+run_up_name+"_poly"
    elevation_less = filePath+"elevation\\features.gdb\\elevation_"+run_up_name+"_less"
    batemans_bay_SMARTLINE = "N:\\georisk_models\\inundation\\data\\new_south_wales\\coastline\\batemans_bay_SMARTLINE.shp"
    elevation_less_final = filePath+"elevation\\features.gdb\\elevation_less_final"

    # Process: Extract elevation data by Mask...
    print 'check if elevation already extracted'
    if not gp.Exists(elevation_extract):
        print 'not already extracted'
        print 'extracting by mask'
        gp.ExtractByMask_sa(elevation, inundation_area1, elevation_extract)
    else:
        print 'extracted elevation already exists'

    # Process: Reclassify elevation data
    print 'check if elevation already reclassified'
    if not gp.Exists(elevation_reclass):
        print 'reclassify based on elevation height of:', run_up_height
        reclass_level = "-200 "+run_up_height+" 1;"+run_up_height+" 300 0"
        gp.Reclassify_sa(elevation_extract, "Value", reclass_level, elevation_reclass, "DATA")
    else:
        print 'elevation has previously been reclassified for this height:', run_up_height
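
    # Note on the Reclassify call above (a reading of the remap string, not a
    # change to it): elevation values from -200 up to run_up_height are mapped
    # to 1 (ground below the analytical run-up level) and values from
    # run_up_height up to 300 are mapped to 0.  The Select step below keeps
    # the GRIDCODE = 1 polygons, i.e. the area the analytical solution
    # predicts to be inundated.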

    # Process: Raster to Polygon...
    print 'check if already converted from raster to polygon'
    if not gp.Exists(elevation_poly):
        print 'raster to polygon'
        gp.RasterToPolygon_conversion(elevation_reclass, elevation_poly, "NO_SIMPLIFY", "VALUE")
    else:
        print 'elevation raster already converted to polygon'

    # Process: Select...
    print 'select by attribute'
    gp.Select_analysis(elevation_poly, elevation_less, "\"GRIDCODE\" = 1")

    # Process: Create layer...
    print 'creating layers'
    gp.MakeFeatureLayer(batemans_bay_SMARTLINE, 'smartline_layer')
    gp.MakeFeatureLayer(elevation_less, 'elevation_layer')

    # Process: Select Layer By Location...
    print 'select by location'
    gp.SelectLayerByLocation_management('elevation_layer', "INTERSECT", 'smartline_layer', "", "NEW_SELECTION")
    print 'joining'
    gp.SpatialJoin_analysis('elevation_layer', inundation_area1, elevation_less_final, "JOIN_ONE_TO_ONE", "", "", "INTERSECTS")

    # Process: Copy Features...
    ##print 'copy features to output feature class'
    ##gp.CopyFeatures_management('elevation_layer', elevation_less_final, "", "0", "0", "0")


    ####################################################################################
    ##    Extract areas for ANUGA solution
    ####################################################################################
    # Local variables...
    gp.workspace = raster_path
    print raster_path
    inundation_name = gp.ListRasters("*depth*", "")
    for raster in inundation_name:
        print raster
    inundation = raster_path + "\\" + inundation_name[0]
    inundation_area1 = "N:\\georisk_models\\inundation\\data\\new_south_wales\\batemans_bay_tsunami_scenario_2009\\anuga\\polygons\\polygons.gdb\\inundation_area1"
    inundation_extract = "inundation_"+run_up_name+"_extract"
    inundation_reclass = "inundation_"+run_up_name+"_reclass"
    inundation_poly = "inundation_"+run_up_name+"_poly"
    inundation_less = "inundation_"+run_up_name+"_less"
    inundation_less_final = "inundation_less_final"

    # Process: Extract inundation data by Mask...
    print 'check if inundation already extracted'
    if not gp.Exists(inundation_extract):
        print 'not already extracted'
        print 'extracting by mask'
        gp.ExtractByMask_sa(inundation, inundation_area1, inundation_extract)
    else:
        print 'extracted inundation already exists'

    # Process: Reclassify inundation data
    print 'check if inundation already reclassified'
    if not gp.Exists(inundation_reclass):
        print 'reclassify based on inundation'
        gp.Reclassify_sa(inundation_extract, "Value", "-200 0 1;0 300 0", inundation_reclass, "DATA")
    else:
        print 'inundation has previously been reclassified for this height:', run_up_height
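
    # Note on the Reclassify call above (a reading of the remap string, not a
    # change to it): modelled depths from -200 up to 0 are mapped to 1 (dry)
    # and depths from 0 up to 300 are mapped to 0, so the Select step below
    # keeps the GRIDCODE = 0 polygons, i.e. the area ANUGA predicts to be wet.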

    # Process: Raster to Polygon...
    print 'check if already converted from raster to polygon'
    if not gp.Exists(inundation_poly):
        print 'raster to polygon'
        gp.RasterToPolygon_conversion(inundation_reclass, inundation_poly, "NO_SIMPLIFY", "VALUE")
    else:
        print 'inundation raster already converted to polygon'

    # Process: Select...
    print 'select by attribute'
    gp.Select_analysis(inundation_poly, inundation_less, "\"GRIDCODE\" = 0")

    # Process: Create layer...
    print 'creating layers'
    gp.MakeFeatureLayer(batemans_bay_SMARTLINE, 'smartline_layer')
    gp.MakeFeatureLayer(inundation_less, 'inundation_layer')

    # Process: Select Layer By Location...
    print 'select by location'
    gp.SelectLayerByLocation_management('inundation_layer', "INTERSECT", 'smartline_layer', "", "NEW_SELECTION")
    print 'joining'
    gp.SpatialJoin_analysis('inundation_layer', inundation_area1, inundation_less_final, "JOIN_ONE_TO_ONE", "", "", "INTERSECTS")


    # Search the analytical (elevation-based) feature class and sum polygon
    # areas by inundation_ID
    print 'get areas'
    analytical_area_dict = {}
    #analytical_areas = []
    cur = gp.SearchCursor(elevation_less_final)
    sRow = cur.Next()
    while sRow:
        inundation_id = sRow.GetValue("inundation_ID")
        if inundation_id not in analytical_area_dict:
            analytical_area_dict[inundation_id] = sRow.GetValue("Shape_Area")
        else:
            analytical_area_dict[inundation_id] += sRow.GetValue("Shape_Area")
        #analytical_areas.append(sRow.GetValue("Shape_Area"))
        sRow = cur.Next()
    #print analytical_areas
    print analytical_area_dict

    # Search the ANUGA (depth-based) feature class and sum polygon areas by
    # inundation_ID
    print 'get areas'
    anuga_area_dict = {}
    #anuga_areas = []
    cur = gp.SearchCursor(inundation_less_final)
    sRow = cur.Next()
    while sRow:
        inundation_id = sRow.GetValue("inundation_ID")
        if inundation_id not in anuga_area_dict:
            anuga_area_dict[inundation_id] = sRow.GetValue("Shape_Area")
        else:
            anuga_area_dict[inundation_id] += sRow.GetValue("Shape_Area")
        #anuga_areas.append(sRow.GetValue("Shape_Area"))
        sRow = cur.Next()
    #print anuga_areas
    print anuga_area_dict

    for key in anuga_area_dict:
        if key not in analytical_area_dict:
            analytical_area_dict[key] = 0.
        diff = anuga_area_dict[key] - analytical_area_dict[key]
        percent_diff = 100*diff/anuga_area_dict[key]
        area_comp[key].append(diff)
        area_percent[key].append(percent_diff)
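
    # Illustrative worked example of the comparison above (made-up numbers,
    # not model output): if ANUGA inundates 1000 m^2 of a polygon and the
    # analytical run-up contour covers 800 m^2, then
    # diff = 1000 - 800 = 200 m^2 and percent_diff = 100*200/1000 = 20,
    # i.e. the analytical estimate is 20% smaller than the ANUGA result.
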
print 'area_comp', area_comp
print 'area_percent', area_percent

##csv_dict = csv.DictWriter(open(out_file,'w'),[1,2,3,4,5,6,7,8,9])
##csv_dict.writerow(dict(zip([1,2,3,4,5,6,7,8,9],[1,2,3,4,5,6,7,8,9])))
##csv_dict.writerow(area_percent)

# Write the comparison table: the header row lists the inundation_ID values
# 1-9 and each subsequent row holds the percentage differences for one event.
# Keep a handle on the output file so it can be closed (out_file itself is
# just the path string); 'wb' avoids blank rows in the CSV on Windows.
out_fid = open(out_file, 'wb')
csv_out = csv.writer(out_fid)
csv_out.writerow([1,2,3,4,5,6,7,8,9])
for i in range(len(area_percent[1])):
    value_list = []
    # iterate keys in sorted order so the columns line up with the header row
    for key in sorted(area_percent):
        value_list.append(area_percent[key][i])
    csv_out.writerow(value_list)

out_fid.close()
##for key, value in area_percent:
##    key_list = []
##    for val in value:
##        key_list.append(key)
##    pylab.scatter(key_list,value)

boundaries.close()