"""# ---------------------------------------------------------------------------
elevation_model.py
Created on: Tue Nov 17 2009 02:11:08
Created by: Jonathan Griffin
This script compares calculated inundation areas for run-up solutions from
analytical models and ANUGA modelling.


# ---------------------------------------------------------------------------
"""
---|
# Import system modules (one per line, stdlib first).
import sys
import string
import os
import arcgisscripting
from os.path import join
from collections import defaultdict
import csv


# Initialise the ArcGIS 9.3 geoprocessing environment.
gp = arcgisscripting.create(9.3)

# The raster operations below (ExtractByMask, Reclassify) need a
# Spatial Analyst licence.
gp.CheckOutExtension("spatial")

# Register every toolbox this script draws tools from.
_toolbox_dir = "C:/Program Files/ArcGIS/ArcToolbox/Toolboxes/"
for _toolbox in ("Spatial Analyst Tools.tbx",
                 "Conversion Tools.tbx",
                 "Data Management Tools.tbx",
                 "Analysis Tools.tbx"):
    gp.AddToolbox(_toolbox_dir + _toolbox)

# Allow existing outputs to be overwritten when the script is re-run.
gp.overwriteoutput = 1
---|
32 | |
---|
# Root directories on the network drive for ANUGA model outputs and for
# the boundary-condition / summary CSV files.
filePath = ("N:\\georisk_models\\inundation\\data\\new_south_wales\\"
            "batemans_bay_tsunami_scenario_2009\\anuga\\outputs\\")
boundary_path = ("N:\\georisk_models\\inundation\\data\\new_south_wales\\"
                 "batemans_bay_tsunami_scenario_2009\\anuga\\boundaries\\")

# Input summary of wave energy / run-up per event.
boundary_file_name = 'wave_energy_summary.csv'

# Output CSV names for each comparison product.
out_file_name_madsen = 'area_comparisons_madsen.csv'
out_file_name_energy = 'area_comparisons_energy.csv'
out_file_name_anuga = 'area_comparisons_anuga.csv'
out_file_name_anuga_sum = 'area_comparisons_anuga_sum.csv'
#figure = 'area_comparisons.png'

# Full paths: all inputs and outputs live in the boundaries directory.
boundary_file = join(boundary_path, boundary_file_name)
out_file_madsen = join(boundary_path, out_file_name_madsen)
out_file_energy = join(boundary_path, out_file_name_energy)
out_file_anuga = join(boundary_path, out_file_name_anuga)
out_file_anuga_sum = join(boundary_path, out_file_name_anuga_sum)
---|
47 | |
---|
48 | |
---|
# Map each scenario name to the numeric event ID embedded in its gauge
# file path.  Commented entries were excluded from this comparison run.
event_dict = {
    'Event1_MSL': 58346,
    # 'Event1_HAT': 58346,
    'Event2_MSL': 51204,
    'Event2_HAT': 51204,
    'Event3_MSL': 58284,
    # 'Event3_HAT': 58284,
    'Puysegur_200yr': 58129,
    # 'Puysegur_500yr': 58115,
    'Puysegur_1000yr': 58226,
    'Puysegur_5000yr': 58286,
    'Puysegur_15000yr': 58272,
    'New_Hebrides_200yr': 51077,
    'New_Hebrides_500yr': 51378,
    'New_Hebrides_1000yr': 51347,
    'New_Hebrides_2000yr': 51292,
    'New_Hebrides_5000yr': 51424,
}

# Accumulators for the area comparison, keyed by inundation_ID; one value
# is appended per processed event.
area_comp_madsen = defaultdict(list)     # ANUGA area minus Madsen area
area_comp_energy = defaultdict(list)     # ANUGA area minus energy-balance area
area_percent_madsen = defaultdict(list)  # Madsen difference as % of ANUGA area
area_percent_energy = defaultdict(list)  # energy difference as % of ANUGA area
area_total_anuga = defaultdict(list)     # raw ANUGA areas per inundation_ID
anuga_sum_areas = {}                     # total ANUGA area keyed by event_id
---|
73 | |
---|
# Main loop: for each row of the wave-energy summary, derive the two
# analytical inundation extents (Madsen, energy balance) from the
# elevation model, derive the ANUGA-modelled extent from the event's
# depth raster, and accumulate per-inundation_ID area comparisons.
boundaries = file(boundary_file,'r')
for line in boundaries.readlines():
    # Expected columns: filename, crest-integrated energy, instantaneous
    # energy, Madsen run-up, energy-balance run-up.  The header row
    # (first cell == 'filename') is skipped.
    line_split = line.split(',')
    if line_split[0]=='filename':
        continue
    gauge_file = line_split[0]
    # The 11th '/'-separated path component holds the numeric event ID.
    # NOTE(review): assumes a fixed directory depth for gauge_file paths
    # -- verify if the file layout on the N: drive changes.
    event_id = gauge_file.split('/')[10]
    crest_integrated_energy = line_split[1]
    instantaneous_energy = line_split[2]
    # Truncate run-up values to 5 characters (kept as strings).
    run_up_madsen = line_split[3][0:5]
    run_up_energy = line_split[4][0:5]
    # Reverse-lookup the scenario name from its event ID; rows whose ID
    # is not in event_dict (e.g. excluded scenarios) are skipped.
    event_name = False
    for key, value in event_dict.iteritems():
        if value == int(event_id):
            event_name = key
    if event_name == False:
        continue
    # Geodatabase holding this event's ANUGA output rasters.
    raster_path = join(filePath,event_name,'raster.gdb')


    print event_id
    print gauge_file

    # do_all is referenced only by commented-out skip logic below.
    do_all = True


    # Build geodatabase-safe name fragments from the run-up heights
    # (presumably '.' is replaced because it is not valid in raster /
    # feature-class names -- e.g. "1.234" -> "1_234").
    run_up_height = str(run_up_madsen)
    run_up_name = run_up_height.replace('.','_')[0:5]
    energy_run_up_height = str(run_up_energy)
    energy_run_up_name = run_up_energy.replace('.','_')[0:5]


    ####################################################################
    ## Extract areas for Madsen analytical solution
    ####################################################################
    print '\nDoing Madsen solution...'
    # Local variables: inputs (elevation grid, study-area polygons,
    # coastline) and intermediate / final dataset paths for this run-up.
    elevation = filePath+"elevation\\elevation.gdb\\elevation"
    elevation_extract = filePath+"elevation\\elevation.gdb\\elevation_extract"
    # NOTE(review): '\data' and '\inundation_area1' use single
    # backslashes; '\d' and '\i' are not escape sequences so the string
    # is still literal, but the escaping is inconsistent with the rest.
    inundation_area1 = "N:\\georisk_models\\inundation\data\\new_south_wales\\batemans_bay_tsunami_scenario_2009\\anuga\\polygons\\polygons.gdb\inundation_area1"
    elevation_reclass = filePath+"elevation\\elevation.gdb\\elevation_"+run_up_name+"_reclass"
    elevation_poly = filePath+"elevation\\features.gdb\\elevation_"+run_up_name+"_poly"
    elevation_less = filePath+"elevation\\features.gdb\\elevation_"+run_up_name+"_less"
    batemans_bay_SMARTLINE = "N:\\georisk_models\\inundation\\data\\new_south_wales\\coastline\\batemans_bay_SMARTLINE.shp"
    elevation_less_final = filePath+"elevation\\features.gdb\\elevation_"+run_up_name+"_less_final"
    elevation_less_final_dis = filePath+"elevation\\features.gdb\\elevation_"+run_up_name+"_less_final_dis"

    # Skip the whole pipeline when the final dissolved product exists.
    if gp.Exists(elevation_less_final_dis):
        print 'analysis already done, skipping to next one'
    else:
        # Clip the elevation grid to the study-area polygons (done once;
        # reused by later events since the path has no run-up suffix).
        print 'check if elevation already extracted'
        if gp.Exists(elevation_extract)!=True:
            print 'not already extracted'
            print 'extracting by mask'
            gp.ExtractByMask_sa(elevation, inundation_area1, elevation_extract)
        else:
            print 'extracted elevation already exists'

        # Reclassify: cells from -200 up to the run-up height -> 1
        # (below run-up), above it up to 300 -> 0.
        print 'check if elevation already reclassified'
        if gp.Exists(elevation_reclass)!=True:
            print 'reclassify based on elevation height of:', run_up_height
            reclass_level = "-200 "+run_up_height+" 1;"+run_up_height+" 300 0"
            gp.Reclassify_sa(elevation_extract, "Value", reclass_level, elevation_reclass, "DATA")
        else:
            print 'elevation has previously been reclassified for this height:', run_up_height

        # Vectorise the binary raster without simplifying cell edges.
        print 'check if already converted from raster to polyon'
        if gp.Exists(elevation_poly)!=True:
            print 'raster to polyon'
            gp.RasterToPolygon_conversion(elevation_reclass, elevation_poly, "NO_SIMPLIFY", "VALUE")
        else:
            print 'elevation raster already converted to polygon'

        # Keep only the below-run-up polygons (GRIDCODE 1).
        print 'select by attribute'
        gp.Select_analysis(elevation_poly, elevation_less, "\"GRIDCODE\" = 1")

        # Layers are needed for the select-by-location step below.
        print 'creating layers'
        gp.MakeFeatureLayer(batemans_bay_SMARTLINE,'smartline_layer')
        gp.MakeFeatureLayer(elevation_less,'elevation_layer')

        # Keep only polygons touching the coastline, then tag them with
        # the study-area attributes via spatial join.
        print 'select by location'
        gp.SelectLayerByLocation_management('elevation_layer', "INTERSECT", 'smartline_layer', "", "NEW_SELECTION")
        print 'joining'
        print 'inundation_area1',inundation_area1
        # NOTE(review): label says 'energy_less_final' but this prints
        # the Madsen (elevation) path.
        print 'energy_less_final',elevation_less_final
        gp.SpatialJoin_analysis('elevation_layer',inundation_area1, elevation_less_final,"JOIN_ONE_TO_ONE","","","INTERSECTS")

        # Merge polygons sharing an inundation_ID into single features.
        print 'dissolving fields'
        gp.Dissolve_management(elevation_less_final,elevation_less_final_dis,'inundation_ID')



    ####################################################################
    ## Extract areas for Energy Balance analytical solution
    ## (same pipeline as above, using the energy-balance run-up height)
    ####################################################################
    print '\nDoing energy solution...'
    # Local variables...
    elevation = filePath+"elevation\\elevation.gdb\\elevation"
    elevation_extract = filePath+"elevation\\elevation.gdb\\elevation_extract"
    inundation_area1 = "N:\\georisk_models\\inundation\data\\new_south_wales\\batemans_bay_tsunami_scenario_2009\\anuga\\polygons\\polygons.gdb\inundation_area1"
    energy_reclass = filePath+"elevation\\elevation.gdb\\energy_"+energy_run_up_name+"_reclass"
    energy_poly = filePath+"elevation\\features.gdb\\energy_"+energy_run_up_name+"_poly"
    energy_less = filePath+"elevation\\features.gdb\\energy_"+energy_run_up_name+"_less"
    batemans_bay_SMARTLINE = "N:\\georisk_models\\inundation\\data\\new_south_wales\\coastline\\batemans_bay_SMARTLINE.shp"
    energy_less_final = filePath+"elevation\\features.gdb\\energy_"+energy_run_up_name+"_less_final"
    energy_less_final_dis = filePath+"elevation\\features.gdb\\energy_"+energy_run_up_name+"_less_final_dis"

    # Skip the whole pipeline when the final dissolved product exists.
    if gp.Exists(energy_less_final_dis):
        print 'analysis already done, skipping to next one'
    else:
        # Process: Extract elevation data by Mask...
        print 'check if elevation already extracted'
        if gp.Exists(elevation_extract)!=True:
            print 'not already extracted'
            print 'extracting by mask'
            gp.ExtractByMask_sa(elevation, inundation_area1, elevation_extract)
        else:
            print 'extracted elevation already exists'

        # Reclassify against the energy-balance run-up height.
        print 'check if elevation already reclassified'
        if gp.Exists(energy_reclass)!=True:
            print 'reclassify based on elevation height of:', energy_run_up_height
            reclass_level = "-200 "+energy_run_up_height+" 1;"+energy_run_up_height+" 300 0"
            gp.Reclassify_sa(elevation_extract, "Value", reclass_level, energy_reclass, "DATA")
        else:
            print 'elevation has previously been reclassified for this height:', energy_run_up_height

        # Process: Raster to Polygon...
        print 'check if already converted from raster to polyon'
        if gp.Exists(energy_poly)!=True:
            print 'raster to polyon'
            gp.RasterToPolygon_conversion(energy_reclass, energy_poly, "NO_SIMPLIFY", "VALUE")
        else:
            print 'elevation raster already converted to polygon'

        # Keep only the below-run-up polygons (GRIDCODE 1).
        print 'select by attribute'
        gp.Select_analysis(energy_poly, energy_less, "\"GRIDCODE\" = 1")

        # Process: Create layer...
        print 'creating layers'
        gp.MakeFeatureLayer(batemans_bay_SMARTLINE,'smartline_layer')
        gp.MakeFeatureLayer(energy_less,'elevation_layer')

        # Coastline intersection filter + spatial join, as for Madsen.
        print 'select by location'
        gp.SelectLayerByLocation_management('elevation_layer', "INTERSECT", 'smartline_layer', "", "NEW_SELECTION")
        print 'joining'
        print 'inundation_area1',inundation_area1
        print 'energy_less_final',energy_less_final
        gp.SpatialJoin_analysis('elevation_layer',inundation_area1, energy_less_final,"JOIN_ONE_TO_ONE","","","INTERSECTS")

        print 'dissolving fields'
        gp.Dissolve_management(energy_less_final,energy_less_final_dis,'inundation_ID')

    ####################################################################
    ## Extract areas for ANUGA solution
    ####################################################################
    print '\nDoing ANUGA solution...'
    # Work inside the event's raster geodatabase; intermediate names
    # below are therefore workspace-relative.
    gp.workspace = raster_path
    print raster_path
    # Use the first raster whose name contains 'depth'.
    inundation_name = gp.ListRasters("*depth*","")
    for raster in inundation_name:
        print raster
    inundation = raster_path + "\\"+ inundation_name[0]
    inundation_area1 = "N:\\georisk_models\\inundation\data\\new_south_wales\\batemans_bay_tsunami_scenario_2009\\anuga\\polygons\\polygons.gdb\inundation_area1"
    inundation_extract = "inundation_"+run_up_name+"_extract"
    inundation_reclass = "inundation_"+run_up_name+"_reclass"
    inundation_poly = "inundation_"+run_up_name+"_poly"
    inundation_less = "inundation_"+run_up_name+"_less"
    inundation_less_final = "inundation_less_final"
    inundation_less_final_dis = "inundation_less_final_dis"

    # Skip when the final dissolved product already exists in this
    # event's workspace.
    if gp.Exists(inundation_less_final_dis):
        print 'analysis already done, skipping to next one'
    else:
        # Clip the ANUGA depth raster to the study-area polygons.
        print 'check if inundation already extracted'
        if gp.Exists(inundation_extract)!=True:
            print 'not already extracted'
            print 'extracting by mask'
            gp.ExtractByMask_sa(inundation, inundation_area1, inundation_extract)
        else:
            print 'extracted inundation already exists'

        # Reclassify: values -200..0 -> 1, 0..300 -> 0; the GRIDCODE 0
        # class selected below is therefore the positive-depth (wet)
        # cells.
        print 'check if inundation already reclassified'
        if gp.Exists(inundation_reclass)!=True:
            print 'reclassify based on inundation'
            gp.Reclassify_sa(inundation_extract, "Value", "-200 0 1;0 300 0", inundation_reclass, "DATA")
        else:
            print 'inundation has previously been reclassified for this height for this ANUGA run'

        # Process: Raster to Polygon...
        print 'check if already converted from raster to polyon'
        if gp.Exists(inundation_poly)!=True:
            print 'raster to polyon'
            gp.RasterToPolygon_conversion(inundation_reclass, inundation_poly, "NO_SIMPLIFY", "VALUE")
        else:
            print 'inundation raster already converted to polygon'

        # Keep the GRIDCODE 0 polygons (positive depth -- see reclass
        # note above); opposite sense to the analytical sections.
        print 'select by attribute'
        gp.Select_analysis(inundation_poly, inundation_less, "\"GRIDCODE\" = 0")

        # Process: Create layer...
        print 'creating layers'
        gp.MakeFeatureLayer(batemans_bay_SMARTLINE,'smartline_layer')
        gp.MakeFeatureLayer(inundation_less,'inundation_layer')

        # Coastline intersection filter + spatial join, as above.
        print 'select by location'
        gp.SelectLayerByLocation_management('inundation_layer', "INTERSECT", 'smartline_layer', "", "NEW_SELECTION")
        print 'joining'
        gp.SpatialJoin_analysis('inundation_layer',inundation_area1, inundation_less_final,"JOIN_ONE_TO_ONE","","","INTERSECTS")

        print 'dissolving fields'
        gp.Dissolve_management(inundation_less_final,inundation_less_final_dis,'inundation_ID')

    ##########################################################
    # Compare areas
    ##########################################################

    # Sum Shape_Area per inundation_ID from the dissolved Madsen result.
    print 'get Madsen areas'
    analytical_area_dict = {}
    cur = gp.SearchCursor(elevation_less_final_dis)
    sRow = cur.Next()
    while sRow:
        if not(sRow.GetValue("inundation_ID")in analytical_area_dict):
            analytical_area_dict[sRow.GetValue("inundation_ID")] = sRow.GetValue("Shape_Area")
        else:
            analytical_area_dict[sRow.GetValue("inundation_ID")] = (analytical_area_dict[sRow.GetValue("inundation_ID")]+sRow.GetValue("Shape_Area"))
        sRow = cur.Next()
    print analytical_area_dict

    # Same accumulation for the energy-balance result.
    print 'get Energy areas'
    energy_area_dict = {}
    cur = gp.SearchCursor(energy_less_final_dis)
    sRow = cur.Next()
    while sRow:
        if not(sRow.GetValue("inundation_ID")in energy_area_dict):
            energy_area_dict[sRow.GetValue("inundation_ID")] = sRow.GetValue("Shape_Area")
        else:
            energy_area_dict[sRow.GetValue("inundation_ID")] = (energy_area_dict[sRow.GetValue("inundation_ID")]+sRow.GetValue("Shape_Area"))
        sRow = cur.Next()
    print energy_area_dict

    # ANUGA areas per inundation_ID, plus a running total per event_id.
    print 'get ANUGA areas'
    anuga_area_dict = {}
    anuga_sum_areas[event_id] = 0
    cur = gp.SearchCursor(inundation_less_final_dis)
    sRow = cur.Next()
    while sRow:
        anuga_sum_areas[event_id] = anuga_sum_areas[event_id]+sRow.GetValue("Shape_Area")
        if not(sRow.GetValue("inundation_ID") in anuga_area_dict):
            anuga_area_dict[sRow.GetValue("inundation_ID")]=sRow.GetValue("Shape_Area")
        else:
            anuga_area_dict[sRow.GetValue("inundation_ID")]= anuga_area_dict[sRow.GetValue("inundation_ID")]+sRow.GetValue("Shape_Area")
        sRow = cur.Next()
    print anuga_area_dict

    # Accumulate this event's comparisons; None is appended when an
    # inundation_ID seen by ANUGA is absent from an analytical result.
    for key in anuga_area_dict:
        area_total_anuga[key].append(anuga_area_dict[key])

        if not key in analytical_area_dict:
            area_comp_madsen[key].append(None)
            area_percent_madsen[key].append(None)
        else:
            diff_madsen = anuga_area_dict[key]-analytical_area_dict[key]
            percent_diff_madsen = 100*diff_madsen/anuga_area_dict[key]
            area_comp_madsen[key].append(diff_madsen)
            area_percent_madsen[key].append(percent_diff_madsen)

        if not key in energy_area_dict:
            area_comp_energy[key].append(None)
            area_percent_energy[key].append(None)
        else:
            diff_energy = anuga_area_dict[key]-energy_area_dict[key]
            percent_diff_energy = 100*diff_energy/anuga_area_dict[key]
            area_comp_energy[key].append(diff_energy)
            area_percent_energy[key].append(percent_diff_energy)

    print 'area_comp', area_comp_madsen
    print 'area_percent', area_percent_madsen
---|
389 | |
---|
390 | |
---|
# One CSV writer per output product.  The file handles are left open and
# released when the script exits.
csv_anuga = csv.writer(open(out_file_anuga,'w'))
csv_anuga_sum = csv.writer(open(out_file_anuga_sum,'w'))
csv_madsen = csv.writer(open(out_file_madsen,'w'))
csv_energy = csv.writer(open(out_file_energy,'w'))

# Placeholder header row of column indices 1..9 on the per-ID tables.
header_row = [1,2,3,4,5,6,7,8,9]
for table_writer in (csv_anuga, csv_madsen, csv_energy):
    table_writer.writerow(header_row)

# Dump the total ANUGA area per event: one (event_id, area) row each.
for key, value in anuga_sum_areas.iteritems():
    csv_anuga_sum.writerow([key,value])
---|
403 | |
---|
def _write_padded_rows(csv_writer, area_dict):
    # Write one CSV row per event index of the inundation_ID-1 series.
    # Columns follow the dict's (arbitrary) iteration order over
    # inundation_IDs; series shorter than the key-1 series are padded
    # with None.
    # NOTE: evaluating area_dict[1] on a defaultdict inserts key 1 when
    # it is missing, matching the original behaviour.
    for i in range(len(area_dict[1])):
        row = []
        for key in area_dict:
            try:
                # BUG FIX: the original caught NameError here, which an
                # out-of-range list index never raises -- ragged series
                # crashed with an uncaught IndexError instead of being
                # padded with None.
                row.append(area_dict[key][i])
            except IndexError:
                row.append(None)
        csv_writer.writerow(row)

# Emit the three per-inundation_ID comparison tables.
_write_padded_rows(csv_anuga, area_total_anuga)    # raw ANUGA areas
_write_padded_rows(csv_madsen, area_percent_madsen)  # % diff vs Madsen
_write_padded_rows(csv_energy, area_percent_energy)  # % diff vs energy




# Done with the input summary file.
boundaries.close()
441 | |
---|
442 | |
---|