Changeset 7577


Timestamp: Dec 4, 2009, 10:43:32 AM
Author: jgriffin
Message:
Location: anuga_work/production/new_south_wales/batemans_bay
Files: 3 added, 5 edited

Legend: unchanged context lines are prefixed with a space, removed lines with '-', added lines with '+'; '…' marks an elided run of unchanged lines.
  • anuga_work/production/new_south_wales/batemans_bay/Arc_asc2raster_GDA94z56.py

    r7327 → r7577

     output_dir = "anuga\\outputs\\"

    -time_dir1 = '20090529_143527_run_final_0.0_51424_jgriffin'
    +time_dir1 = '20090601_172248_run_final_0.0_58284__250m_jgriffin'

     time_dirs = [time_dir1]#, time_dir2, time_dir3, time_dir4, time_dir5]#, time_dir6, time_dir7, time_dir8, time_dir9]
    …

         generate_filename = []
    -    input_ascii = glob.glob(folder + '*elevation_max.asc')
    +    input_ascii = glob.glob(folder + '*.asc')
         print time_dir

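    The only functional change here is the glob pattern: the script now picks up every ASCII grid in each output folder, not just the *elevation_max.asc files. Below is a minimal standalone sketch of that discovery step, not the production script itself; the folder layout and the name-shortening rule are assumptions for illustration.

        import glob
        import os

        def list_ascii_grids(folder):
            # With the broadened pattern, every ESRI ASCII grid in the folder is found.
            return glob.glob(os.path.join(folder, '*.asc'))

        def short_raster_name(asc_path, max_len=13):
            # ArcGIS GRID raster names are length-limited, so trim the .asc basename
            # (the 13-character limit and trimming rule are assumptions in this sketch).
            base = os.path.splitext(os.path.basename(asc_path))[0]
            return base[-max_len:]

        if __name__ == '__main__':
            folder = os.path.join('anuga', 'outputs',
                                  '20090601_172248_run_final_0.0_58284__250m_jgriffin')
            for asc in list_ascii_grids(folder):
                print(asc, '->', short_raster_name(asc))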
  • anuga_work/production/new_south_wales/batemans_bay/compare_inundation_areas.py

    r7572 → r7577

     boundary_path = "N:\\georisk_models\\inundation\\data\\new_south_wales\\batemans_bay_tsunami_scenario_2009\\anuga\\boundaries\\"
     boundary_file_name = 'wave_energy_summary.csv'
    -out_file_name = 'area_comparisons.csv'
    +out_file_name_madsen = 'area_comparisons_madsen.csv'
    +out_file_name_energy = 'area_comparisons_energy.csv'
    +out_file_name_anuga = 'area_comparisons_anuga.csv'
    +out_file_name_anuga_sum = 'area_comparisons_anuga_sum.csv'
     #figure = 'area_comparisons.png'

     boundary_file = join(boundary_path,boundary_file_name)
    -out_file = join(boundary_path, out_file_name)
    -#figure_path = join(boundary_path, 'figures',figure)
    -plot = True
    +out_file_madsen = join(boundary_path, out_file_name_madsen)
    +out_file_energy = join(boundary_path, out_file_name_energy)
    +out_file_anuga = join(boundary_path, out_file_name_anuga)
    +out_file_anuga_sum = join(boundary_path, out_file_name_anuga_sum)
    +

     event_dict = {'Event1_MSL':58346,
     ##              'Event1_HAT':58346,
                   'Event2_MSL':51204,
    -##              'Event2_HAT':51204,
    +              'Event2_HAT':51204,
                   'Event3_MSL':58284,#,
     ##              'Event3_HAT':58284,
                   'Puysegur_200yr':58129,
    -              'Puysegur_500yr':58115,
    +##              'Puysegur_500yr':58115,
                   'Puysegur_1000yr':58226,
                   'Puysegur_5000yr':58286,
    …

     # Dictionaries for storing area comparison
    -area_comp = defaultdict(list)
    -area_percent = defaultdict(list)
    +area_comp_madsen = defaultdict(list)
    +area_comp_energy = defaultdict(list)
    +area_percent_madsen = defaultdict(list)
    +area_percent_energy = defaultdict(list)
    +area_total_anuga = defaultdict(list)
    +anuga_sum_areas = {}

     boundaries = file(boundary_file,'r')
    …
         instantaneous_energy = line_split[2]
         run_up_madsen = line_split[3][0:5]
    +    run_up_energy = line_split[4][0:5]
         event_name = False
         for key, value in event_dict.iteritems():
    …
         #break

    -
    +    do_all = True


         ##run_up_madsen = 6.0
         run_up_height = str(run_up_madsen)
    -    run_up_name = run_up_height.replace('.','_')[0]
    +    run_up_name = run_up_height.replace('.','_')[0:5]
    +    energy_run_up_height = str(run_up_energy)
    +    energy_run_up_name = run_up_energy.replace('.','_')[0:5]


         ####################################################################################
    -    ##    Extract areas for analytical solution
    +    ##    Extract areas for Madsen analytical solution
         ####################################################################################
    +    print '\nDoing Madsen solution...'
         # Local variables...
         elevation = filePath+"elevation\\elevation.gdb\\elevation"
    …
         elevation_less = filePath+"elevation\\features.gdb\\elevation_"+run_up_name+"_less"
         batemans_bay_SMARTLINE = "N:\\georisk_models\\inundation\\data\\new_south_wales\\coastline\\batemans_bay_SMARTLINE.shp"
    -    elevation_less_final = filePath+"elevation\\features.gdb\\elevation_less_final"
    -
    -    # Process: Extract elevation data by Mask...
    -    print 'check if elevation already extracted'
    -    if gp.Exists(elevation_extract)!=True:
    -        print 'not already extracted'
    -        print 'extracting by mask'
    -        gp.ExtractByMask_sa(elevation, inundation_area1, elevation_extract)
    +    elevation_less_final = filePath+"elevation\\features.gdb\\elevation_"+run_up_name+"_less_final"
    +    elevation_less_final_dis = filePath+"elevation\\features.gdb\\elevation_"+run_up_name+"_less_final_dis"
    +
    +    # Check if already done...
    +##    if do_all!=True:
    +##        print 'continuing'
    +    if gp.Exists(elevation_less_final_dis):
    +        print 'analysis already done, skipping to next one'
         else:
    -        print 'extracted elevation already exists'
    -
    -    # Process: Reclassify elevation data
    -    print 'check if elevation already reclassified'
    -    if gp.Exists(elevation_reclass)!=True:
    -        print 'reclassify based on elevation height of:', run_up_height
    -        reclass_level = "-200 "+run_up_height+" 1;"+run_up_height+" 300 0"
    -        gp.Reclassify_sa(elevation_extract, "Value", reclass_level, elevation_reclass, "DATA")
    +        # Process: Extract elevation data by Mask...
    +        print 'check if elevation already extracted'
    +        if gp.Exists(elevation_extract)!=True:
    +            print 'not already extracted'
    +            print 'extracting by mask'
    +            gp.ExtractByMask_sa(elevation, inundation_area1, elevation_extract)
    +        else:
    +            print 'extracted elevation already exists'
    +
    +        # Process: Reclassify elevation data
    +        print 'check if elevation already reclassified'
    +        if gp.Exists(elevation_reclass)!=True:
    +            print 'reclassify based on elevation height of:', run_up_height
    +            reclass_level = "-200 "+run_up_height+" 1;"+run_up_height+" 300 0"
    +            gp.Reclassify_sa(elevation_extract, "Value", reclass_level, elevation_reclass, "DATA")
    +        else:
    +            print 'elevation has previously been reclassified for this height:', run_up_height
    +
    +        # Process: Raster to Polygon...
    +        print 'check if already converted from raster to polyon'
    +        if gp.Exists(elevation_poly)!=True:
    +            print 'raster to polyon'
    +            gp.RasterToPolygon_conversion(elevation_reclass, elevation_poly, "NO_SIMPLIFY", "VALUE")
    +        else:
    +            print 'elevation raster already converted to polygon'
    +
    +        # Process: Select...
    +        print 'select by attribute'
    +        gp.Select_analysis(elevation_poly, elevation_less, "\"GRIDCODE\" = 1")
    +
    +        # Process: Create layer...
    +        print 'creating layers'
    +        gp.MakeFeatureLayer(batemans_bay_SMARTLINE,'smartline_layer')
    +        gp.MakeFeatureLayer(elevation_less,'elevation_layer')
    +
    +        # Process: Select Layer By Location...
    +        print 'select by location'
    +        gp.SelectLayerByLocation_management('elevation_layer', "INTERSECT", 'smartline_layer', "", "NEW_SELECTION")
    +        print 'joining'
    +        print 'inundation_area1',inundation_area1
    +        print 'energy_less_final',elevation_less_final
    +        gp.SpatialJoin_analysis('elevation_layer',inundation_area1, elevation_less_final,"JOIN_ONE_TO_ONE","","","INTERSECTS")
    +
    +        print 'dissolving fields'
    +        gp.Dissolve_management(elevation_less_final,elevation_less_final_dis,'inundation_ID')
    +
    +
    +
    +   ####################################################################################
    +    ##    Extract areas for Energy Balance analytical solution
    +    ####################################################################################
    +    print '\nDoing energy solution...'
    +    # Local variables...
    +    elevation = filePath+"elevation\\elevation.gdb\\elevation"
    +    elevation_extract = filePath+"elevation\\elevation.gdb\\elevation_extract"
    +    inundation_area1 = "N:\\georisk_models\\inundation\data\\new_south_wales\\batemans_bay_tsunami_scenario_2009\\anuga\\polygons\\polygons.gdb\inundation_area1"
    +    energy_reclass = filePath+"elevation\\elevation.gdb\\energy_"+energy_run_up_name+"_reclass"
    +    energy_poly = filePath+"elevation\\features.gdb\\energy_"+energy_run_up_name+"_poly"
    +    energy_less = filePath+"elevation\\features.gdb\\energy_"+energy_run_up_name+"_less"
    +    batemans_bay_SMARTLINE = "N:\\georisk_models\\inundation\\data\\new_south_wales\\coastline\\batemans_bay_SMARTLINE.shp"
    +    energy_less_final = filePath+"elevation\\features.gdb\\energy_"+energy_run_up_name+"_less_final"
    +    energy_less_final_dis = filePath+"elevation\\features.gdb\\energy_"+energy_run_up_name+"_less_final_dis"
    +
    +    # Check if already done...
    +##    if do_all!=True:
    +##        print 'continuing'
    +    if gp.Exists(energy_less_final_dis):
    +        print 'analysis already done, skipping to next one'
         else:
    -        print 'elevation has previously been reclassified for this height:', run_up_height
    -
    -    # Process: Raster to Polygon...
    -    print 'check if already converted from raster to polyon'
    -    if gp.Exists(elevation_poly)!=True:
    -        print 'raster to polyon'
    -        gp.RasterToPolygon_conversion(elevation_reclass, elevation_poly, "NO_SIMPLIFY", "VALUE")
    -    else:
    -        print 'elevation raster already converted to polygon'
    -
    -    # Process: Select...
    -    print 'select by attribute'
    -    gp.Select_analysis(elevation_poly, elevation_less, "\"GRIDCODE\" = 1")
    -
    -    # Process: Create layer...
    -    print 'creating layers'
    -    gp.MakeFeatureLayer(batemans_bay_SMARTLINE,'smartline_layer')
    -    gp.MakeFeatureLayer(elevation_less,'elevation_layer')
    -
    -    # Process: Select Layer By Location...
    -    print 'select by location'
    -    gp.SelectLayerByLocation_management('elevation_layer', "INTERSECT", 'smartline_layer', "", "NEW_SELECTION")
    -    print 'joining'
    -    gp.SpatialJoin_analysis('elevation_layer',inundation_area1, elevation_less_final,"JOIN_ONE_TO_ONE","","","INTERSECTS")
    -
    -    # Process: Copy Features...
    -    ##print 'copy features to output feature class'
    -    ##gp.CopyFeatures_management('elevation_layer', elevation_less_final, "", "0", "0", "0")
    -
    +        # Process: Extract elevation data by Mask...
    +        print 'check if elevation already extracted'
    +        if gp.Exists(elevation_extract)!=True:
    +            print 'not already extracted'
    +            print 'extracting by mask'
    +            gp.ExtractByMask_sa(elevation, inundation_area1, elevation_extract)
    +        else:
    +            print 'extracted elevation already exists'
    +
    +        # Process: Reclassify elevation data
    +        print 'check if elevation already reclassified'
    +        if gp.Exists(energy_reclass)!=True:
    +            print 'reclassify based on elevation height of:', energy_run_up_height
    +            reclass_level = "-200 "+energy_run_up_height+" 1;"+energy_run_up_height+" 300 0"
    +            gp.Reclassify_sa(elevation_extract, "Value", reclass_level, energy_reclass, "DATA")
    +        else:
    +            print 'elevation has previously been reclassified for this height:', energy_run_up_height
    +
    +        # Process: Raster to Polygon...
    +        print 'check if already converted from raster to polyon'
    +        if gp.Exists(energy_poly)!=True:
    +            print 'raster to polyon'
    +            gp.RasterToPolygon_conversion(energy_reclass, energy_poly, "NO_SIMPLIFY", "VALUE")
    +        else:
    +            print 'elevation raster already converted to polygon'
    +
    +        # Process: Select...
    +        print 'select by attribute'
    +        gp.Select_analysis(energy_poly, energy_less, "\"GRIDCODE\" = 1")
    +
    +        # Process: Create layer...
    +        print 'creating layers'
    +        gp.MakeFeatureLayer(batemans_bay_SMARTLINE,'smartline_layer')
    +        gp.MakeFeatureLayer(energy_less,'elevation_layer')
    +
    +        # Process: Select Layer By Location...
    +        print 'select by location'
    +        gp.SelectLayerByLocation_management('elevation_layer', "INTERSECT", 'smartline_layer', "", "NEW_SELECTION")
    +        print 'joining'
    +        print 'inundation_area1',inundation_area1
    +        print 'energy_less_final',energy_less_final
    +        gp.SpatialJoin_analysis('elevation_layer',inundation_area1, energy_less_final,"JOIN_ONE_TO_ONE","","","INTERSECTS")
    +
    +        print 'dissolving fields'
    +        gp.Dissolve_management(energy_less_final,energy_less_final_dis,'inundation_ID')

         ####################################################################################
         ##    Extract areas for ANUGA solution
         ####################################################################################
    +    print '\nDoing ANUGA solution...'
         # Local variables...
         gp.workspace = raster_path
    …
         inundation_less = "inundation_"+run_up_name+"_less"
         inundation_less_final = "inundation_less_final"
    -
    -    # Process: Extract inundation data by Mask...
    -    print 'check if inundation already extracted'
    -    if gp.Exists(inundation_extract)!=True:
    -        print 'not already extracted'
    -        print 'extracting by mask'
    -        gp.ExtractByMask_sa(inundation, inundation_area1, inundation_extract)
    +    inundation_less_final_dis = "inundation_less_final_dis"
    +
    +    # Check if already done...
    +    if gp.Exists(inundation_less_final_dis):
    +        print 'analysis already done, skipping to next one'
         else:
    -        print 'extracted inundation already exists'
    -
    -    # Process: Reclassify inundation data
    -    print 'check if inundation already reclassified'
    -    if gp.Exists(inundation_reclass)!=True:
    -        print 'reclassify based on inundation'
    -        gp.Reclassify_sa(inundation_extract, "Value", "-200 0 1;0 300 0", inundation_reclass, "DATA")
    -    else:
    -        print 'inundation has previously been reclassified for this height:', run_up_height
    -
    -    # Process: Raster to Polygon...
    -    print 'check if already converted from raster to polyon'
    -    if gp.Exists(inundation_poly)!=True:
    -        print 'raster to polyon'
    -        gp.RasterToPolygon_conversion(inundation_reclass, inundation_poly, "NO_SIMPLIFY", "VALUE")
    -    else:
    -        print 'inundation raster already converted to polygon'
    -
    -    # Process: Select...
    -    print 'select by attribute'
    -    gp.Select_analysis(inundation_poly, inundation_less, "\"GRIDCODE\" = 0")
    -
    -    # Process: Create layer...
    -    print 'creating layers'
    -    gp.MakeFeatureLayer(batemans_bay_SMARTLINE,'smartline_layer')
    -    gp.MakeFeatureLayer(inundation_less,'inundation_layer')
    -
    -    # Process: Select Layer By Location...
    -    print 'select by location'
    -    gp.SelectLayerByLocation_management('inundation_layer', "INTERSECT", 'smartline_layer', "", "NEW_SELECTION")
    -    print 'joining'
    -    gp.SpatialJoin_analysis('inundation_layer',inundation_area1, inundation_less_final,"JOIN_ONE_TO_ONE","","","INTERSECTS")
    -
    -
    -
    -
    -
    +        # Process: Extract inundation data by Mask...
    +        print 'check if inundation already extracted'
    +        if gp.Exists(inundation_extract)!=True:
    +            print 'not already extracted'
    +            print 'extracting by mask'
    +            gp.ExtractByMask_sa(inundation, inundation_area1, inundation_extract)
    +        else:
    +            print 'extracted inundation already exists'
    +
    +        # Process: Reclassify inundation data
    +        print 'check if inundation already reclassified'
    +        if gp.Exists(inundation_reclass)!=True:
    +            print 'reclassify based on inundation'
    +            gp.Reclassify_sa(inundation_extract, "Value", "-200 0 1;0 300 0", inundation_reclass, "DATA")
    +        else:
    +            print 'inundation has previously been reclassified for this height for this ANUGA run'
    +
    +        # Process: Raster to Polygon...
    +        print 'check if already converted from raster to polyon'
    +        if gp.Exists(inundation_poly)!=True:
    +            print 'raster to polyon'
    +            gp.RasterToPolygon_conversion(inundation_reclass, inundation_poly, "NO_SIMPLIFY", "VALUE")
    +        else:
    +            print 'inundation raster already converted to polygon'
    +
    +        # Process: Select...
    +        print 'select by attribute'
    +        gp.Select_analysis(inundation_poly, inundation_less, "\"GRIDCODE\" = 0")
    +
    +        # Process: Create layer...
    +        print 'creating layers'
    +        gp.MakeFeatureLayer(batemans_bay_SMARTLINE,'smartline_layer')
    +        gp.MakeFeatureLayer(inundation_less,'inundation_layer')
    +
    +        # Process: Select Layer By Location...
    +        print 'select by location'
    +        gp.SelectLayerByLocation_management('inundation_layer', "INTERSECT", 'smartline_layer', "", "NEW_SELECTION")
    +        print 'joining'
    +        gp.SpatialJoin_analysis('inundation_layer',inundation_area1, inundation_less_final,"JOIN_ONE_TO_ONE","","","INTERSECTS")
    +
    +        print 'dissolving fields'
    +        gp.Dissolve_management(inundation_less_final,inundation_less_final_dis,'inundation_ID')
    +
    +    ##########################################################
    +    # Compare areas
    +    ##########################################################
    +
         # Search feature class for areas
    -    print 'get areas'
    +    print 'get Madsen areas'
         analytical_area_dict = {}
         #analytical_areas = []
    -    cur = gp.SearchCursor(elevation_less_final)
    +    cur = gp.SearchCursor(elevation_less_final_dis)
         sRow = cur.Next()
         while sRow:
    …

         # Search feature class for areas
    -    print 'get areas'
    -    anuga_area_dict = {}
    -    #anuga_areas = []
    -    cur = gp.SearchCursor(inundation_less_final)
    +    print 'get Energy areas'
    +    energy_area_dict = {}
    +    #analytical_areas = []
    +    cur = gp.SearchCursor(energy_less_final_dis)
         sRow = cur.Next()
         while sRow:
    +        if not(sRow.GetValue("inundation_ID")in energy_area_dict):
    +            energy_area_dict[sRow.GetValue("inundation_ID")] = sRow.GetValue("Shape_Area")
    +        else:
    +            energy_area_dict[sRow.GetValue("inundation_ID")] = (energy_area_dict[sRow.GetValue("inundation_ID")]+sRow.GetValue("Shape_Area"))
    +        #analytical_areas.append(sRow.GetValue("Shape_Area"))
    +        sRow = cur.Next()
    +    #print analytical_areas
    +    print energy_area_dict
    +
    +    # Search feature class for areas
    +    print 'get ANUGA areas'
    +    anuga_area_dict = {}
    +    anuga_sum_areas[event_id] = 0
    +    #anuga_areas = []
    +    cur = gp.SearchCursor(inundation_less_final_dis)
    +    sRow = cur.Next()
    +    while sRow:
    +        anuga_sum_areas[event_id] = anuga_sum_areas[event_id]+sRow.GetValue("Shape_Area")
             if not(sRow.GetValue("inundation_ID") in anuga_area_dict):
                 anuga_area_dict[sRow.GetValue("inundation_ID")]=sRow.GetValue("Shape_Area")
    …

         for key in anuga_area_dict:
    +        area_total_anuga[key].append(anuga_area_dict[key])
    +
             if not key in analytical_area_dict:
    -            analytical_area_dict[key] = 0.
    -        diff = anuga_area_dict[key]-analytical_area_dict[key]
    -        percent_diff = 100*diff/anuga_area_dict[key]
    -        area_comp[key].append(diff)
    -        area_percent[key].append(percent_diff)
    -print 'area_comp', area_comp
    -print 'area_percent', area_percent
    -
    -##csv_dict = csv.DictWriter(open(out_file,'w'),[1,2,3,4,5,6,7,8,9])
    -##csv_dict.writerow(dict(zip([1,2,3,4,5,6,7,8,9],[1,2,3,4,5,6,7,8,9])))
    -##csv_dict.writerow(area_percent)
    -
    -csv_out = csv.writer(open(out_file,'w'))
    -csv_out.writerow([1,2,3,4,5,6,7,8,9])
    -for i in range(len(area_percent[1])):
    -    value_list =[]
    -    for key in area_percent:
    -        value_list.append(area_percent[key][i])
    -    csv_out.writerow(value_list)
    -
    -out_file.close()
    -##for key, value in area_percent:
    -##    key_list = []
    -##    for val in value:
    -##        key_list.append(key)
    -##    pylab.scatter(key_list,value)
    -
    +            #analytical_area_dict[key] = None
    +            area_comp_madsen[key].append(None)
    +            area_percent_madsen[key].append(None)
    +        else:
    +            diff_madsen = anuga_area_dict[key]-analytical_area_dict[key]
    +            percent_diff_madsen = 100*diff_madsen/anuga_area_dict[key]
    +            area_comp_madsen[key].append(diff_madsen)
    +            area_percent_madsen[key].append(percent_diff_madsen)
    +
    +        if not key in energy_area_dict:
    +            area_comp_energy[key].append(None)
    +            area_percent_energy[key].append(None)
    +        else:
    +            diff_energy = anuga_area_dict[key]-energy_area_dict[key]
    +            percent_diff_energy = 100*diff_energy/anuga_area_dict[key]
    +            area_comp_energy[key].append(diff_energy)
    +            area_percent_energy[key].append(percent_diff_energy)
    +
    +print 'area_comp', area_comp_madsen
    +print 'area_percent', area_percent_madsen
    +
    +
    +
    +csv_anuga = csv.writer(open(out_file_anuga,'w'))
    +csv_anuga_sum = csv.writer(open(out_file_anuga_sum,'w'))
    +csv_madsen = csv.writer(open(out_file_madsen,'w'))
    +csv_energy = csv.writer(open(out_file_energy,'w'))
    +
    +csv_anuga.writerow([1,2,3,4,5,6,7,8,9])
    +csv_madsen.writerow([1,2,3,4,5,6,7,8,9])
    +csv_energy.writerow([1,2,3,4,5,6,7,8,9])
    +
    +for key, value in anuga_sum_areas.iteritems():
    +    write_list = [key,value]
    +    csv_anuga_sum.writerow(write_list)
    +
    +for i in range(len(area_total_anuga[1])):
    +    value_list_anuga =[]
    +    for key in area_total_anuga:
    +        try:
    +            area_total_anuga[key][i]
    +        except NameError:
    +            value_list_anuga.append(None)
    +        else:
    +            value_list_anuga.append(area_total_anuga[key][i])
    +    csv_anuga.writerow(value_list_anuga)
    +
    +for i in range(len(area_percent_madsen[1])):
    +    value_list_madsen =[]
    +    for key in area_percent_madsen:
    +        try:
    +            area_percent_madsen[key][i]
    +        except NameError:
    +            value_list_madsen.append(None)
    +        else:
    +            value_list_madsen.append(area_percent_madsen[key][i])
    +    csv_madsen.writerow(value_list_madsen)
    +
    +for i in range(len(area_percent_energy[1])):
    +    value_list_energy =[]
    +    for key in area_percent_energy:
    +        try:
    +            area_percent_energy[key][i]
    +        except NameError:
    +            value_list_energy.append(None)
    +        else:
    +            value_list_energy.append(area_percent_energy[key][i])
    +    csv_energy.writerow(value_list_energy)


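    The revised script compares the ANUGA inundated area against two analytical run-up estimates (Madsen and Energy Balance) per inundation zone, recording a percentage difference relative to the ANUGA area and None where no analytical polygon exists, then writes one CSV per method with zone IDs 1-9 as columns. The geoprocessing steps cannot run outside ArcGIS, so the standalone sketch below only illustrates that bookkeeping; the example area values are made up and the helper write_percent_csv is hypothetical.

        import csv
        from collections import defaultdict

        zone_ids = range(1, 10)
        area_percent_madsen = defaultdict(list)
        area_percent_energy = defaultdict(list)

        # One hypothetical event: dissolved areas (m^2) keyed by inundation_ID.
        anuga_area = {1: 120000.0, 2: 45000.0, 3: 9000.0}
        madsen_area = {1: 100000.0, 2: 52000.0}   # no Madsen polygon for zone 3 -> None
        energy_area = {1: 110000.0, 3: 8000.0}    # no energy polygon for zone 2 -> None

        for key, anuga in anuga_area.items():
            for analytic, store in ((madsen_area, area_percent_madsen),
                                    (energy_area, area_percent_energy)):
                if key in analytic:
                    # percentage difference relative to the ANUGA inundated area
                    store[key].append(100.0 * (anuga - analytic[key]) / anuga)
                else:
                    store[key].append(None)

        def write_percent_csv(path, percents):
            # One row per event, one column per inundation zone (IDs 1-9 as the header);
            # None becomes an empty cell where no analytical polygon existed.
            with open(path, 'w', newline='') as f:
                writer = csv.writer(f)
                writer.writerow(list(zone_ids))
                n_events = max((len(v) for v in percents.values()), default=0)
                for i in range(n_events):
                    writer.writerow([percents[z][i] if i < len(percents.get(z, [])) else None
                                     for z in zone_ids])

        write_percent_csv('area_comparisons_madsen.csv', area_percent_madsen)
        write_percent_csv('area_comparisons_energy.csv', area_percent_energy)

    Writing None produces an empty CSV cell, which is how the revised script marks zones with no analytical polygon instead of treating them as zero area as the old version did.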
  • anuga_work/production/new_south_wales/batemans_bay/export_results_max.py

    r7369 → r7577

     #Specify output directories
    -time_dir1 = '20090529_143527_run_final_0.0_51424_jgriffin'
    -time_dir2 = '20090529_143458_run_final_0.0_58346_jgriffin'
    +time_dir1 = '20090601_172248_run_final_0.0_58284__250m_jgriffin'
    +##time_dir2 = '20090529_143458_run_final_0.0_58346_jgriffin'

    -time_dirs = [time_dir1, time_dir2]
    +time_dirs = [time_dir1]#, time_dir2]

     # sww filename extensions ie. if batemans_bay_time_37860_0.sww, input into list 37860
     # make sure numbers are in sequential order
    -times = [37860]
    +times = []

     #Modify the cellsize value to set the size of the raster you require
     #Take into account mesh size when aplying this paramater
    -cellsize = 20   #dependent on data resolution in area of interest.
    +cellsize = 250   #dependent on data resolution in area of interest.

     #Now set the timestep at which you want the raster generated.
    …
     # one or more key strings from var_equations above
     #var = ['depth', 'speed','stage']
    -var = ['elevation']
    +var = ['stage','speed','elevation']
     ######
     # Start script, running through variables, area, folder, sww file (determine by times)
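    For context, the settings above drive the loop described by the final comment ("running through variables, area, folder, sww file"). Below is a rough sketch of how they could combine; the base sww name, the join pattern and the output grid naming are assumptions, with only the *_time_<t>_0.sww convention and the 250 m cell size taken from the script's comments and the renamed run directory.

        import os

        output_dir = os.path.join('anuga', 'outputs')
        time_dirs = ['20090601_172248_run_final_0.0_58284__250m_jgriffin']
        times = []      # extra sww extensions, e.g. [37860] for batemans_bay_time_37860_0.sww
        cellsize = 250  # raster cell size in metres, presumably matching the 250 m mesh run
        variables = ['stage', 'speed', 'elevation']

        for time_dir in time_dirs:
            folder = os.path.join(output_dir, time_dir)
            # the base sww plus any continuation files named by the entries in `times`
            sww_files = ['batemans_bay.sww'] + ['batemans_bay_time_%d_0.sww' % t for t in times]
            for sww in sww_files:
                for var in variables:
                    grid = '%s_%s_%dm.asc' % (os.path.splitext(sww)[0], var, cellsize)
                    print('would export %s from %s to %s' % (var, os.path.join(folder, sww), grid))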
  • anuga_work/production/new_south_wales/batemans_bay/get_timeseries.py

    r7369 → r7577

     directory = project.output_folder

    -time_dir1 = '20090529_143527_run_final_0.0_51424_jgriffin'
    -#time_dir2 = '20090529_143442_run_final_0.0_51347_jgriffin'
    +time_dir1 = '20090601_172248_run_final_0.0_58284__250m_jgriffin'
    +time_dir2 = '20090520_145616_run_final_0.0_58284_jgriffin'

    -time_dirs = [time_dir1]#, time_dir2]
    +time_dirs = [time_dir1, time_dir2]


  • anuga_work/production/new_south_wales/batemans_bay/project.py

    r7327 → r7577

     # Used in get_timeseries.py.
     # Format easting,northing,name,elevation (with header)
    -gauges_filename = 'gauges.csv'
    +gauges_filename = 'phase2_comp.csv'

     # BUILDINGS EXPOSURE - for identifying inundated houses
    …
     # Thinned ordering file from Hazard Map (geographic)
     # Format is index,latitude,longitude (with header)
    -urs_order_filename = 'thinned_boundary_ordering_extend.csv'
    +urs_order_filename = 'urs_order.csv'

     # Landward bounding points
     # Format easting,northing (no header)
    -landward_boundary_filename = 'landward_boundary_extend.csv'
    +landward_boundary_filename = 'landward_boundary.csv'

     # MUX input filename.
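    The comments in project.py document the formats of two of the CSV inputs touched here: the gauges file now pointed at 'phase2_comp.csv' (easting,northing,name,elevation, with a header) and the landward boundary file (easting,northing, no header). A small sketch of readers for those formats, assuming the header uses exactly the column names given in the comment:

        import csv

        def read_gauges(path):
            # Headed CSV: easting,northing,name,elevation (column names assumed from the comment).
            gauges = []
            with open(path, newline='') as f:
                for row in csv.DictReader(f):
                    gauges.append((float(row['easting']), float(row['northing']),
                                   row['name'], float(row['elevation'])))
            return gauges

        def read_landward_boundary(path):
            # Header-less CSV: one easting,northing pair per line.
            with open(path, newline='') as f:
                return [(float(row[0]), float(row[1])) for row in csv.reader(f) if row]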