source: trunk/anuga_validation/uq_friction_2007/calc_norm.py @ 7877

Last change on this file since 7877 was 4895, checked in by duncan, 16 years ago

working on ANUGA report.

File size: 4.8 KB
1"""
2Idea's
3have it use cache!
4"""
5
6
7#----------------------------------------------------------------------------
8# Import necessary modules
9#----------------------------------------------------------------------------
10
11# Standard modules
12import os
13from os import sep, path
14import csv
15
16# Related major packages
17import project             
18from anuga.abstract_2d_finite_volumes.util import file_function
19from numerical_tools import  err   # norm, corr, 
20#X:\anuga_core\source\anuga\utilities\numerical_tools import  err
21
def load_csv(file_name):
    """
    Load the csv file as a dict, with each column title as key and the
    list of column values (as strings) as value.
    Also create a dict mapping each title to its column index,
    to keep track of the column order.
    """
    attribute_dic = {}
    title_index_dic = {}
    titles_stripped = [] # list of titles

    reader = csv.reader(file(file_name))

    # Read in and manipulate the title info
    titles = reader.next()
    for i, title in enumerate(titles):
        titles_stripped.append(title.strip())
        title_index_dic[title.strip()] = i
    title_count = len(titles_stripped)
    #print "title_index_dic", title_index_dic

    # Create a dict of column values, indexed by column title
    for line in reader:
        if len(line) != title_count:
            raise IOError('Inconsistent number of columns in %s' % file_name)
        for i, value in enumerate(line):
            attribute_dic.setdefault(titles_stripped[i], []).append(value)

    return attribute_dic, title_index_dic


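# Usage sketch (editor's note, not part of the original run): for a
# hypothetical file 'example.csv' whose first row is 'time, 0.2, 0.3',
# load_csv would behave roughly like this:
#
#   attribute_dic, title_index_dic = load_csv('example.csv')
#   attribute_dic['time']     # -> list of the 'time' column values, as strings
#   title_index_dic['0.2']    # -> 1 (position of the '0.2' column)
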
def write_norm_results(file_name, norms):
    """Write the norms out as a small csv: one row per sww file."""
    fd = open(file_name, 'w')
    fd.write('sww_file, friction, 2nd norm' + '\n')
    for norm in norms:
        fd.write(norm[0] + ', ' + str(norm[1]) + ', ' + str(norm[2]) + '\n')
    fd.close()

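# Example of the resulting file (editor's sketch; the file name and values
# are hypothetical, and the friction column is taken from the sww file name):
#
#   sww_file, friction, 2nd norm
#   xx0.01.sww, 0.01, 1.234
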
def calc_norm(flume_name,
              is_trial_run=False,
              outputdir_name=None,
              pro_instance=None):
    if is_trial_run:
        outputdir_name += '_test'
    if pro_instance is None:
        pro_instance = project.Project(['data', 'flumes', 'dam_2006'],
                                       outputdir_name=outputdir_name)

    # Read in the flume data
    flume_results = path.join(pro_instance.home, 'data', 'flumes',
                              'dam_UQ_data_2006', 'longer_data',
                              flume_name)
    flume_results, _ = load_csv(flume_results)
    #print "flume_results", flume_results  #.keys()
    #print "flume_results['time']", flume_results['time']

    # Get a list of the sww files in the output directory
    files = os.listdir(pro_instance.outputdir)
    files = [x for x in files if x[-4:] == '.sww']
    files.sort()
    print "checking against files: ", files

    quantities = ['stage', 'elevation']
    gauge_locations = [[0.2, 0.2],
                       [0.3, 0.2],
                       [0.4, 0.2],
                       [0.5, 0.2],
                       [0.6, 0.2]]

    norm_sums = []
    for sww_file in files:
        #print "sww_file", sww_file
        file_instance = file_function(pro_instance.outputdir + sep + sww_file,
                                      quantities=quantities,
                                      interpolation_points=gauge_locations,
                                      verbose=True,
                                      use_cache=True)
        # file_instance(0.02, point_id=0) #This works
        norm_sum = 0
        for location in range(len(gauge_locations)):
            #print "location", location
            #print "gauge_locations[location]", gauge_locations[location]

            # Build the simulated depth time series at this gauge
            simulated_depths = []
            #print "flume_results['time']", flume_results['time']
            for time in flume_results['time']:
                quantities_slice = file_instance(float(time),
                                                 point_id=location)
                #print "quantities", quantities_slice
                depth = quantities_slice[0] - quantities_slice[1]  # stage - elevation
                simulated_depths.append(depth)
            #print "simulated_depths", simulated_depths

            # Get the measured flume depths at this location
            flume_depths = flume_results[str(gauge_locations[location][0])]
            flume_depths = [float(i) for i in flume_depths]

            # Calculate the norm of the difference between the two series
            #print "map(None, simulated_depths, flume_depths)", \
            #      map(None, simulated_depths, flume_depths)
            norm = err(simulated_depths,
                       flume_depths, 2, relative=True)  # 2nd norm (rel. RMS)
            norm_sum += norm
        print "norm_sum", norm_sum
        norm_sums.append([sww_file, sww_file[2:-4], norm_sum])

        # Rewrite the results file after each sww file (the full norm_sums
        # list is written out each time)
        norm_file_name = pro_instance.outputdir + sep + \
                         flume_name[:-4] + '_norm' + flume_name[-4:]
        write_norm_results(norm_file_name, norm_sums)

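# Note on the error measure (editor's sketch): with n=2 and relative=True,
# err is understood here to return the 2-norm of the residual scaled by the
# 2-norm of the reference series, roughly equivalent to:
#
#   def relative_l2_error(simulated, observed):
#       num = sum((s - o)**2 for s, o in zip(simulated, observed)) ** 0.5
#       den = sum(o**2 for o in observed) ** 0.5
#       return num / den
#
# relative_l2_error is a hypothetical name; see anuga's numerical_tools for
# the actual definition of err.
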
#-------------------------------------------------------------
if __name__ == "__main__":
    calc_norm('fromD_0.2_Slope0.05_x0.2-0.6_clean.csv',
              is_trial_run=False,
              outputdir_name='friction_set_A')