Ignore:
Timestamp:
Aug 27, 2008, 8:52:21 AM (15 years ago)
Author:
duncan
Message:

Adding a header to the boundary files

File:
1 edited

Legend:

Unmodified
Added
Removed
  • anuga_validation/Hinwood_2008/calc_norm.py

    r5689 r5691  
    11"""
    2 This is getting really messy.
    32
    43All err results are going into the same dir, and it can't really be changed.
     
    1211# Standard modules
    1312import os
    14 from os import sep, path
    1513from csv import writer
    1614from time import localtime, strftime
    1715
    1816# Related major packages
    19 from Numeric import arange, array, zeros, Float, where, greater, less, \
    20      compress, argmin, choose, searchsorted, sqrt, sum
    21 
    22 import project         
    23 from os import sep
     17from Numeric import zeros, Float, where, greater, less, compress, sqrt, sum
     18
    2419from anuga.shallow_water.data_manager import csv2dict
    2520from anuga.utilities.numerical_tools import ensure_numeric, err, norm
    26 
    2721from anuga.utilities.interp import interp
    2822
     
    3327    assert min > SMALL_MIN
    3428    no_maxs = where(less(vector,max), vector, SMALL_MIN)
    35     #print "no_maxs", no_maxs
    3629    band_condition = greater(no_maxs, min)
    3730    return band_condition
     
    4134              y_location_tag=':0.0'):
    4235    """
    43     Given a bunch of scenarios that have CSV guage files, calc the
     36    Given a list of scenarios that have CSV gauge files, calc the
    4437    err, Number_of_samples and rmsd for all gauges in each scenario.
    4538    Write this info to a file for each scenario.
     
    6760        time_sim = ensure_numeric(time_sim)
    6861        time_exp = ensure_numeric(time_exp)
    69         #print "min(time_exp)", min(time_exp)
    70         #print "max(time_exp)", max(time_exp)
    7162       
    7263        condition = get_max_min_condition_array(run_data['wave_times'][0],
    7364                                                run_data['wave_times'][1],
    7465                                                time_exp)
    75         time_exp_cut = compress(condition, time_exp) #, axis=axis)
    76         #print "min(time_exp_cut)", min(time_exp_cut)
    77         #print "max(time_exp_cut)", max(time_exp_cut)
    78        
    79         #assert min(time_sim) < min(time_exp)
     66        time_exp_cut = compress(condition, time_exp)
    8067       
    8168        print "Writing to ", file_err
     
    10087            err_list.append(norm)
    10188            points.append(len(quantity_sim_interp))
    102             rmsd_list.append(norm/sqrt(len(quantity_sim_interp)))             
    103             #print "norm", norm
    104             #for i in range(len(quantity_sim_interp)):
    105                
    106                 #print "quantity_sim_interp", quantity_sim_interp[i]
    107                 #print "quantity_exp_cut", quantity_exp_cut[i]
     89            rmsd_list.append(norm/sqrt(len(quantity_sim_interp)))
    10890        assert len(location_exps) == len(err_list)
    10991
     
    124106    the first column explains the rows.
    125107    """
    126     #slope, _ = csv2dict(file_sim)
    127108   
    128109    # Read the depth file
     
    137118    depths = zeros(n_time, Float)  #
    138119    sensors = zeros((n_time,n_sensors), Float)
    139     quantity_locations = title.split(',') #(',')
     120    quantity_locations = title.split(',')
    140121    quantity_locations.pop(0) # remove 'time'
    141122
     
    144125   
    145126    for i, line in enumerate(lines):
    146         fields = line.split(',') #(',')
     127        fields = line.split(',')
    147128        fields = [float(j) for j in fields]
    148129        times[i] = fields[0]
    149130        sensors[i] = fields[1:] # 1: to remove time
    150131
    151     #print "times",times
    152     #print "locations", locations
    153     #print "sensors", sensors
    154132    return times, locations, sensors                 
    155133   
     
    161139def err_files(scenarios, outputdir_tag, quantity='stage'):
    162140    """
    163     The err files, for a list of scenarios
     141    Create a list of err files, for a list of scenarios.
    164142    """
    165143    file_errs = []
     
    173151
    174152def compare_different_settings(outputdir_tag, scenarios, quantity='stage'):
    175 
     153    """
     154    Calculate the RMSD for all the tests in a scenario
     155    """
    176156    files = err_files(scenarios, outputdir_tag, quantity=quantity)
    177157    err = 0.0
     
    195175def err_addition(err_list, number_of_samples_list):
    196176    """
    197     err1 is the err value (sqrt(sum_over_x&y((xi - yi)^2))) for a set of values
    198     number_of_samples1 is the number of values associated with the err.
     177    This function 'sums' a list of errs and sums a list of samples
     178   
     179    err is the err value (sqrt(sum_over_x&y((xi - yi)^2))) for a set of values.
     180    number_of_samples is the number of values associated with the err.
    199181   
    200182    If this function gets used a lot, maybe pull this out and make it an object
     
    212194    from scenarios import scenarios
    213195   
    214    
    215     scenarios = [scenarios[0]] # !!!!!!!!!!!!!!!!!!!!!!
    216     #scenarios = scenarios[4:] # !!!!!!!!!!!!!!!!!!!!!!
    217 
    218    
    219     outputdir_tag = "_nolmts_wdth_0.1_z_0.0_ys_0.5_mta_0.01"
     196    #scenarios = [scenarios[0]] # !!!!!!!!!!!!!!!!!!!!!!
     197
     198    outputdir_tag = "_nolmts_wdth_0.1_z_0.0_ys_0.01_mta_0.01"
    220199    calc_norms = True
    221200    #calc_norms = False
Note: See TracChangeset for help on using the changeset viewer.