"""
Script for running a breaking wave simulation of Jon Hinwood's wave tank.
Note: this is based on the friction_ua_flume_2006 structure.


Duncan Gray, GA - 2007

"""


#----------------------------------------------------------------------------
# Import necessary modules
#----------------------------------------------------------------------------

from Scientific.IO.NetCDF import NetCDFFile
from Numeric import array, zeros, Float
from os.path import join

from anuga.utilities.numerical_tools import ensure_numeric

from interp import interp


import project

# from os import getenv
# from os.path import join
# home = getenv('INUNDATIONHOME')
# Hinwood_dir = join(home,'data','flumes','Hinwood_2008')
# raw_data_dir = join(Hinwood_dir, 'raw_data')

def csv2tms(filename, offshore_bed_elevation):
    """Convert benchmark 2 time series to a NetCDF tms file.

    filename is the name of the output tms file, e.g. 'hi.tsm'.
    There must be an equivalent .csv file, e.g. 'hi.csv'.
    """

    print 'Creating', filename

    # Read the ascii (.csv) version of this file
    fid = open(filename[:-4] + '.csv')

    # Read all lines
    lines = fid.readlines()
    fid.close()


    N = len(lines)
    T = zeros(N, Float)  # Time
    Q = zeros(N, Float)  # Stage
    X = zeros(N, Float)  # X momentum
    Y = zeros(N, Float)  # Y momentum

    for i, line in enumerate(lines):
        fields = line.split(',')

        T[i] = float(fields[0])
        Q[i] = float(fields[1])
        depth = Q[i] - offshore_bed_elevation
        # Momentum is velocity times water depth
        X[i] = float(fields[2]) * depth
        try:
            Y[i] = float(fields[3]) * depth
        except (IndexError, ValueError):
            # No usable y-velocity column; leave Y[i] as 0
            pass


    # Create tms NetCDF file
    fid = NetCDFFile(filename, 'w')
    fid.institution = 'Geoscience Australia'
    fid.description = 'Input wave for Benchmark 2'
    fid.starttime = 0.0
    fid.createDimension('number_of_timesteps', len(T))
    fid.createVariable('time', Float, ('number_of_timesteps',))
    fid.variables['time'][:] = T

    fid.createVariable('stage', Float, ('number_of_timesteps',))
    fid.variables['stage'][:] = Q[:]

    fid.createVariable('xmomentum', Float, ('number_of_timesteps',))
    fid.variables['xmomentum'][:] = X[:]

    fid.createVariable('ymomentum', Float, ('number_of_timesteps',))
    fid.variables['ymomentum'][:] = Y[:]

    fid.close()

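# Example usage of csv2tms (an illustrative sketch only; the file name and
# bed elevation below are made-up values, not taken from any scenario):
#
#     csv2tms('hi.tsm', -0.4)
#
# This reads 'hi.csv' (columns: time, stage, x-velocity[, y-velocity]) and
# writes the NetCDF boundary file 'hi.tsm' with time, stage, xmomentum and
# ymomentum variables.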

def combine_velocity_depth(velocity_file, depth_file, out_file, metadata_dic):
    """
    Convert the raw velocity and depth values, which are sampled at
    different times, to a csv file with values at the same times and in
    SI units.

    The depth values are interpolated to the times of the velocity values.

    The format of the velocity file is:
    [time, sec], [x-velocity, +ve is towards the wave generator, cm/sec],
    [y-velocity], [z-velocity]

    The format of the pressure file is:
    [time, sec], [mm above SWL for sensor A], many other sensors...

    The format of the output file is:
    [time, sec], [m above SWL for sensor A],
    [x-velocity, +ve is towards the shore, m/sec],
    [y-velocity, +ve is towards the left of the tank, looking from the
    generator to the shore, m/sec]
    """
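    # Illustrative example only (the numbers below are made up, not taken
    # from any scenario): a velocity-file row such as
    #     10.02  -12.3  0.4  0.1
    # (time [s], x-, y-, z-velocity [cm/s]), combined with an interpolated
    # depth of 123 mm, becomes the output row
    #     0.0,0.123,0.123,-0.004
    # once times have been shifted so the record starts at zero.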
    missing = 1e+20

    # Read velocity file
    #print "*********************"
    #print "velocity_file", velocity_file
    vfid = open(velocity_file)
    lines = vfid.readlines()
    vfid.close()


    n_velocity = len(lines)
    vtimes = zeros(n_velocity, Float)        # Time
    x_velocities = zeros(n_velocity, Float)  # x velocity, cm/sec
    y_velocities = zeros(n_velocity, Float)  # y velocity, cm/sec

    for i, line in enumerate(lines):
        fields = line.split()  # whitespace separated

        vtimes[i] = float(fields[0])
        x_velocities[i] = float(fields[1])
        y_velocities[i] = float(fields[2])

    # Read the depth file
    #print "depth_file", depth_file
    dfid = open(depth_file)
    lines = dfid.readlines()
    dfid.close()


    n_depth = len(lines)
    n_sensors = len(lines[0].split())
    dtimes = zeros(n_depth, Float)   # Time
    depths = zeros(n_depth, Float)   # mm above SWL at sensor A
    sensors = zeros((n_depth, n_sensors), Float)

    for i, line in enumerate(lines):
        fields = line.split()  # whitespace separated
        fields = [float(j) for j in fields]
        dtimes[i] = fields[0]
        depths[i] = fields[1]
        sensors[i] = fields
    #print "dtimes", dtimes
    #print "depths", depths
    #print "vtimes", vtimes
    # Interpolate the depth record onto the velocity times; gaps are
    # flagged with the 'missing' value.
    depths_at_vtimes = interp(depths, dtimes,
                              vtimes, missing=missing)
    depths_at_vtimes = ensure_numeric(depths_at_vtimes)

    #print "len(dtimes)", len(vtimes)
    #print "len(depths_at_vtimes)", len(depths_at_vtimes)
    #print "metadata_dic['scenario_id']", metadata_dic['scenario_id']
    # for i in range(len(depths_at_vtimes)):
    #     print "i", i
    #     print "vtimes[i]", vtimes[i]
    #     print "depths_at_vtimes[i]", depths_at_vtimes[i]

    #print "depths_at_vtimes", depths_at_vtimes
    depths_at_vtimes = depths_at_vtimes/1000.00  # convert from mm to m
    missing = missing/1000.00  # Scale the missing value the same way
    x_velocities = ensure_numeric(x_velocities)
    # Reverse the direction and convert from cm/sec to m/sec
    x_velocities = x_velocities * -0.01
    y_velocities = ensure_numeric(y_velocities)
    # Reverse the direction and convert from cm/sec to m/sec
    y_velocities = y_velocities * -0.01

    fid = open(out_file, 'w')

    assert len(depths_at_vtimes) == len(vtimes)
    start_time = None
    #start_time = 0.0
    #for vtime, depth_at_vtime, velocity in map(vtimes, depths_at_vtimes,
    #                                            velocities):
    for i in xrange(len(vtimes)):
        if depths_at_vtimes[i] != missing:
            # Make the times start at zero.
            if start_time is None:
                start_time = vtimes[i]
            final_time = vtimes[i] - start_time
            fid.write(str(final_time)
                      + ',' + str(depths_at_vtimes[i])
                      + ',' + str(x_velocities[i])
                      + ',' + str(y_velocities[i]) + '\n')

    fid.close()
    print "The start time is", start_time
    if start_time != metadata_dic['ANUGA_start_time']:
        raise ValueError, "The calculated and recorded start times are different"
    # Modify the sensor array to reflect the new start time
    sensors[:,0] -= start_time

    # Since there is a new time reference, save the depth info using this
    # new reference.
    fid = open(depth_file[:-4] + '_exp_depth.csv', 'w')
    #print "depth_file", depth_file
    #print "start_time", start_time

    bed_elevation_list = metadata_dic['gauge_bed_elevation']
    # +2 due to the Time column and gauge A
    max_j = len(bed_elevation_list) + 2
    # Write a header
    fid.write('Time')
    for gauge_x in metadata_dic['gauge_x']:
        fid.write(',' + str(gauge_x))
    fid.write('\n')
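    # The resulting header is 'Time' followed by one column per gauge
    # x-position, e.g. (illustrative values only):
    #     Time,4.7,5.7,6.7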
    for i in xrange(len(dtimes)):
        fid.write(str(sensors[i,0]))  # Time
        # Don't write sensor A.
        # It is the boundary condition.
        for j, bed_elevation in zip(xrange(2, max_j),
                                    bed_elevation_list):
            # depth, m
            gauge = sensors[i,j]/1000 - bed_elevation  # Convert from mm to m
            fid.write(',' + str(gauge))
        fid.write('\n')
    fid.close()


    # Since there is a new time reference, save the stage info using this
    # new reference.
    fid = open(depth_file[:-4] + '_exp_stage.csv', 'w')

    # Write a header
    fid.write('Time')
    for gauge_x in metadata_dic['gauge_x']:
        fid.write(',' + str(gauge_x))
    fid.write('\n')
    for i in xrange(len(dtimes)):
        fid.write(str(sensors[i,0]))  # Time
        # Don't write sensor A.
        # It is the boundary condition.
        for j in xrange(2, max_j):
            # stage, m
            gauge = sensors[i,j]/1000  # Convert from mm to m
            fid.write(',' + str(gauge))
        fid.write('\n')
    fid.close()

    return final_time

def prepare_time_boundary(metadata_dic, raw_data_dir, output_dir):
    """
    Use this if a project instance has already been created.
    """
    scenario_id = metadata_dic['scenario_id']
    velocity_file = join(raw_data_dir, scenario_id + 'velfilt.txt')
    depth_file = join(raw_data_dir, scenario_id + 'pressfilt.txt')
    out_file = join(output_dir, scenario_id + '_boundary.csv')
    #out_depth_file = join(output_dir, scenario_id+'_exp_depth.csv')

    final_time = combine_velocity_depth(velocity_file, depth_file, out_file,
                                        metadata_dic)
    #print "metadata_dic['xleft'][1]", metadata_dic['xleft'][1]
    if metadata_dic['xleft'][1] >= 0.0:
        # This should be a -ve value, since the still water level is the
        # z origin.
        print "Warning: The z origin seems incorrect."
    tsm_file = out_file[:-4] + '.tsm'
    csv2tms(tsm_file, metadata_dic['xleft'][1])
    return final_time
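
# Example usage of prepare_time_boundary (an illustrative sketch only; the
# dictionary below is a made-up stand-in, whereas the real metadata
# dictionaries come from scenarios.py and carry more keys):
#
#     metadata_dic = {'scenario_id': 'T1R5',
#                     'ANUGA_start_time': 0.0,
#                     'xleft': [0.0, -0.4],
#                     'gauge_x': [4.7, 5.7],
#                     'gauge_bed_elevation': [-0.3, -0.2]}
#     final_time = prepare_time_boundary(metadata_dic,
#                                        'raw_data', 'boundaries')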

# Don't do this, since run-dam changes the metadata_dic['xleft'][1],
# which is used by this function.
def prepare_time_boundary_for_scenarios_warning(scenarios,
                                                outputdir_tag):

    for run_data in scenarios:
        id = run_data['scenario_id']
        outputdir_name = id + outputdir_tag
        pro_instance = project.Project(['data','flumes','Hinwood_2008'],
                                       outputdir_name=outputdir_name)
        prepare_time_boundary(run_data,
                              pro_instance.raw_data_dir,
                              pro_instance.boundarydir)


#-------------------------------------------------------------------
if __name__ == "__main__":

    import sys; sys.exit()

    from scenarios import scenarios
    outputdir_tag = "XXXX"
    prepare_time_boundary_for_scenarios_warning(scenarios,
                                                outputdir_tag)