Changeset 7276 for anuga_validation
- Timestamp: Jun 30, 2009, 2:07:41 PM
- Location: anuga_validation
- Files: 38 edited, 1 copied
anuga_validation/automated_validation_tests/README.txt
r3706 → r7276: one long sentence is re-wrapped ("... and that they generally take considerably longer time to execute than normal unittests.") and the following note is added after the description of the master script, validate_all.py:

    NOTE: In some circumstances there may be problems running validate_all.py
    IF MORE THAN ONE PYTHON IS INSTALLED.  The workaround is to:
    1. Decide which python you want to use, and get the absolute path to that
       executable ('which python2.5', for example)
    2. Create a local 'bin' directory:
           mkdir ~/bin
    3. Create a 'python' link in that directory to the python executable:
           ln -s <path_from_step_1> ~/bin/python
    4. Create or modify your PATH variable in ~/.bashrc to include:
           export PATH=$HOME/bin:$PATH
       That is, ensure your 'local' bin is searched first.
    5. Remove any 'python' alias you may have defined in ~/.bashrc (or elsewhere).
    6. Ensure all the above changes are in effect by opening a new terminal.

    The above workaround was tested under Linux; it has not been checked under Windows.
anuga_validation/automated_validation_tests/UQ_runup_2006/Dam break_240406_0011.lic
r5337 → r7276: the XML declaration now uses single quotes (<?xml version='1.0' encoding='iso-8859-1'?>) and the blank line that followed it is removed; the metadata (author Duncan Gray) is unchanged.
anuga_validation/automated_validation_tests/UQ_runup_2006/actual.lic
r5105 → r7276: the XML declaration now uses single quotes and the blank line after it is removed; nothing else changes.
anuga_validation/automated_validation_tests/fitting/validate_benchmark_fit.py
r7259 → r7276: the benchmark standards in test_fit_time_and_mem are reworked (a sketch of the new lookup follows below).

- A note is added to the module docstring: "2009 June 22 - RW. Changed the time/memory values to be exactly those found on the hardware. Took 10 measurements, used maximum in found values."
- The long if/elif chain on the hostname (tornado, compute, cyclone, nautilus, bogong, pc-31569, PC-32572), the generic defaults of 120 s / 50000 KiB and a block of commented-out debug output are all replaced by a dictionary of expected results keyed by a unique lowercase prefix of the machine name, with each value a (time, memory) tuple in seconds and KiB:

      expected_results = {'tornado':  (10.8, 40468),  # tornado headnode
                          'cyclone':  (7.4,  40468),  # cyclone headnode
                          'compute':  (10.8, 40468),  # cluster computenode
                          'nautilus': (8.1,  16104),  # Ole's 32bit Ubuntu
                          'bogong':   (14.2, 30000),  # ANU?
                          'pc-31569': (31.6, 15000),  # DSG's PC?
                          'pc-32572': (12.8, 15788),  # Ross' 32bit work Ubuntu
                          'saturn':   (12.8, 39404)}  # Ross' 64bit home Ubuntu

  The defaults for unknown machines are now 15.0 s (the maximum of the table, plus a little) and 40000 KiB; the hostname from socket.gethostname() is lowercased and matched against the dictionary keys with startswith().
- The call to BenchmarkLeastSquares().trial() is reformatted but passes the same arguments (num_of_points=1000, maxArea=0.0001, is_fit=True, segments_in_mesh=False, use_file_type='pts', save=False).
- A failed time check no longer raises immediately: the message is printed and a got_error flag is set so the memory check is still reported. The memory assertion is unchanged, and a RuntimeError is raised at the end if the time check failed.
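The new lookup replaces the per-host if/elif chain with a prefix match against the dictionary above. A minimal sketch of the pattern, assuming only socket.gethostname() and the figures listed in the diff (the within_tolerance helper is illustrative, not part of the changeset):

    import socket

    # (time [s], memory [KiB]) expected on each known machine;
    # keys are unique, lowercase prefixes of the hostname.
    expected_results = {'tornado':  (10.8, 40468),
                        'cyclone':  (7.4,  40468),
                        'compute':  (10.8, 40468),
                        'nautilus': (8.1,  16104),
                        'bogong':   (14.2, 30000),
                        'pc-31569': (31.6, 15000),
                        'pc-32572': (12.8, 15788),
                        'saturn':   (12.8, 39404)}

    # fall-back standards for machines not listed above
    time_standard = 15.0      # roughly the maximum of the table, plus a margin
    mem_standard = 40000

    host = socket.gethostname()
    for key in expected_results:
        if host.lower().startswith(key):
            time_standard, mem_standard = expected_results[key]
            break

    def within_tolerance(measured, standard, slack=1.2):
        # a measured result passes if it is within 20% of the standard
        return measured < standard * slack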
anuga_validation/automated_validation_tests/flow_tests/test_inflow_using_flowline.py
r7144 → r7276: 'import Numeric as num' becomes 'import numpy as num'.
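This one-line change is the same Numeric-to-numpy substitution applied across most Python files in this changeset. A small illustrative sketch of the recurring replacements, written against the numpy of that era (num.float has since been removed from modern numpy in favour of plain float or num.float64; the arrays here are made up):

    # Old (Numeric):                       New (numpy):
    #   import Numeric as num                import numpy as num
    #   num.zeros(n, num.Float)              num.zeros(n, num.float)
    #   from Numeric import allclose         num.allclose(a, b)
    import numpy as num

    n = 5
    T = num.zeros(n, num.float)      # Numeric's num.Float becomes num.float
    Q = num.ones(n, num.float)
    assert num.allclose(T + Q, Q)    # helpers are used via the num alias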
anuga_validation/automated_validation_tests/okushiri_tank_validation/Benchmark_2_Bathymetry.lic
r5025 → r7276: the XML declaration now uses single quotes and the blank line after it is removed; nothing else changes.
anuga_validation/automated_validation_tests/okushiri_tank_validation/Benchmark_2_input.lic
r5025 → r7276: the XML declaration now uses single quotes and the blank line after it is removed; nothing else changes.
anuga_validation/automated_validation_tests/okushiri_tank_validation/compare_timeseries_with_measures.py
r6160 → r7276: 'import Numeric as num' becomes 'import numpy as num'.
anuga_validation/automated_validation_tests/okushiri_tank_validation/create_okushiri.py
r6160 → r7276: 'import Numeric as num' becomes 'import numpy as num' and 'from anuga.config import netcdf_float' is added. The boundary time and value arrays are now created with num.zeros(N, num.float) instead of num.zeros(N, num.Float), and the time, stage, xmomentum and ymomentum variables of the output file are created with the netcdf_float type instead of num.Float.
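The same .tms-writing pattern (time, stage, xmomentum and ymomentum variables typed as netcdf_float) also appears in run_circular.py, sqrt_table_run_circular.py and the okushiri create scripts further down. A condensed sketch, assuming the Scientific.IO.NetCDF and anuga.config modules exactly as imported in the diff; the function name and docstring are illustrative:

    from Scientific.IO.NetCDF import NetCDFFile
    from anuga.config import netcdf_float

    def write_tms(filename, T, Q):
        """Write a time-series boundary file with stage values Q at times T."""
        fid = NetCDFFile(filename, 'w')
        fid.starttime = 0.0

        fid.createDimension('number_of_timesteps', len(T))
        fid.createVariable('time', netcdf_float, ('number_of_timesteps',))
        fid.variables['time'][:] = T

        fid.createVariable('stage', netcdf_float, ('number_of_timesteps',))
        fid.variables['stage'][:] = Q[:]

        # momenta are written as zero for a pure stage boundary
        fid.createVariable('xmomentum', netcdf_float, ('number_of_timesteps',))
        fid.variables['xmomentum'][:] = 0.0

        fid.createVariable('ymomentum', netcdf_float, ('number_of_timesteps',))
        fid.variables['ymomentum'][:] = 0.0

        fid.close()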
anuga_validation/automated_validation_tests/okushiri_tank_validation/output_ch5-7-9.lic
r5025 → r7276: the XML declaration now uses single quotes and the blank line after it is removed; the checksum for output_ch5-7-9.txt is rewritten as an unsigned 32-bit value, -1044604103 becoming 3250363193.
anuga_validation/automated_validation_tests/okushiri_tank_validation/test_caching_of_set_quantity.py
r6708 → r7276: 'import Numeric as num' becomes 'import numpy as num'.
anuga_validation/automated_validation_tests/patong_beach_validation/build_urs_boundary.py
r6844 → r7276: 'import Numeric as num' becomes 'import numpy as num' and a second, redundant 'import Numeric as num' further down is removed; the mux_weights array is now built with num.ones(len(mux_filenames), num.float) instead of num.Float.
anuga_validation/automated_validation_tests/patong_beach_validation/cmpsww.py
r7040 → r7276: cmpsww.py gains configurable tolerances, a quiet mode and difference statistics (a sketch of the comparison core follows below).

- 'import Numeric as num' becomes 'import numpy as num'.
- The single hard-wired slop value (epsilon = 1.0e-9) is replaced by default_abs_tolerance = 1.0e-08 and default_rel_tolerance = 1.0e-05 (the numpy allclose defaults), and a module-level 'quiet' flag is added for the new -q option.
- files_are_the_same() now accepts rel_tolerance and abs_tolerance keyword arguments. Mismatching global attributes are printed rather than appended to the error message, and counted in a glob_vars_bad dictionary.
- Data variables are compared with num.allclose(var1, var2, rtol=rel_tolerance, atol=abs_tolerance), both for per-timestep slices and for simple variables. For every differing element the absolute and relative difference is computed and, unless quiet, printed; the maximum relative difference (with its absolute difference and the corresponding 'a' and 'b' values) is tracked, differences are counted per variable in data_vars_bad, and a total diff_count is kept. The old behaviour of reporting only the first differing element of each variable is gone.
- On failure the final error message summarises the number of data differences, the maximum relative difference with its associated absolute difference and a/b values, and the glob_vars_bad / data_vars_bad dictionaries, before raising RuntimeError.
- The usage text and getopt string gain -q (quiet), -a <val> (absolute threshold) and -r <val> (relative threshold); main() initialises the new settings and passes the tolerances through to files_are_the_same().
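The heart of the new comparison is numpy's allclose with configurable rtol/atol plus per-element difference reporting. A minimal sketch of that check using the same default tolerances as the diff; the compare() helper and its return format are illustrative, not the script's actual interface:

    import numpy as num

    default_abs_tolerance = 1.0e-08      # same defaults as numpy's allclose
    default_rel_tolerance = 1.0e-05

    def compare(var1, var2, rel_tolerance=default_rel_tolerance,
                abs_tolerance=default_abs_tolerance):
        """Return (index, a, b, abs_diff, rel_diff) for each differing element."""
        var1 = num.asarray(var1, dtype=float)
        var2 = num.asarray(var2, dtype=float)
        differences = []
        if not num.allclose(var1, var2, rtol=rel_tolerance, atol=abs_tolerance):
            for i in range(len(var1)):
                if not num.allclose(var1[i], var2[i],
                                    rtol=rel_tolerance, atol=abs_tolerance):
                    abs_difference = abs(var1[i] - var2[i])
                    # relative difference against the larger of the two values
                    rel_difference = abs_difference / max(abs(var1[i]),
                                                          abs(var2[i]))
                    differences.append((i, var1[i], var2[i],
                                        abs_difference, rel_difference))
        return differences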
anuga_validation/automated_validation_tests/patong_beach_validation/run_model.py
r6927 → r7276: run_model.py switches from print statements to the ANUGA log module.

- The unused 'import Numeric as num' is commented out and 'import anuga.utilities.log as log' is added.
- Every progress print (creating the computational domain, project.mux_data_folder, domain.statistics(), setting up initial conditions, reading and creating building polygons, adding buildings, the available boundary tags, the per-step timestepping and boundary statistics in both evolve loops, and the final 'Simulation took %.2f seconds' message) is now emitted with log.critical() instead of print.
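For reference, the substitution applied throughout run_model.py looks like the following sketch; it assumes anuga.utilities.log with the log.critical() call seen in the diff, and the folder value is made up:

    import anuga.utilities.log as log

    # before:  print 'Create computational domain'
    # after:
    log.critical('Create computational domain')

    # values are interpolated into the message before logging,
    # rather than handed to print as extra arguments:
    mux_data_folder = '/some/path'   # illustrative value
    log.critical('project.mux_data_folder=%s' % mux_data_folder)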
anuga_validation/automated_validation_tests/patong_beach_validation/validate.py
r7267 → r7276: validate.py no longer tries to pick a specific Python interpreter.

- 'import platform', the PythonName default and the code that derived a version-specific interpreter name from platform.python_version_tuple() are removed; run_model.py and cmpsww.py are now invoked simply as 'python run_model.py > ...' and 'python cmpsww.py ... > cmpsww.stdout' via os.system() (sketched below).
- The setup() docstring is shortened to 'Prepare for the validation run.'
- The long informational message about 64bit accuracy and the environment variables is passed directly to log.critical() instead of being built in a separate string first.
- The per-type renaming of the log file (shutil.move of the log to '<log>.<type>') is dropped; the per-type timing message now ends with extra blank lines, and a 'Tearing down ...' message is logged before teardown(). Minor layout changes to the mirror list and the Optional_Data_Objects tuple accompany these.
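With the interpreter-selection code gone, the validation simply shells out to whatever 'python' is first on the PATH (which is why the README workaround above matters). A hedged sketch of that driving pattern; RUNMODEL_STDOUT's value and the helper names are illustrative:

    import os

    RUNMODEL_STDOUT = 'validation_run_model.stdout'   # illustrative filename

    def run_simulation():
        """Run the model with whichever 'python' is first on the PATH."""
        cmd = 'python run_model.py > %s' % RUNMODEL_STDOUT
        res = os.system(cmd)
        return res == 0          # os.system() returns 0 on success

    def compare_sww(local_sww, new_output_sww):
        """Compare two SWW files with the cmpsww.py helper from this changeset."""
        cmd = 'python cmpsww.py %s %s > cmpsww.stdout' % (local_sww, new_output_sww)
        return os.system(cmd) == 0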
anuga_validation/automated_validation_tests/urs_mux_files_validation/boundary_gauges.lic
r5109 → r7276: the XML declaration now uses single quotes and the blank line after it is removed; the checksum for boundary_gauges.txt is rewritten as an unsigned 32-bit value, -1553233046 becoming 2741734250.
anuga_validation/automated_validation_tests/urs_mux_files_validation/gauges.lic
r5109 → r7276: the XML declaration now uses single quotes and the blank line after it is removed; nothing else changes.
anuga_validation/automated_validation_tests/urs_mux_files_validation/o_test-e-mux.lic
r5505 → r7276: the XML declaration now uses single quotes and the blank line after it is removed; the checksum for o_test-e-mux is rewritten as an unsigned 32-bit value, -1664514244 becoming 2630453052.
anuga_validation/automated_validation_tests/urs_mux_files_validation/o_test-n-mux.lic
r5505 → r7276: the XML declaration now uses single quotes and the blank line after it is removed; nothing else changes.
anuga_validation/automated_validation_tests/urs_mux_files_validation/o_test-z-mux.lic
r5505 → r7276: the XML declaration now uses single quotes and the blank line after it is removed; the checksum for o_test-z-mux is rewritten as an unsigned 32-bit value, -1449856062 becoming 2845111234.
anuga_validation/automated_validation_tests/urs_mux_files_validation/run_Bf.py
r6160 → r7276: 'import Numeric as num' becomes 'import numpy as num'.
anuga_validation/automated_validation_tests/validate_all.py
r6885 → r7276: a bare 'print' statement that emitted a blank line before each 'python <filename>' command is removed.
anuga_validation/circular_island/get_compared_graphs.py
r5142 → r7276: a commented-out 'from Numeric import array, zeros, Float, allclose, resize, sqrt' line is deleted from the import block.
anuga_validation/circular_island/run_circular.py
r5442 → r7276: run_circular.py is ported from Numeric to numpy.

- The commented-out remnants in the polygon and math imports are tidied, 'from Numeric import array, zeros, Float, allclose, resize, sqrt' becomes 'import numpy as num', and 'from anuga.config import netcdf_float' plus 'from Scientific.IO.NetCDF import NetCDFFile' are added at the top, replacing the later local imports of array and NetCDFFile.
- The boundary time-series arrays are created with num.zeros(N, num.float), and the time, stage, xmomentum and ymomentum variables of the generated .tms file are created with netcdf_float instead of Float.
- circular_island_elevation() now computes the radial distance with num.sqrt.
anuga_validation/circular_island/sqrt_table_run_circular.py
r5442 → r7276: the same Numeric-to-numpy port as run_circular.py: 'import numpy as num' replaces the Numeric import, 'from anuga.config import netcdf_float' and the NetCDFFile import move to the top, the time-series arrays use num.zeros(N, num.float), and the .tms variables are created with netcdf_float instead of Float.
anuga_validation/okushiri_2005/Benchmark_2_Bathymetry.lic
r5023 → r7276: the XML declaration now uses single quotes and the blank line after it is removed; the checksum for Benchmark_2_Bathymetry.txt is rewritten as an unsigned 32-bit value, -1310724450 becoming 2984242846.
anuga_validation/okushiri_2005/Benchmark_2_input.lic
r5023 → r7276: the XML declaration now uses single quotes and the blank line after it is removed; nothing else changes.
anuga_validation/okushiri_2005/compare_timeseries.py
r4557 → r7276: 'from Numeric import allclose, argmin, argmax' becomes 'import numpy as num', and the calls change accordingly: num.allclose(reference_time, input_time) for the time-axis check, and num.argmax / num.argmin when locating the extrema used to report the time lag between observed and modelled maxima and minima.
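The time-lag measure reported by this script is the difference between the times at which the observed and modelled series reach their extrema, now located with num.argmax and num.argmin. A self-contained sketch with made-up data:

    import numpy as num

    reference_time = num.linspace(0.0, 25.0, 251)      # illustrative time axis
    observed = num.sin(reference_time / 4.0)            # illustrative gauge data
    model = num.sin((reference_time - 1.0) / 4.0)       # illustrative model output

    i0 = num.argmax(observed)          # index of the observed maximum
    i1 = num.argmax(model)             # index of the modelled maximum
    timelag_max = abs(reference_time[i1] - reference_time[i0])

    i0 = num.argmin(observed)
    i1 = num.argmin(model)
    timelag_min = abs(reference_time[i1] - reference_time[i0])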
anuga_validation/okushiri_2005/create_okushiri.py
r3915 → r7276: the 'from Numeric import array, zeros, Float, allclose' line is removed and the NetCDFFile import moves to the top of the file; the boundary time-series arrays are built with num.zeros(N, num.float), and the .tms variables (time, stage, xmomentum, ymomentum) are created with netcdf_float instead of Float.
anuga_validation/okushiri_2005/create_okushiri_original.py
r3913 → r7276: 'from Numeric import array, zeros, Float, allclose' becomes 'import numpy as num', the NetCDFFile import moves to the top, and 'from anuga.config import netcdf_float' is added; the time-series arrays use num.zeros(N, num.float), the .tms variables are created with netcdf_float, and a redundant local 'import project' near the mesh generation is removed.
anuga_validation/okushiri_2005/output_ch5-7-9.lic
r5020 → r7276: the XML declaration now uses single quotes and the blank line after it is removed; the checksum for output_ch5-7-9.txt is rewritten as an unsigned 32-bit value, -1044604103 becoming 3250363193.
anuga_validation/okushiri_2005/problem02.lic
r5023 → r7276: the XML declaration now uses single quotes and the blank line after it is removed; the checksum for problem02.doc is rewritten as an unsigned 32-bit value, -1811884659 becoming 2483082637.
anuga_validation/okushiri_2005/test_caching_of_set_quantity.py
r7040 → r7276: 'import Numeric as num' becomes 'import numpy as num'.
anuga_validation/okushiri_2005/timings.lic
r5023 → r7276: the XML declaration now uses single quotes and the blank line after it is removed; the checksum for timings.txt is rewritten as an unsigned 32-bit value, -1935951304 becoming 2359015992.
anuga_validation/performance_tests/okushiri/Benchmark_2.lic
r5026 → r7276: the XML declaration now uses single quotes and the blank line after it is removed; nothing else changes.
anuga_validation/performance_tests/okushiri/Benchmark_2_Bathymetry.lic
r5026 → r7276: the XML declaration now uses single quotes and the blank line after it is removed; nothing else changes.
anuga_validation/performance_tests/okushiri/Benchmark_2_input.lic
r5026 → r7276: the XML declaration now uses single quotes and the blank line after it is removed; the checksum for Benchmark_2_input.tms is rewritten as an unsigned 32-bit value, -725631838 becoming 3569335458.
anuga_validation/performance_tests/run_profile.py
r7181 → r7276: a commented-out '#import numpy as num  # not used?' line is added after the mesh_factory import.