source: anuga_validation/automated_validation_tests/patong_beach_validation/validate.py @ 6890

Last change on this file since 6890 was 6890, checked in by rwilson, 15 years ago

Commit to remove deleted files.

  • Property svn:executable set to *
File size: 13.6 KB
Line 
1'''
2Automatic verification that the ANUGA code runs the Patong simulation
3and produces the expected output.
4
5Required files are downloaded from the ANUGA servers if they are
6out of date or missing.
7'''
8
9import sys
10import os
11import glob
12import unittest
13import time
14import shutil
15
16from anuga.utilities.system_tools import get_web_file, untar_file, file_length
17import anuga.utilities.log as log
18
19
# sourceforge download mirror hosts (must end with '/')
# try these in turn for each file
## NOTE: It would be more reliable if we could somehow 'poll' Sourceforge
##       for a list of mirrors instead of hard-coding a list here.  That may
##       be too difficult, as we only get the chance to select a mirror when
##       actually downloading a file.
MIRRORS = ['http://transact.dl.sourceforge.net/sourceforge/anuga/',       # au
           'http://voxel.dl.sourceforge.net/sourceforge/anuga/',          # us
           'http://superb-west.dl.sourceforge.net/sourceforge/anuga/',    # us
           'http://jaist.dl.sourceforge.net/sourceforge/anuga/',          # jp
           'http://dfn.dl.sourceforge.net/sourceforge/anuga/'             # de
          ]
##MIRRORS = ['http://10.7.64.243/patong_validation_data/']       # local linux box

# URL to hand-get data files, if required
DATA_FILES_URL = 'http://sourceforge.net/project/showfiles.php?group_id=172848'

# sequence of mandatory local data objects (tarballs fetched from a mirror)
Mandatory_Data_Objects = ('data.tgz',)

# sequence of optional local data objects
# (one expected-results tarball per validation type: TRIAL, BASIC, FINAL)
Optional_Data_Objects = ('patong.sww.TRIAL.tgz',
                         'patong.sww.BASIC.tgz',
                         'patong.sww.FINAL.tgz'
                        )

# path to the local data directory
Local_Data_Directory = 'local_data'

# path to the remote data directory (scratch space for fetched digests)
Remote_Data_Directory = 'remote_data'

# name of stdout catch file for runmodel.py
RUNMODEL_STDOUT = 'runmodel.stdout'

# text at start of 'output dir' line in RUNMODEL_STDOUT file
OUTDIR_PREFIX = 'Make directory '

# Name of SWW file produced by simulation
OUTPUT_SWW = 'patong.sww'
60
61
def setup():
    '''Prepare for the validation run.

    Intended to check that we have the required data set in project.py.
    Currently a placeholder that performs no checks.
    '''

    return None
69
70
def refresh_local_data(data_objects, target_dir, mirrors):
    '''Update local data objects from the server.

    data_objects:   sequence of file names to refresh
    target_dir:     directory in which to put the refreshed files
    mirrors:        list of mirror site URLs to use (each must end with '/')

    Each file has an associated *.digest file used to decide
    if the local file needs refreshing.

    Return True if all went well, else None.
    '''

    # decision function to decide if a file contains HTML
    def is_html(filename):
        '''Return True if the given file looks like HTML.

        A mirror that cannot serve the requested file typically returns
        an HTML error page instead of the file's data, so an HTML result
        means the download failed.
        '''

        fd = open(filename)
        data = fd.read(1024)
        fd.close()

        # BUGFIX: the original returned False when DOCTYPE was found,
        # i.e. it classified HTML pages as non-HTML and vice versa.
        return 'DOCTYPE' in data

    # local function to get remote file from one of mirrors
    def get_remote_from_mirrors(remote, local, auth, mirrors):
        '''Get 'remote' from one of 'mirrors', put in 'local'.

        Return the (possibly updated) auth object on success, else None.
        '''

        # Get a unique date+time string to defeat caching.  The idea is to add
        # this to the end of any URL so proxy sees a different request.
        cache_defeat = '?' + time.strftime('%Y%m%d%H%M%S')

        # try each mirror in turn until one returns real (non-HTML) data
        for mirror in mirrors:
            log.info('Fetching remote file %s from mirror %s'
                     % (remote, mirror))

            remote_url = mirror + remote + cache_defeat
            # BUGFIX: message read 'Trying to fetching file'
            log.debug('Trying to fetch file %s' % remote_url)
            new_auth = get_web_file(remote_url, local, auth=auth)
            if new_auth != False and not is_html(local):
                log.debug('Success fetching file %s' % remote_url)
                return new_auth

        # BUGFIX: log 'remote' here; 'remote_url' was unbound when
        # 'mirrors' was empty.
        log.info('Could not fetch file %s' % remote)
        return None


    # local function to compare contents of two files
    def files_same(file_a, file_b):
        '''Compare two files to see if contents are the same.'''

        fd = open(file_a, 'r')
        data_a = fd.read()
        fd.close()

        fd = open(file_b, 'r')
        data_b = fd.read()
        fd.close()

        return data_a == data_b


    # local function to update one data object
    def refresh_object(obj, auth, mirrors):
        '''Update object 'obj' using authentication tuple 'auth'.

        Return updated auth object (success) or None (failure).
        '''

        log.info("Refreshing local object '%s'" % obj)

        # create local and remote file paths.
        obj_digest = obj + '.digest'

        remote_file = os.path.join(Remote_Data_Directory, obj)
        remote_digest = remote_file + '.digest'

        local_file = os.path.join(target_dir, obj)
        local_digest = local_file + '.digest'

        # see if missing either digest or object .tgz
        if not os.path.exists(local_digest) or not os.path.exists(local_file):
            # no digest or no object, download both digest and object
            auth = get_remote_from_mirrors(obj_digest, local_digest, auth, mirrors)
            if auth:
                auth = get_remote_from_mirrors(obj, local_file, auth, mirrors)
        else:
            # download object digest to remote data directory
            auth = get_remote_from_mirrors(obj_digest, remote_digest, auth, mirrors)
            if auth:
                # BUGFIX: the original compared the undefined names
                # 'local_data_digest'/'remote_data_digest' (NameError).
                if not files_same(local_digest, remote_digest):
                    # digests differ, refresh object
                    log.info('Local file %s is out of date' % obj)
                    shutil.move(remote_digest, local_digest)
                    auth = get_remote_from_mirrors(obj, local_file, auth, mirrors)

        return auth

    # create local data directory if required
    # (use the 'target_dir' parameter; the only caller passes
    # Local_Data_Directory, so behaviour is unchanged)
    log.debug('Creating local directory: %s' % target_dir)
    if not os.path.exists(target_dir):
        os.mkdir(target_dir)

    # clean out remote data copy directory
    log.debug('Cleaning remote directory: %s' % Remote_Data_Directory)
    shutil.rmtree(Remote_Data_Directory, ignore_errors=True)
    os.mkdir(Remote_Data_Directory)

    # refresh every requested object.  The original had two byte-identical
    # loops over the mandatory and optional module tuples while ignoring the
    # 'data_objects' parameter; iterate the parameter instead (the caller
    # passes mandatory + optional in the same order).
    auth = None
    for data_object in data_objects:
        auth = refresh_object(data_object, auth, mirrors)
        if auth is None:
            log.debug('Failed fetching %s, returning None' % data_object)
            return None

    log.info('Local data has been refreshed.')
    return True
198
199
def can_we_run():
    '''Decide if we can run with the files we have.

    Return True if we *can* run, else False.

    Tell user what is happening first, then untar files.

    NOTE(review): despite the 'one or more' wording in the original
    comments, every optional object is required here (the mainline runs
    one simulation per optional object).
    '''

    log.critical('Checking if you have the required files to run:')

    # widest object name, for aligned report output
    all_objects = list(Mandatory_Data_Objects) + list(Optional_Data_Objects)
    max_width = max([0] + [len(name) for name in all_objects])

    # check both groups; any missing file (mandatory or optional) means
    # we cannot run, only the report message differs
    all_present = True
    checks = ((Mandatory_Data_Objects, 'MISSING AND REQUIRED!'),
              (Optional_Data_Objects, 'MISSING!'))
    for objects, missing_msg in checks:
        for obj in objects:
            obj_path = os.path.join(Local_Data_Directory, obj)
            if os.path.exists(obj_path):
                log.info('\t%s  found' % obj.ljust(max_width))
            else:
                log.info('\t%s  %s' % (obj.ljust(max_width), missing_msg))
                all_present = False

    if not all_present:
        log.critical('You must obtain the missing files before you can run '
                     'this validation.')
        return False

    log.critical('You have the required files.')

    return True
244
245
def run_simulation(vtype, sim_obj):
    '''Run the Patong simulation.

    vtype    validation type string (e.g. 'trial', 'basic', 'final')
    sim_obj  name of the expected-results tarball for this run
    '''

    # untar the mandatory data objects, then the expected-results tarball
    for tar_name in list(Mandatory_Data_Objects) + [sim_obj]:
        tar_path = os.path.join(Local_Data_Directory, tar_name)
        log.info('Untarring %s in directory %s ...'
                 % (tar_path, Local_Data_Directory))
        untar_file(tar_path, target_dir=Local_Data_Directory)

    # modify environment so we use the local data
    os.environ['INUNDATIONHOME'] = os.path.join(Local_Data_Directory, '')
    os.environ['MUXHOME'] = os.path.join(Local_Data_Directory, 'data')

    # instantiate project.py from its template, substituting the setup type
    fd = open('project.py.template', 'r')
    template = fd.read()
    fd.close()

    fd = open('project.py', 'w')
    fd.write(template.replace('#!SETUP!#', vtype.lower()))
    fd.close()

    # We import here, _after_ environment variables are set
    import project

    # run the simulation, produce SWW file
    log.critical('Running Patong simulation ...')
    cmd = 'python run_model.py > %s' % RUNMODEL_STDOUT
    res = os.system(cmd)
    assert res == 0
289
def check_that_output_is_as_expected(vtype=None, sim_obj=None):
    '''Check that validation output is as required.

    vtype    validation type string (e.g. 'trial'); currently unused
    sim_obj  name of the expected-results tarball; currently unused

    Return 1 on failure to locate the output, else None.

    BUGFIX: the original took no parameters but the mainline calls it
    with (vtype, odo) — a TypeError at runtime.  The parameters are
    accepted (with defaults, so zero-argument calls still work).
    '''

    # get path to expected SWW file
    log.critical('Checking that simulation results are as expected ...')
    # BUGFIX: the original referenced the undefined name
    # 'Local_Data_Objects[0]' (NameError).  run_simulation() untars the
    # expected results into Local_Data_Directory, so the expected SWW is
    # looked for there.  NOTE(review): assumes the tarball unpacks to a
    # file named OUTPUT_SWW — confirm against the tarball contents.
    local_sww = os.path.join(Local_Data_Directory, OUTPUT_SWW)

    # get output directory from stdout capture file
    try:
        fd = open(RUNMODEL_STDOUT, 'r')
    except IOError as e:
        log.critical("Can't open catch file '%s': %s"
                     % (RUNMODEL_STDOUT, str(e)))
        return 1
    lines = fd.readlines()
    # BUGFIX: original said 'fd.close' (attribute access, no call)
    fd.close()

    # scan for the 'Make directory ...' line written by run_model.py
    output_directory = None
    for line in lines:
        if line.startswith(OUTDIR_PREFIX):
            output_directory = line.replace(OUTDIR_PREFIX, '', 1)
            output_directory = output_directory.strip('\n')
            break
    if output_directory is None:
        log.critical("Couldn't find line starting with '%s' in file '%s'"
                     % (OUTDIR_PREFIX, RUNMODEL_STDOUT))
        return 1

    # compare SWW files here and there
    new_output_sww = os.path.join(output_directory, OUTPUT_SWW)
    cmd = 'python cmpsww.py %s %s > cmpsww.stdout' % (local_sww, new_output_sww)
    res = os.system(cmd)
    if res == 0:
        log.info('Simulation results are as expected.')
    else:
        log.critical('Simulation results are NOT as expected.')
        fd = open('cmpsww.stdout', 'r')
        cmp_error = fd.readlines()
        fd.close()
        log.critical('\n' + ''.join(cmp_error))
330
331
def teardown():
    '''Clean up after validation run.

    Removes the downloaded data objects, the remote scratch directory
    and the captured run_model.py stdout file.
    '''

    # clear all data objects from local data directory
    # BUGFIX: the original iterated the undefined name
    # 'Local_Data_Objects' (NameError); clean up every mandatory and
    # optional data object instead.
    for data_object in Mandatory_Data_Objects + Optional_Data_Objects:
        obj_path = os.path.join(Local_Data_Directory, data_object)
        if os.path.isfile(obj_path):
            os.remove(obj_path)
        else:
            # object untarred to a directory tree; best-effort removal
            shutil.rmtree(obj_path, ignore_errors=True)

    # remove remote directory and stdout capture file
    shutil.rmtree(Remote_Data_Directory, ignore_errors=True)
    try:
        os.remove(RUNMODEL_STDOUT)
    except OSError:
        # already gone — nothing to do
        pass
349           
350
351################################################################################
352# Mainline - run the simulation, check output.
353################################################################################
354
355# set logging levels
356log.console_logging_level = log.INFO
357setup()
358
359# prepare user for what is about to happen
360
361msg = '''
362This validation requires a working internet connection to refresh it's files.
363You may still run this validation without an internet connection if you have the
364required files.
365
366If you are behind a proxy server you will need to supply your proxy details
367such as the proxy server address and your proxy username and password.  These
368can be defined in one or more of the environment variables:
369    HTTP_PROXY
370    PROXY_USERNAME
371    PROXY_PASSWORD
372if you wish.  If not supplied in environment variables you will be prompted for
373the information.
374'''
375
376log.critical(msg)
377
378# make sure local data is up to date
379data_objects = []
380for o in Mandatory_Data_Objects:
381    data_objects.append(o)
382for o in Optional_Data_Objects:
383    data_objects.append(o)
384   
385if not refresh_local_data(data_objects, Local_Data_Directory, MIRRORS):
386    if not can_we_run():
387        log.critical("Can't refresh via the internet and you don't have the "
388                     "required files.")
389        log.critical('Terminating the validation.')
390        log.critical('')
391        log.critical('If you get the missing files from %s' % DATA_FILES_URL)
392        log.critical('then you can try to run the validation again.  Put the '
393                     'files into the directory')
394        log.critical("%s." % Local_Data_Directory)
395        sys.exit(10)
396
397# now run what simulations we can and check output is as expected
398for odo in Optional_Data_Objects:
399    (_, vtype, _) = odo.rsplit('.', 2)
400    vtype = vtype.lower()
401    print "Running Patong '%s' validation" % vtype
402    run_simulation(vtype, odo)
403    check_that_output_is_as_expected(vtype, odo)
404
405# clean up
406#teardown()
Note: See TracBrowser for help on using the repository browser.