source: branches/numpy_anuga_validation/automated_validation_tests/patong_beach_validation/validate_patong.py @ 6808

Last change on this file since 6808 was 6808, checked in by rwilson, 15 years ago

Improved error handling.

File size: 10.3 KB
Line 
1'''
2Automatic verification that the ANUGA code runs the Patong simulation
3and produces the expected output.
4
5Required files are downloaded from the ANUGA servers if they are
6out of date or missing.
7'''
8
9import sys
10import os
11import glob
12import unittest
13import time
14import shutil
15
16from anuga.utilities.system_tools import get_web_file, untar_file
17import anuga.utilities.log as log
18
19
# base URL for the remote ANUGA data server
PATONG_HOST = 'http://10.7.64.243/'
PATONG_DATA_URL = PATONG_HOST + 'patong_validation_data/'

# path to the local data directory (downloads are kept here)
Local_Data_Directory = os.path.join('.', 'local_data')

# path to the remote data directory (scratch copies of remote digests)
Remote_Data_Directory = os.path.join('.', 'remote_data')

# sequence of required local data objects (each fetched as <name>.tgz)
# first is always the SWW file to be compared
Local_Data_Objects = ('patong.sww', 'data')

# name of stdout catch file for runmodel.py
RUNMODEL_STDOUT = 'runmodel.stdout'

# text at start of 'output dir' line in RUNMODEL_STDOUT file
OUTDIR_PREFIX = 'Make directory '

# Name of SWW file produced by the simulation (run_model.py)
OUTPUT_SWW = 'patong.sww'
43
def setup():
    '''Prepare for the validation run.

    Currently a placeholder: the required data settings are assumed
    to be present in project.py.
    '''

    return None
51
52
def update_local_data():
    '''Update local data objects from the server.

    These are expected to be *.tgz files/directories.

    Return True if all local data was refreshed and unpacked, else False.
    '''

    # local function to update one data object
    def update_object(obj, auth):
        '''Update object 'obj' using authentication tuple 'auth'.

        Return updated auth object (success) or False (failure).
        '''

        log.info("Refreshing local object '%s'" % obj)

        # Get a unique date+time string to defeat caching.  The idea is to add
        # this to the end of any URL so proxy sees a different request.
        cache_defeat = '?' + time.strftime('%Y%m%d%H%M%S')

        # create local and remote paths, URLs, etc.
        remote_path = os.path.join(Remote_Data_Directory, obj)
        remote_digest = remote_path + '.tgz.digest'

        local_path = os.path.join(Local_Data_Directory, obj)
        local_file = local_path + '.tgz'
        local_digest = local_file + '.digest'

        object_url = PATONG_DATA_URL + obj + '.tgz'
        digest_url = object_url + '.digest'

        # see if missing either digest or object .tgz
        if not os.path.exists(local_digest) or not os.path.exists(local_file):
            # no digest or no object, download both digest and object
            log.info('Fetching remote file %s' % digest_url)
            auth = get_web_file(digest_url+cache_defeat,
                                local_digest, auth=auth)
            if auth is False:
                return False
            log.info('Fetching remote file %s' % object_url)
            auth = get_web_file(object_url+cache_defeat, local_file, auth=auth)
            if auth is False:
                return False
        else:
            # download object digest to remote data directory
            log.info('Fetching remote file %s' % digest_url)
            auth = get_web_file(digest_url+cache_defeat,
                                remote_digest, auth=auth)
            if auth is False:
                return False

            # compare remote with local digest
            # ('with' guarantees the files are closed, even on a read error)
            with open(local_digest, 'r') as fd:
                local_data_digest = fd.read()

            with open(remote_digest, 'r') as fd:
                remote_data_digest = fd.read()

            # if digests differ, refresh object and remember new digest
            if local_data_digest != remote_data_digest:
                log.info('Local file %s is out of date' % obj)
                with open(local_digest, 'w') as fd:
                    fd.write(remote_data_digest)

                log.info('Fetching remote file %s' % object_url)
                auth = get_web_file(object_url+cache_defeat,
                                    local_file, auth=auth)
        return auth

    # create local data directory if required
    if not os.path.exists(Local_Data_Directory):
        os.mkdir(Local_Data_Directory)

    # clean out remote data copy directory
    shutil.rmtree(Remote_Data_Directory, ignore_errors=True)
    os.mkdir(Remote_Data_Directory)

    # refresh local files, threading the auth tuple through the downloads
    auth = None
    for data_object in Local_Data_Objects:
        auth = update_object(data_object, auth)
        if auth is False:
            return False

    # unpack *.tgz files
    for data_object in Local_Data_Objects:
        tar_path = os.path.join(Local_Data_Directory, data_object+'.tgz')
        log.info('Untarring %s in directory %s' % (tar_path, Local_Data_Directory))
        untar_file(tar_path, target_dir=Local_Data_Directory)

    log.info('Local data has been refreshed.')
    return True
147
148
def can_we_run():
    '''Decide if we can run with the files we have.

    Return True if we *can* run, else False.

    Tell user what is happening first, then untar files.
    '''

    log.critical('Checking if you have the required files ...')

    # width of longest object name, for aligned report output
    have_files = True
    max_width = 0
    for obj in Local_Data_Objects:
        max_width = max(len(obj), max_width)

    for obj in Local_Data_Objects:
        # if we don't have required object, can't run
        obj_path = os.path.join(Local_Data_Directory, obj+'.tgz')
        if os.path.exists(obj_path):
            log.info('%s  found' % obj.ljust(max_width))
        else:
            log.info('%s  MISSING!' % obj.ljust(max_width))
            have_files = False

    if not have_files:
        log.critical('You must obtain the missing files before you can run '
                     'this validation.')
        return False

    log.critical('You have the required files.')

    # now untar file/directory objects
    # (recompute obj_path here: previously the stale path left over from the
    # check loop above was untarred repeatedly instead of each object's own)
    for obj in Local_Data_Objects:
        obj_path = os.path.join(Local_Data_Directory, obj+'.tgz')
        log.debug('Untarring %s in directory %s'
                  % (obj_path, Local_Data_Directory))
        untar_file(obj_path, target_dir=Local_Data_Directory)

    return True
187
188
def run_simulation():
    '''Run the Patong simulation.

    Redirects run_model.py stdout into RUNMODEL_STDOUT for later parsing.
    Raises AssertionError if the simulation process exits non-zero.
    '''

    # modify environment so we use the local data
    new_inundationhome = os.path.join(Local_Data_Directory, '')
    os.environ['INUNDATIONHOME'] = new_inundationhome
    new_muxhome = os.path.join(Local_Data_Directory, 'data')
    os.environ['MUXHOME'] = new_muxhome

    # We import here, _after_ environment variables are set
    import project

    # run the simulation, produce SWW file
    log.critical('Running Patong simulation ...')
    cmd = 'python run_model.py > %s' % RUNMODEL_STDOUT
    res = os.system(cmd)
    # explicit check instead of bare 'assert', which is silently stripped
    # when python runs with -O
    if res != 0:
        raise AssertionError('%s failed with status %d' % (cmd, res))
206
def check_that_output_is_as_expected():
    '''Check that validation output is as required.

    Compares the SWW file produced by the simulation against the expected
    local SWW file using cmpsww.py.  Returns 1 if the stdout capture file
    can't be read or parsed; otherwise returns None after logging the
    comparison result.
    '''

    # get path to expected SWW file
    log.critical('Checking that simulation results are as expected ...')
    local_sww = os.path.join(Local_Data_Directory, Local_Data_Objects[0])

    # get output directory from stdout capture file
    try:
        fd = open(RUNMODEL_STDOUT, 'r')
    except IOError as e:
        log.critical("Can't open catch file '%s': %s"
                     % (RUNMODEL_STDOUT, str(e)))
        return 1
    lines = fd.readlines()
    fd.close()    # was 'fd.close' (no call) - the file was never closed

    # find the 'Make directory ...' line to learn where output was written
    output_directory = None
    for line in lines:
        if line.startswith(OUTDIR_PREFIX):
            output_directory = line.replace(OUTDIR_PREFIX, '', 1)
            output_directory = output_directory.strip('\n')
            break
    if output_directory is None:
        log.critical("Couldn't find line starting with '%s' in file '%s'"
                     % (OUTDIR_PREFIX, RUNMODEL_STDOUT))
        return 1

    # compare SWW files here and there
    new_output_sww = os.path.join(output_directory, OUTPUT_SWW)
    cmd = 'python cmpsww.py %s %s > cmpsww.stdout' % (local_sww, new_output_sww)
    res = os.system(cmd)
    if res == 0:
        log.info('Simulation results are as expected.')
    else:
        log.critical('Simulation results are NOT as expected.')
        with open('cmpsww.stdout', 'r') as fd:
            cmp_error = fd.readlines()
        log.critical('\n' + ''.join(cmp_error))
247
248
def teardown():
    '''Clean up after validation run.'''

    # remove each data object from the local data directory, whether it
    # was unpacked as a plain file or as a directory tree
    for obj in Local_Data_Objects:
        path = os.path.join(Local_Data_Directory, obj)
        if os.path.isfile(path):
            os.remove(path)
        else:
            shutil.rmtree(path, ignore_errors=True)

    # drop the remote scratch directory and the stdout capture file;
    # a missing capture file is not an error
    shutil.rmtree(Remote_Data_Directory, ignore_errors=True)
    try:
        os.remove(RUNMODEL_STDOUT)
    except OSError:
        pass
266           
267
################################################################################
# Mainline - run the simulation, check output.
################################################################################

# set logging levels
log.console_logging_level = log.INFO
setup()

# prepare user for what is about to happen
# (proxy credentials may come from the environment or an interactive prompt)
log.critical('')
log.critical('This validation requires a working internet connection to run.')
log.critical('')
log.critical('If you are behind a proxy server you will need to supply your '
             'proxy details')
log.critical('such as the proxy server address and your proxy username and '
             'password.  These')
log.critical('can be defined in one or more of the environment variables:')
log.critical('\tHTTP_PROXY')
log.critical('\tPROXY_USERNAME')
log.critical('\tPROXY_PASSWORD')
log.critical('if you wish.  If not supplied in environment variables, you '
             'will be prompted for')
log.critical('the details.')
log.critical('')
log.critical('You may still run this validation without an internet '
             'connection if you have the')
log.critical('required files.')
log.critical('*'*80)
log.critical('')

# make sure local data is up to date; if the download fails, fall back to
# whatever files are already on disk before giving up
if not update_local_data():
    if not can_we_run():
        log.critical('')
        log.critical('*'*80)
        log.critical("Can't connect via the internet and you don't have the "
                     "required files.")
        log.critical('Terminating the validation.')
        log.critical('')
        log.critical('If you get the missing files from %s' % PATONG_DATA_URL)
        log.critical('then you can try to run the validation again.')
        # exit code 10: missing data and no connectivity
        sys.exit(10)

# run the simulation
run_simulation()

# check output is as expected
check_that_output_is_as_expected()

# clean up
teardown()
Note: See TracBrowser for help on using the repository browser.