source: branches/numpy_anuga_validation/automated_validation_tests/patong_beach_validation/validate_patong.py @ 6828

Last change on this file since 6828 was 6828, checked in by rwilson, 16 years ago

Changes to get auto_refresh of Patong validation working.

File size: 10.8 KB
Line 
1'''
2Automatic verification that the ANUGA code runs the Patong simulation
3and produces the expected output.
4
5Required files are downloaded from the ANUGA servers if they are
6out of date or missing.
7'''
8
9import sys
10import os
11import glob
12import unittest
13import time
14import shutil
15
16from anuga.utilities.system_tools import get_web_file, untar_file
17import anuga.utilities.log as log
18
19
# base URL for the remote ANUGA data
# NOTE(review): this is a private (10.x) address — only reachable from
# inside the ANUGA network; confirm before running off-site.
PATONG_HOST = 'http://10.7.64.243/'
PATONG_DATA_URL = PATONG_HOST + 'patong_validation_data/'

# path to the local data directory (downloaded *.tgz objects live here)
Local_Data_Directory = os.path.join('.', 'local_data')

# path to the remote data directory (scratch area for fetched digests)
Remote_Data_Directory = os.path.join('.', 'remote_data')

# sequence of required local data objects
# first is always the SWW file to be compared
Local_Data_Objects = ('patong.sww', 'data')

# name of stdout catch file for runmodel.py
RUNMODEL_STDOUT = 'runmodel.stdout'

# text at start of 'output dir' line in RUNMODEL_STDOUT file
OUTDIR_PREFIX = 'Make directory '

# Name of SWW file produced by the simulation (run_model.py)
OUTPUT_SWW = 'patong.sww'
42
43
def setup():
    '''Prepare for the validation run.

    Currently a placeholder: all required data settings live in
    project.py, so there is nothing to do here yet.
    '''

    pass
51
52
def update_local_data():
    '''Update local data objects from the server.

    These are expected to be *.tgz files/directories.

    For each object the remote digest is compared with the local one;
    the object's .tgz is (re)downloaded only when the digests differ or
    when either local file is missing.  After refreshing, every .tgz is
    unpacked into the local data directory.

    Return True if all went well, else False.
    '''

    # local function to update one data object
    def update_object(obj, auth):
        '''Update object 'obj' using authentication tuple 'auth'.

        Return updated auth object (success) or False (failure).
        '''

        log.info("Refreshing local object '%s'" % obj)

        # Get a unique date+time string to defeat caching.  The idea is to add
        # this to the end of any URL so proxy sees a different request.
        cache_defeat = '?' + time.strftime('%Y%m%d%H%M%S')

        # create local and remote paths, URLs, etc.
        remote_path = os.path.join(Remote_Data_Directory, obj)
        remote_digest = remote_path + '.tgz.digest'

        local_path = os.path.join(Local_Data_Directory, obj)
        local_file = local_path + '.tgz'
        local_digest = local_file + '.digest'

        object_url = PATONG_DATA_URL + obj + '.tgz'
        digest_url = object_url + '.digest'

        # see if missing either digest or object .tgz
        if not os.path.exists(local_digest) or not os.path.exists(local_file):
            # no digest or no object, download both digest and object
            log.info('Fetching remote file %s' % digest_url)
            auth = get_web_file(digest_url+cache_defeat,
                                local_digest, auth=auth)
            # use identity test: get_web_file returns the literal False on
            # failure, while a successful auth tuple must not match here
            if auth is False:
                log.info("Couldn't fetch file: %s" % digest_url)
                return False
            log.info('Fetching remote file %s' % object_url)
            auth = get_web_file(object_url+cache_defeat, local_file, auth=auth)
            if auth is False:
                return False
        else:
            # download object digest to remote data directory
            log.info('Fetching remote file %s' % digest_url)
            auth = get_web_file(digest_url+cache_defeat,
                                remote_digest, auth=auth)
            if auth is False:
                return False

            # compare remote with local digest ('with' guarantees the
            # handles are closed even if the read raises)
            with open(local_digest, 'r') as fd:
                local_data_digest = fd.read()

            with open(remote_digest, 'r') as fd:
                remote_data_digest = fd.read()

            # if digests differ, remember new digest and refresh object
            if local_data_digest != remote_data_digest:
                log.info('Local file %s is out of date' % obj)
                with open(local_digest, 'w') as fd:
                    fd.write(remote_data_digest)

                log.info('Fetching remote file %s' % object_url)
                auth = get_web_file(object_url+cache_defeat,
                                    local_file, auth=auth)
        return auth

    # create local data directory if required
    log.debug('Creating local directory: %s' % Local_Data_Directory)
    if not os.path.exists(Local_Data_Directory):
        os.mkdir(Local_Data_Directory)

    # clean out remote data copy directory
    log.debug('Cleaning remote directory: %s' % Remote_Data_Directory)
    shutil.rmtree(Remote_Data_Directory, ignore_errors=True)
    os.mkdir(Remote_Data_Directory)

    # refresh local files
    auth = None
    for data_object in Local_Data_Objects:
        log.debug('Trying to update: %s' % data_object)
        auth = update_object(data_object, auth)
        if auth is False:
            log.debug('Failed fetching %s, returning False' % data_object)
            return False

    # unpack *.tgz files
    for data_object in Local_Data_Objects:
        tar_path = os.path.join(Local_Data_Directory, data_object+'.tgz')
        log.info('Untarring %s in directory %s ...'
                 % (tar_path, Local_Data_Directory))
        untar_file(tar_path, target_dir=Local_Data_Directory)

    log.info('Local data has been refreshed.')
    return True
154
155
def can_we_run():
    '''Decide if we can run with the files we have.

    Return True if we *can* run, else False.

    Tell user what is happening first, then untar files.
    '''

    log.critical('Checking if you have the required files to run locally ...')

    # column width that aligns the per-object status report
    max_width = max([len(name) for name in Local_Data_Objects] + [0])

    have_files = True
    for name in Local_Data_Objects:
        # any missing required object means we can't run
        tgz_path = os.path.join(Local_Data_Directory, name+'.tgz')
        if os.path.exists(tgz_path):
            log.info('%s  found' % name.ljust(max_width))
        else:
            log.info('%s  MISSING!' % name.ljust(max_width))
            have_files = False

    if not have_files:
        log.critical('You must obtain the missing files before you can run '
                     'this validation.')
        return False

    log.critical('You have the required files.')

    # now untar file/directory objects
    for name in Local_Data_Objects:
        tgz_path = os.path.join(Local_Data_Directory, name+'.tgz')
        log.info('Untarring %s in directory %s ...'
                 % (tgz_path, Local_Data_Directory))
        untar_file(tgz_path, target_dir=Local_Data_Directory)

    return True
195
196
def run_simulation():
    '''Run the Patong simulation.

    Stdout of the model run is captured in RUNMODEL_STDOUT for later
    parsing.  Raises RuntimeError if the run exits with a non-zero
    status.
    '''

    # modify environment so we use the local data
    new_inundationhome = os.path.join(Local_Data_Directory, '')
    os.environ['INUNDATIONHOME'] = new_inundationhome
    new_muxhome = os.path.join(Local_Data_Directory, 'data')
    os.environ['MUXHOME'] = new_muxhome

    # We import here, _after_ environment variables are set —
    # presumably project reads them at import time; verify if changing.
    import project

    # run the simulation, produce SWW file
    log.critical('Running Patong simulation ...')
    cmd = 'python run_model.py > %s' % RUNMODEL_STDOUT
    res = os.system(cmd)
    # explicit check instead of 'assert': asserts vanish under 'python -O'
    if res != 0:
        raise RuntimeError('Simulation command %r failed with status %d'
                           % (cmd, res))
214
def check_that_output_is_as_expected():
    '''Check that validation output is as required.

    Parses RUNMODEL_STDOUT for the output directory, then compares the
    generated SWW file against the expected one with cmpsww.py.
    Returns 1 if the check could not be performed; otherwise the
    comparison result is logged and None is returned.
    '''

    # get path to expected SWW file
    log.critical('Checking that simulation results are as expected ...')
    local_sww = os.path.join(Local_Data_Directory, Local_Data_Objects[0])

    # get output directory from stdout capture file
    try:
        fd = open(RUNMODEL_STDOUT, 'r')
    except IOError as e:
        log.critical("Can't open catch file '%s': %s"
                     % (RUNMODEL_STDOUT, str(e)))
        return 1
    lines = fd.readlines()
    fd.close()      # BUG FIX: was 'fd.close' (attribute access, never closed)

    output_directory = None
    for line in lines:
        if line.startswith(OUTDIR_PREFIX):
            output_directory = line.replace(OUTDIR_PREFIX, '', 1)
            output_directory = output_directory.strip('\n')
            break
    if output_directory is None:
        log.critical("Couldn't find line starting with '%s' in file '%s'"
                     % (OUTDIR_PREFIX, RUNMODEL_STDOUT))
        return 1

    # compare SWW files here and there
    new_output_sww = os.path.join(output_directory, OUTPUT_SWW)
    cmd = 'python cmpsww.py %s %s > cmpsww.stdout' % (local_sww, new_output_sww)
    res = os.system(cmd)
    if res == 0:
        log.info('Simulation results are as expected.')
    else:
        log.critical('Simulation results are NOT as expected.')
        with open('cmpsww.stdout', 'r') as fd:
            cmp_error = fd.readlines()
        log.critical('\n' + ''.join(cmp_error))
255
256
def teardown():
    '''Clean up after validation run.'''

    # remove every data object from the local data directory
    for name in Local_Data_Objects:
        path = os.path.join(Local_Data_Directory, name)
        if os.path.isfile(path):
            os.remove(path)
        else:
            # directories (and anything non-file) are removed recursively
            shutil.rmtree(path, ignore_errors=True)

    # remove remote directory and stdout capture file
    shutil.rmtree(Remote_Data_Directory, ignore_errors=True)
    try:
        os.remove(RUNMODEL_STDOUT)
    except OSError:
        # capture file may not exist — nothing to do
        pass
274           
275
################################################################################
# Mainline - run the simulation, check output.
################################################################################

# set logging levels
log.console_logging_level = log.INFO
setup()

# prepare user for what is about to happen (proxy details may be needed
# to reach the ANUGA data server)
log.critical('')
log.critical('This validation requires a working internet connection to run.')
log.critical('')
log.critical('If you are behind a proxy server you will need to supply your '
             'proxy details')
log.critical('such as the proxy server address and your proxy username and '
             'password.  These')
log.critical('can be defined in one or more of the environment variables:')
log.critical('\tHTTP_PROXY')
log.critical('\tPROXY_USERNAME')
log.critical('\tPROXY_PASSWORD')
log.critical('if you wish.  If not supplied in environment variables, you '
             'will be prompted for')
log.critical('the details.')
log.critical('')
log.critical('You may still run this validation without an internet '
             'connection if you have the')
log.critical('required files.')
log.critical('*'*80)
log.critical('')

# make sure local data is up to date; if the refresh over the internet
# fails, fall back to whatever local files already exist
if not update_local_data():
    log.info("Can't update local files over the internet (for some reason).")
    if not can_we_run():
        log.critical('')
        log.critical('*'*80)
        log.critical("Can't connect via the internet and you don't have the "
                     "required files.")
        log.critical('Terminating the validation.')
        log.critical('')
        log.critical('If you get the missing files from %s' % PATONG_DATA_URL)
        log.critical('then you can try to run the validation again.')
        sys.exit(10)    # exit code 10: required data unavailable

# run the simulation
run_simulation()

# check output is as expected
check_that_output_is_as_expected()

# clean up
teardown()
Note: See TracBrowser for help on using the repository browser.