source: anuga_validation/automated_validation_tests/patong_beach_validation/validate_patong.py @ 6844

Last change on this file since 6844 was 6844, checked in by rwilson, 15 years ago

Add Patong beach validation in main trunk.

  • Property svn:executable set to *
File size: 10.5 KB
Line 
1'''
2Automatic verification that the ANUGA code runs the Patong simulation
3and produces the expected output.
4
5Required files are downloaded from the ANUGA servers if they are
6out of date or missing.
7'''
8
9import sys
10import os
11import glob
12import unittest
13import time
14import shutil
15
16from anuga.utilities.system_tools import get_web_file, untar_file
17import anuga.utilities.log as log
18
19
# base URL for the remote ANUGA data
# NOTE(review): this is a private (10.x.x.x) address — only reachable from
# inside the ANUGA network; confirm before relying on it externally.
PATONG_HOST = 'http://10.7.64.243/'
PATONG_DATA_URL = PATONG_HOST + 'patong_validation_data/'

# path to the local data directory (created/refreshed by update_local_data())
Local_Data_Directory = os.path.join('.', 'local_data')

# path to the remote data directory (scratch area for freshly fetched digests)
Remote_Data_Directory = os.path.join('.', 'remote_data')

# sequence of required local data objects, each fetched as '<name>.tgz'
# first is always the SWW file to be compared
Local_Data_Objects = ('patong.sww', 'data')

# name of stdout catch file for runmodel.py
RUNMODEL_STDOUT = 'runmodel.stdout'

# text at start of 'output dir' line in RUNMODEL_STDOUT file
OUTDIR_PREFIX = 'Make directory '

# name of the SWW file produced by the simulation (run_model.py)
OUTPUT_SWW = 'patong.sww'
43
def setup():
    '''Hook run before the validation starts.

    Currently a no-op placeholder; the required data listed in project.py
    is checked later by update_local_data()/can_we_run().
    '''
    return None
51
52
def update_local_data():
    '''Update local data objects from the server.

    These are expected to be *.tgz files/directories.
    Return True if all went well, else False.

    The .tgz archives are only re-downloaded when the local digest file
    differs from the one on the server, or is missing entirely.
    '''

    # local function to update one data object
    def update_object(obj, auth):
        '''Update object 'obj' using authentication tuple 'auth'.

        Return updated auth object (success) or False (failure).

        The auth value returned by get_web_file() is threaded through
        successive calls so proxy credentials are only asked for once.
        '''

        log.info("Refreshing local object '%s'" % obj)

        # Get a unique date+time string to defeat caching.  The idea is to add
        # this to the end of any URL so proxy sees a different request.
        cache_defeat = '?' + time.strftime('%Y%m%d%H%M%S')

        # create local and remote paths, URLs, etc.
        remote_path = os.path.join(Remote_Data_Directory, obj)
        remote_digest = remote_path + '.tgz.digest'

        local_path = os.path.join(Local_Data_Directory, obj)
        local_file = local_path + '.tgz'
        local_digest = local_file + '.digest'

        object_url = PATONG_DATA_URL + obj + '.tgz'
        digest_url = object_url + '.digest'

        # see if missing either digest or object .tgz
        if not os.path.exists(local_digest) or not os.path.exists(local_file):
            # no digest or no object, download both digest and object
            log.info('Fetching remote file %s' % digest_url)
            auth = get_web_file(digest_url+cache_defeat,
                                local_digest, auth=auth)
            # '== False' (not 'is False') kept deliberately: on success auth
            # is a tuple, on failure get_web_file() returns False
            if auth == False:
                log.info("Couldn't fetch file: %s" % digest_url)
                return False
            log.info('Fetching remote file %s' % object_url)
            auth = get_web_file(object_url+cache_defeat, local_file, auth=auth)
            if auth == False:
                return False
        else:
            # download object digest to remote data directory
            log.info('Fetching remote file %s' % digest_url)
            auth = get_web_file(digest_url+cache_defeat,
                                remote_digest, auth=auth)
            if auth == False:
                return False

            # compare remote with local digest
            fd = open(local_digest, 'r')
            local_data_digest = fd.read()
            fd.close()

            fd = open(remote_digest, 'r')
            remote_data_digest = fd.read()
            fd.close()

            # if digests differ, refresh object
            if local_data_digest != remote_data_digest:
                log.info('Local file %s is out of date' % obj)
                # overwrite local digest first, then fetch the archive itself
                fd = open(local_digest, 'w')
                fd.write(remote_data_digest)
                fd.close()

                log.info('Fetching remote file %s' % object_url)
                auth = get_web_file(object_url+cache_defeat,
                                    local_file, auth=auth)
        return auth

    # create local data directory if required
    log.debug('Creating local directory: %s' % Local_Data_Directory)
    if not os.path.exists(Local_Data_Directory):
        os.mkdir(Local_Data_Directory)

    # clean out remote data copy directory
    log.debug('Cleaning remote directory: %s' % Remote_Data_Directory)
    shutil.rmtree(Remote_Data_Directory, ignore_errors=True)
    os.mkdir(Remote_Data_Directory)

    # refresh local files, carrying auth credentials from object to object
    auth = None
    for data_object in Local_Data_Objects:
        log.debug('Trying to update: %s' % data_object)
        auth = update_object(data_object, auth)
        if auth == False:
            log.debug('Failed fetching %s, returning False' % data_object)
            return False

    log.info('Local data has been refreshed.')
    return True
147
148
def can_we_run():
    '''Decide if we can run with the files we have.

    Return True if we *can* run, else False.

    Logs a found/MISSING line for every required data archive so the
    user can see exactly what is missing.
    '''

    log.critical('Checking if you have the required files to run locally ...')

    # pad names to a common width so the found/MISSING column lines up
    pad = max([0] + [len(name) for name in Local_Data_Objects])

    all_present = True
    for name in Local_Data_Objects:
        # a missing .tgz archive means we cannot run
        archive = os.path.join(Local_Data_Directory, name + '.tgz')
        if os.path.exists(archive):
            log.info('%s  found' % name.ljust(pad))
        else:
            log.info('%s  MISSING!' % name.ljust(pad))
            all_present = False

    if not all_present:
        log.critical('You must obtain the missing files before you can run '
                     'this validation.')
        return False

    log.critical('You have the required files.')

    return True
181
182
def run_simulation():
    '''Run the Patong simulation.

    Unpacks the local data archives, points ANUGA at them via environment
    variables and shells out to run_model.py, capturing its stdout.
    '''

    # unpack every required *.tgz into the local data directory
    for name in Local_Data_Objects:
        tar_path = os.path.join(Local_Data_Directory, name + '.tgz')
        log.info('Untarring %s in directory %s ...'
                 % (tar_path, Local_Data_Directory))
        untar_file(tar_path, target_dir=Local_Data_Directory)

    # modify environment so we use the local data
    os.environ['INUNDATIONHOME'] = os.path.join(Local_Data_Directory, '')
    os.environ['MUXHOME'] = os.path.join(Local_Data_Directory, 'data')

    # We import here, _after_ environment variables are set
    import project

    # run the simulation, produce SWW file; stdout is captured for the
    # later output-directory scrape in check_that_output_is_as_expected()
    log.critical('Running Patong simulation ...')
    status = os.system('python run_model.py > %s' % RUNMODEL_STDOUT)
    assert status == 0
207
208def check_that_output_is_as_expected():
209    '''Check that validation output is as required.'''
210
211    # get path to expected SWW file
212    log.critical('Checking that simulation results are as expected ...')
213    local_sww = os.path.join(Local_Data_Directory, Local_Data_Objects[0])
214
215    # get output directory from stdout capture file
216    try:
217        fd = open(RUNMODEL_STDOUT, 'r')
218    except IOError, e:
219        log.critical("Can't open catch file '%s': %s"
220                     % (RUNMODEL_STDOUT, str(e)))
221        return 1
222    lines = fd.readlines()
223    fd.close
224
225    output_directory = None
226    for line in lines:
227        if line.startswith(OUTDIR_PREFIX):
228            output_directory = line.replace(OUTDIR_PREFIX, '', 1)
229            output_directory = output_directory.strip('\n')
230            break
231    if output_directory is None:
232        log.critical("Couldn't find line starting with '%s' in file '%s'"
233                     % (OUTDIR_PREFIX, RUNMODEL_STDOUT))
234        return 1
235
236    # compare SWW files here and there
237    new_output_sww = os.path.join(output_directory, OUTPUT_SWW)
238    cmd = 'python cmpsww.py %s %s > cmpsww.stdout' % (local_sww, new_output_sww)
239    res = os.system(cmd)
240    if res == 0:
241        log.info('Simulation results are as expected.')
242    else:
243        log.critical('Simulation results are NOT as expected.')
244        fd = open('cmpsww.stdout', 'r')
245        cmp_error = fd.readlines()
246        fd.close()
247        log.critical('\n' + ''.join(cmp_error))
248
249
def teardown():
    '''Clean up after validation run.

    Removes the unpacked data objects, the remote scratch directory and
    the stdout capture file.  Missing items are silently ignored.
    '''

    # remove each unpacked data object, whether a plain file or a tree
    for name in Local_Data_Objects:
        path = os.path.join(Local_Data_Directory, name)
        if os.path.isfile(path):
            os.remove(path)
        else:
            shutil.rmtree(path, ignore_errors=True)

    # remove remote directory and stdout capture file
    shutil.rmtree(Remote_Data_Directory, ignore_errors=True)
    try:
        os.remove(RUNMODEL_STDOUT)
    except OSError:
        # capture file may never have been created — nothing to do
        pass
267           
268
################################################################################
# Mainline - run the simulation, check output.
################################################################################

# set logging levels
# log.critical is used below as the always-visible console channel
log.console_logging_level = log.INFO
setup()

# prepare user for what is about to happen
log.critical('')
log.critical('This validation requires a working internet connection to run.')
log.critical('')
log.critical('If you are behind a proxy server you will need to supply your '
             'proxy details')
log.critical('such as the proxy server address and your proxy username and '
             'password.  These')
log.critical('can be defined in one or more of the environment variables:')
log.critical('\tHTTP_PROXY')
log.critical('\tPROXY_USERNAME')
log.critical('\tPROXY_PASSWORD')
log.critical('if you wish.  If not supplied in environment variables, you '
             'will be prompted for')
log.critical('the details.')
log.critical('')
log.critical('You may still run this validation without an internet '
             'connection if you have the')
log.critical('required files.')
log.critical('*'*80)
log.critical('')

# make sure local data is up to date; if the download fails we can still
# proceed as long as the required .tgz files already exist locally
if not update_local_data():
    log.info("Can't update local files over the internet (for some reason).")
    if not can_we_run():
        log.critical('')
        log.critical('*'*80)
        log.critical("Can't connect via the internet and you don't have the "
                     "required files.")
        log.critical('Terminating the validation.')
        log.critical('')
        log.critical('If you get the missing files from %s' % PATONG_DATA_URL)
        log.critical('then you can try to run the validation again.')
        sys.exit(10)

# run the simulation
run_simulation()

# check output is as expected
check_that_output_is_as_expected()

# clean up
# NOTE(review): teardown deliberately left disabled, presumably so run
# outputs can be inspected afterwards — confirm before re-enabling
#teardown()
Note: See TracBrowser for help on using the repository browser.