source: anuga_validation/automated_validation_tests/patong_validation/validate_patong.py @ 6782

Last change on this file since 6782 was 6782, checked in by rwilson, 15 years ago

Modified to run correctly in remote user environment.

File size: 7.1 KB
Line 
1'''
2Automatic verification that the ANUGA code runs the Patong simulation
3and produces the expected output.
4
5Required files are downloaded from the ANUGA servers if they are
6out of date or missing.
7'''
8
9import os
10import glob
11import unittest
12import time
13import shutil
14
15from anuga.utilities.system_tools import get_web_file, untar_file
16import anuga.utilities.log as log
17
18
# Base URL for the remote ANUGA validation data.
# NOTE(review): this is a private LAN address (10.x.x.x) - it is only
# reachable from inside the hosting network; confirm before running elsewhere.
PATONG_DATA_URL = 'http://10.7.64.243/patong_validation_data/'

# path to the local data directory (downloaded objects are unpacked here)
Local_Data_Directory = os.path.join('.', 'local_data')

# path to the remote data directory (scratch area for freshly fetched digests)
Remote_Data_Directory = os.path.join('.', 'remote_data')

# sequence of required local data objects;
# first is always the SWW file to be compared against the simulation output
Local_Data_Objects = ('patong.sww', 'data')

# name of stdout catch file for the run_model.py subprocess
RUNMODEL_STDOUT = 'runmodel.stdout'

# text at start of 'output dir' line in RUNMODEL_STDOUT file
OUTDIR_PREFIX = 'Make directory '

# Name of SWW file produced by simulation
OUTPUT_SWW = 'patong.sww'
41
def setup():
    '''Prepare for the validation run.

    Placeholder: checks that the required data is set in project.py
    would go here.  Currently does nothing and returns None.
    '''
49
50
def update_local_data():
    '''Update local data objects from the server.

    These are expected to be *.tgz files/directories.  Each object is
    re-downloaded only if it is missing locally or its remote digest
    differs from the locally cached digest.
    '''

    # local function to update one data object
    def update_object(obj, auth):
        '''Update object 'obj' using authentication tuple 'auth'.

        Returns the (possibly updated) auth tuple so that credentials
        entered for the first download are reused for later ones.
        '''

        # Get a unique date+time string to defeat caching.  The idea is to add
        # this to the end of any URL so proxy sees a different request.
        cache_defeat = '?' + time.strftime('%Y%m%d%H%M%S')

        # create local and remote paths, URLs, etc.
        remote_path = os.path.join(Remote_Data_Directory, obj)
        remote_digest = remote_path + '.tgz.digest'

        local_path = os.path.join(Local_Data_Directory, obj)
        local_file = local_path + '.tgz'
        local_digest = local_file + '.digest'

        object_url = PATONG_DATA_URL + obj + '.tgz'
        digest_url = object_url + '.digest'

        # see if missing either digest or object .tgz
        if not os.path.exists(local_digest) or not os.path.exists(local_file):
            # no digest or no object, download both digest and object
            log.debug('Fetching remote file %s' % digest_url)
            auth = get_web_file(digest_url+cache_defeat,
                                local_digest, auth=auth)
            log.debug('Fetching remote file %s' % object_url)
            auth = get_web_file(object_url+cache_defeat, local_file, auth=auth)
        else:
            # download object digest to remote data directory
            # BUGFIX: previously logged object_url here while actually
            # fetching digest_url
            log.debug('Fetching remote file %s' % digest_url)
            auth = get_web_file(digest_url+cache_defeat,
                                remote_digest, auth=auth)

            # compare remote with local digest
            fd = open(local_digest, 'r')
            local_data_digest = fd.read()
            fd.close()

            fd = open(remote_digest, 'r')
            remote_data_digest = fd.read()
            fd.close()

            # if digests differ, refresh object
            if local_data_digest != remote_data_digest:
                log.debug('Local file %s is out of date' % obj)

                # BUGFIX: fetch the object *before* rewriting the cached
                # digest; previously a failed download left the stale object
                # marked as up to date
                log.debug('Fetching remote file %s' % object_url)
                auth = get_web_file(object_url+cache_defeat,
                                    local_file, auth=auth)

                fd = open(local_digest, 'w')
                fd.write(remote_data_digest)
                fd.close()

        return auth

    log.debug('Refreshing local data ...')

    # create local data directory if required
    if not os.path.exists(Local_Data_Directory):
        os.mkdir(Local_Data_Directory)

    # clean out remote data copy directory
    shutil.rmtree(Remote_Data_Directory, ignore_errors=True)
    os.mkdir(Remote_Data_Directory)

    # refresh local files
    auth = None
    for data_object in Local_Data_Objects:
        auth = update_object(data_object, auth)

    # unpack *.tgz files
    for data_object in Local_Data_Objects:
        tar_path = os.path.join(Local_Data_Directory, data_object+'.tgz')
        log.debug('Untarring %s in dir %s' % (tar_path, Local_Data_Directory))
        untar_file(tar_path, target_dir=Local_Data_Directory)

    log.debug('Local data has been refreshed.')
133
134
def run_simulation():
    '''Run the Patong simulation.

    Points the INUNDATIONHOME and MUXHOME environment variables at the
    local data directory, imports project (which reads them), then runs
    run_model.py as a subprocess with stdout captured to RUNMODEL_STDOUT.
    '''

    # modify environment so we use the local data
    os.environ['INUNDATIONHOME'] = os.path.join(Local_Data_Directory, '')
    os.environ['MUXHOME'] = os.path.join(Local_Data_Directory, 'data')

    # We import here, _after_ environment variables are set
    import project

    # run the simulation, produce SWW file
    log.debug('Running Patong simulation ...')
    status = os.system('python run_model.py > %s' % RUNMODEL_STDOUT)
    assert status == 0
152
def check_that_output_is_as_expected():
    '''Check that validation output is as required.

    Finds the simulation output directory by scanning the captured stdout
    for the OUTDIR_PREFIX line, then compares the generated SWW file with
    the reference SWW via cmpsww.py.

    Returns 1 if the stdout capture file or output directory line cannot
    be found; raises AssertionError if the SWW comparison fails.
    '''

    # get path to expected SWW file
    log.debug('Checking that simulation results are as expected ...')
    local_sww = os.path.join(Local_Data_Directory, Local_Data_Objects[0])

    # get output directory from stdout capture file
    try:
        fd = open(RUNMODEL_STDOUT, 'r')
    except IOError as e:        # 'as' form works on python 2.6+ and 3.x
        log.critical("Can't open catch file '%s': %s"
                     % (RUNMODEL_STDOUT, str(e)))
        return 1
    try:
        lines = fd.readlines()
    finally:
        # BUGFIX: original said 'fd.close' (attribute reference, no call),
        # which leaked the file handle
        fd.close()

    output_directory = None
    for line in lines:
        if line.startswith(OUTDIR_PREFIX):
            output_directory = line.replace(OUTDIR_PREFIX, '', 1)
            output_directory = output_directory.strip('\n')
            break
    if output_directory is None:
        log.critical("Couldn't find line starting with '%s' in file '%s'"
                     % (OUTDIR_PREFIX, RUNMODEL_STDOUT))
        return 1

    # compare SWW files here and there
    new_output_sww = os.path.join(output_directory, OUTPUT_SWW)
    cmd = 'python cmpsww.py %s %s > cmpsww.stdout' % (local_sww, new_output_sww)
    res = os.system(cmd)
    assert res == 0
    log.debug('Simulation results are as expected.')
187
188
def teardown():
    '''Clean up after validation run.

    Removes the downloaded/unpacked data objects, the remote data copy
    directory and the stdout capture file.  Safe to call even if some of
    these were never created (e.g. after a failed run).
    '''

    # clear all data objects from local data directory
    for data_object in Local_Data_Objects:
        obj_path = os.path.join(Local_Data_Directory, data_object)
        if os.path.isfile(obj_path):
            os.remove(obj_path)
        else:
            # unpacked objects are directories; ignore if already gone
            shutil.rmtree(obj_path, ignore_errors=True)

    # remove remote directory and stdout capture file
    shutil.rmtree(Remote_Data_Directory, ignore_errors=True)
    # BUGFIX: os.remove() raises OSError if the capture file is missing
    # (e.g. run_simulation() never ran); ignore so teardown is idempotent
    try:
        os.remove(RUNMODEL_STDOUT)
    except OSError:
        pass
203           
204
################################################################################
# Mainline - run the simulation, check output.
################################################################################

# set logging levels
log.console_logging_level = log.DEBUG

# prepare for the validation run (currently a no-op placeholder)
setup()

# make sure local data is up to date (downloads from ANUGA server if needed)
update_local_data()

# run the simulation (writes stdout to RUNMODEL_STDOUT, produces SWW file)
run_simulation()

# check output is as expected (compares SWW files via cmpsww.py)
check_that_output_is_as_expected()

# clean up downloaded data and scratch files
teardown()
Note: See TracBrowser for help on using the repository browser.