#!/usr/bin/env python

'''
Program to update a DVD jurisdiction 'anuga' directory from the
original source directory.

usage: update.py <jurisdiction>

where <jurisdiction> is one of:
    Hobart
    BatemansBay
    Gosford
    GoldCoast
ie, the name of one of the DVD jurisdiction staging directories
(case-insensitive).
'''

import os
import sys
import shutil
import time

# common base path to all data and project files
main_path = '/nas/gemd/georisk_models/inundation'

# Structures holding data for each jurisdiction:
#   jurisdiction   : jurisdiction name (also the staging directory name)
#   data_src_path  : sub-path to data directory
#   proj_src_path  : sub-path to project files
#   data_dst_path  : where to copy data files to
#   proj_dst_path  : where to copy project files to
#   copy_data_dirs : data directories to completely copy
#   make_dst_dirs  : make these data directories (may be left empty)
#   copy_data_files: individual data files to copy to
#   copy_proj_files: individual project files to copy to

hobart_data = \
{'jurisdiction': 'Hobart',                      # jurisdiction name
 # paths to various source directories
 'data_src_path': 'data/tasmania/hobart_tsunami_scenario_2009/anuga',
 'arcgis_src_path': 'data/tasmania/hobart_tsunami_scenario_2009/ArcGIS',
 'viz_src_path': 'data/tasmania/hobart_tsunami_scenario_2009/visualisations',
 'proj_src_path': 'sandpits/kvanputten/ANUGA/anuga_work/production/hobart_2009/For_DVD',
 'report_src_path': 'data/four_east_coast_communities_reporting/Professional_opinions_final',
 # paths to destination directories (under 'jurisdiction' root)
 'data_dst_path': 'data/tasmania/hobart_tsunami_scenario_2009/anuga',
 'proj_dst_path': 'project',
 'viz_dst_path': 'documents',
 'arcgis_dst_path': 'data/tasmania/hobart_tsunami_scenario_2009/ArcGIS',
 'report_dst_path': 'documents',
 # copy or create whole directories
 'make_dst_dirs': ['outputs', 'topographies'],
 'copy_data_dirs': ['gauges', 'boundaries', 'polygons'],
 # copy 'data' files or directories
 'copy_data_files': ['outputs/Event1_HAT',
                     'outputs/Event1_MSL',
                     'outputs/Event2_HAT',
                     'outputs/Event2_MSL',
                     'outputs/Event3_MSL',
                     'outputs/Event3_HAT',
                     'outputs/elevation.gdb',
                     'topographies/hobart_combined_elevation.pts',
                     'topographies/hobart_combined_elevation.txt',
                     'topographies/Metadata.pdf'],
 # copy 'project' files or directories
 'copy_proj_files': ['build_elevation.py',
                     'project.py',
                     'run_model.py',
                     'export_results_max.py',
                     'get_timeseries.py',
                     'setup_model.py',
                     'build_urs_boundary.py',
                     'file_length.py'],
 # copy 'visualisations' files or directories
 'copy_viz_files': ['Figures'],
 # copy 'arcgis' files or directories
 'copy_arc_files': ['Hobart.gdb', 'Hobart.mxd', 'layer files'],
 # copy 'report' file
 'copy_report_files': ['professionalopinion_2009_07_TAS_inundation_modelling.pdf']
}

batemans_bay_data = \
{'jurisdiction': 'BatemansBay',                 # jurisdiction name
 # paths to various source directories
 'data_src_path': 'data/new_south_wales/batemans_bay_tsunami_scenario_2009/anuga',
 'arcgis_src_path': 'data/new_south_wales/batemans_bay_tsunami_scenario_2009/ArcGIS',
 'viz_src_path': 'data/new_south_wales/batemans_bay_tsunami_scenario_2009/visualisations',
 'proj_src_path': 'sandpits/jgriffin/ANUGA/anuga_work/production/new_south_wales/batemans_bay',
 'report_src_path': 'data/four_east_coast_communities_reporting/Professional_opinions_final',
 # paths to destination directories (under 'jurisdiction' root)
 'data_dst_path': 'data/new_south_wales/batemans_bay_tsunami_scenario_2009/anuga',
 'proj_dst_path': 'project',
 'viz_dst_path': 'documents',
 'arcgis_dst_path': 'data/new_south_wales/batemans_bay_tsunami_scenario_2009/ArcGIS',
 'report_dst_path': 'documents',
 # copy or create whole directories
 'make_dst_dirs': ['boundaries', 'meshes', 'outputs', 'topographies'],
 'copy_data_dirs': ['polygons', 'gauges'],
 # copy 'data' files or directories
 'copy_data_files': ['topographies/batemans_bay_combined_elevation.pts',
                     'topographies/Data_lineage.doc',
                     'outputs/Event1_HAT',
                     'outputs/Event1_MSL',
                     'outputs/Event2_HAT',
                     'outputs/Event2_MSL',
                     'outputs/Event3_MSL',
                     'outputs/Puysegur_200yr',
                     'outputs/Puysegur_500yr',
                     'outputs/Puysegur_1000yr',
                     'outputs/Puysegur_5000yr',
                     'outputs/New_Hebrides_200yr',
                     'outputs/New_Hebrides_500yr',
                     'outputs/New_Hebrides_1000yr',
                     'outputs/New_Hebrides_2000yr',
                     'outputs/New_Hebrides_5000yr',
                     'outputs/elevation',
                     'boundaries/51077',
                     'boundaries/51204',
                     'boundaries/51292',
                     'boundaries/51347',
                     'boundaries/51378',
                     'boundaries/51424',
                     'boundaries/51436',
                     'boundaries/51445',
                     'boundaries/58115',
                     'boundaries/58129',
                     'boundaries/58226',
                     'boundaries/58272',
                     'boundaries/58284',
                     'boundaries/58286',
                     'boundaries/58346',
                     'boundaries/58368',
                     'boundaries/boundaries.gdb',
                     'boundaries/landward_boundary.csv',
                     'boundaries/urs_order.csv'
                    ],
 # copy 'visualisations' files or directories
 'copy_viz_files': ['Figures'],
 # copy 'project' files or directories
 'copy_proj_files': ['project.py',
                     'run_model.py',
                     'setup_model.py',
                     'build_elevation.py',
                     'export_results_max.py',
                     'file_length.py',
                     'build_urs_boundary.py'],
 # copy 'arcgis' files or directories
 'copy_arc_files': ['BB_cbd_figure_template.mxd'],
 # copy 'report' file
 'copy_report_files': ['professionalopinion_2009_09_NSW_inundation_modelling.pdf']
}

gosford_data = \
{'jurisdiction': 'Gosford',                     # jurisdiction name
 # paths to various source directories
 'data_src_path': 'data/new_south_wales/gosford_tsunami_scenario_2009/anuga',
 'arcgis_src_path': 'data/new_south_wales/gosford_tsunami_scenario_2009/ArcGIS',
 'viz_src_path': 'data/new_south_wales/gosford_tsunami_scenario_2009/visualisations',
 'proj_src_path': 'sandpits/jgriffin/ANUGA/anuga_work/production/new_south_wales/gosford',
 'report_src_path': 'data/four_east_coast_communities_reporting/Professional_opinions_final',
 # paths to destination directories (under 'jurisdiction' root)
 'data_dst_path': 'data/new_south_wales/gosford_tsunami_scenario_2009/anuga',
 'proj_dst_path': 'project',
 'viz_dst_path': 'documents',
 'arcgis_dst_path': 'data/new_south_wales/gosford_tsunami_scenario_2009/ArcGIS',
 'report_dst_path': 'documents',
 # copy or create whole directories
 'make_dst_dirs': ['boundaries', 'meshes', 'outputs', 'topographies'],
 'copy_data_dirs': ['polygons', 'gauges'],
 # copy 'data' files or directories
 'copy_data_files': ['topographies/gosford_combined_elevation.pts',
                     'topographies/Data_lineage.doc',
                     'outputs/Event1_HAT',
                     'outputs/Event1_MSL',
                     'outputs/Event2_HAT',
                     'outputs/Event2_MSL',
                     'outputs/Event3_MSL',
                     'outputs/Puysegur_200yr',
                     'outputs/Puysegur_500yr',
                     'outputs/Puysegur_1000yr',
                     'outputs/Puysegur_5000yr',
                     'outputs/elevation',
                     'boundaries/51436',
                     'boundaries/58025',
                     'boundaries/58113',
                     'boundaries/58187',
                     'boundaries/58242',
                     'boundaries/58284',
                     'boundaries/58349',
                     'boundaries/boundaries.gdb',
                     'boundaries/landward_boundary.csv',
                     'boundaries/urs_order.csv'
                    ],
 # copy 'visualisations' files or directories
 'copy_viz_files': ['Figures'],
 # copy 'project' files or directories
 'copy_proj_files': ['export_results_max.py',
                     'file_length.py',
                     'project.py',
                     'run_model.py',
                     'build_elevation.py',
                     'get_timeseries.py',
                     'setup_model.py',
                     'build_urs_boundary.py'
                    ],
 # copy 'arcgis' files or directories
 'copy_arc_files': ['G_umina_figure_template.mxd',
                    'avoca_bookmark.dat',
                    'copacabana_bookmark.dat',
                    'terrigal_bookmark.dat',
                    'umina_bookmark.dat'],
 # copy 'report' file
 'copy_report_files': ['professionalopinion_2009_09_NSW_inundation_modelling.pdf']
}

gold_coast_data = \
{'jurisdiction': 'GoldCoast',                   # jurisdiction name
 # paths to various source directories
 'data_src_path': 'data/queensland/gold_coast_tsunami_scenario_2009/anuga',
 'arcgis_src_path': 'data/queensland/gold_coast_tsunami_scenario_2009/ArcGIS',
 'viz_src_path': 'data/queensland/gold_coast_tsunami_scenario_2009/visualisations',
 'proj_src_path': 'sandpits/lfountain/anuga_work/production/gold_coast_2009/For_DVD',
 'report_src_path': 'data/four_east_coast_communities_reporting/Professional_opinions_final',
 # paths to destination directories (under 'jurisdiction' root)
 'data_dst_path': 'data/queensland/gold_coast_tsunami_scenario_2009/anuga',
 'proj_dst_path': 'project',
 'viz_dst_path': 'documents',
 'arcgis_dst_path': 'data/queensland/gold_coast_tsunami_scenario_2009/ArcGIS',
 'report_dst_path': 'documents',
 # copy or create whole directories
 'make_dst_dirs': ['outputs', 'topographies', 'boundaries'],
 'copy_data_dirs': ['polygons', 'gauges'],
 # copy 'data' files or directories
 'copy_data_files': ['outputs/Event1_HAT',
                     'outputs/Event1_MSL',
                     'outputs/Event2_HAT',
                     'outputs/Event2_MSL',
                     'outputs/Event3_HAT',
                     'outputs/Event3_MSL',
                     'boundaries/50863',
                     'boundaries/50994',
                     'boundaries/51253',
                     'boundaries/51392',
                     'boundaries/51423',
                     'boundaries/51469',
                     'boundaries/62833',
                     'boundaries/events_chosen.xls',
                     'boundaries/landward_boundary.csv',
                     'boundaries/urs_order.csv',
                     'topographies/gold_coast_combined_elevation.pts'
                    ],
 # copy 'visualisations' files or directories
 'copy_viz_files': ['Figures'],
 # copy 'project' files or directories
 'copy_proj_files': ['build_elevation.py',
                     'export_results_max.py',
                     'file_length.py',
                     'project.py',
                     'run_model.py',
                     'setup_model.py',
                     'get_timeseries.py',
                     'build_urs_boundary.py'
                    ],
 # copy 'arcgis' files or directories
 'copy_arc_files': ['gold_coast.mxd', 'gold_coast.gdb'],
 # copy 'report' file
 'copy_report_files': ['professionalopinion_2009_08_QLD_inundation_modelling.pdf']
}

# dictionary mapping lower-case jurisdiction name to jurisdiction data dictionary
source_jurisdiction_path = {'hobart': hobart_data,
                            'batemansbay': batemans_bay_data,
                            'gosford': gosford_data,
                            'goldcoast': gold_coast_data
                           }

######
# Routines to automate the script data above.
######


def log(msg=''):
    """Write a message to stdout."""
    print(msg)


def dir_copy(src, dst):
    """Copy `src` to `dst` using 'cp -R'.

    A shell command is used (rather than shutil) deliberately, so that
    wildcard sources such as 'extra_files/<name>/*' are expanded.
    """
    cmd = 'cp -R %s %s' % (src, dst)
    log('Doing: %s' % cmd)
    fd = os.popen(cmd)
    fd.close()


def copy_file_or_dir(src, dst):
    """Copy a file or a complete directory tree from `src` to `dst`.

    Errors are logged but do not abort the staging run; copying is
    deliberately best-effort.
    """
    try:
        # Decide by inspecting the source type instead of sniffing the
        # 'Is a directory' text of an IOError, which is locale- and
        # platform-dependent.
        if os.path.isdir(src):
            shutil.copytree(src, dst)
        else:
            shutil.copyfile(src, dst)
    except (IOError, OSError) as e:
        log('*' * 72)
        log('* %s' % str(e))
        log('*' * 72)


def _copy_files(src_base, dst_base, names):
    """Copy each file/directory in `names` from under `src_base` to the
    same relative location under `dst_base` (best-effort)."""
    for name in names:
        log('Copying: %s' % name)
        copy_file_or_dir(os.path.join(src_base, name),
                         os.path.join(dst_base, name))


def update_staging(jurisdiction):
    """Create the DVD staging directory for one jurisdiction.

    jurisdiction: a lower-case key of `source_jurisdiction_path`.

    Any existing staging directory for the jurisdiction is deleted
    first; then data, project, visualisation, ArcGIS, report and extra
    DVD files are copied into a fresh directory under the CWD.
    """
    j_dict = source_jurisdiction_path[jurisdiction]
    j_name = j_dict['jurisdiction']

    # source paths live under main_path; destinations under CWD/j_name
    data_src_path = os.path.join(main_path, j_dict['data_src_path'])
    data_dst_path = os.path.join(os.getcwd(), j_name, j_dict['data_dst_path'])
    proj_src_path = os.path.join(main_path, j_dict['proj_src_path'])
    proj_dst_path = os.path.join(os.getcwd(), j_name, j_dict['proj_dst_path'])
    viz_src_path = os.path.join(main_path, j_dict['viz_src_path'])
    viz_dst_path = os.path.join(os.getcwd(), j_name, j_dict['viz_dst_path'])
    arcgis_src_path = os.path.join(main_path, j_dict['arcgis_src_path'])
    arcgis_dst_path = os.path.join(os.getcwd(), j_name,
                                   j_dict['arcgis_dst_path'])
    report_src_path = os.path.join(main_path, j_dict['report_src_path'])
    report_dst_path = os.path.join(os.getcwd(), j_name,
                                   j_dict['report_dst_path'])

    # tell where all stuff is coming from
    log('Getting data from: %s' % data_src_path)
    log('Getting project from: %s' % proj_src_path)
    log('Getting ArcGIS from: %s' % arcgis_src_path)
    log('Getting report from: %s' % report_src_path)
    log('')

    # create new output directory, delete old if there
    if os.path.exists(j_name):
        log('Deleting existing staging directory: %s' % j_name)
        shutil.rmtree(j_name)
    log('Creating staging directory: %s' % j_name)
    os.makedirs(j_name)

    # create required (initially empty) data directories
    # ('dir_name', not 'dir', to avoid shadowing the builtin)
    for dir_name in j_dict['make_dst_dirs']:
        log('Creating directory: %s' % dir_name)
        os.makedirs(os.path.join(data_dst_path, dir_name))

    # copy required full directories
    for copy_dir in j_dict['copy_data_dirs']:
        log('Copying directory: %s' % copy_dir)
        copy_file_or_dir(os.path.join(data_src_path, copy_dir),
                         os.path.join(data_dst_path, copy_dir))

    # copy required data files
    _copy_files(data_src_path, data_dst_path, j_dict['copy_data_files'])

    # copy required visualisation files
    log('Creating directory: %s' % viz_dst_path)
    os.makedirs(viz_dst_path)
    _copy_files(viz_src_path, viz_dst_path, j_dict['copy_viz_files'])

    # copy required project files
    log('Creating directory: %s' % proj_dst_path)
    os.makedirs(proj_dst_path)
    _copy_files(proj_src_path, proj_dst_path, j_dict['copy_proj_files'])

    # copy required ArcGIS files
    log('Creating directory: %s' % arcgis_dst_path)
    os.makedirs(arcgis_dst_path)
    _copy_files(arcgis_src_path, arcgis_dst_path, j_dict['copy_arc_files'])

    # copy report files; the destination ('documents') normally already
    # exists from the visualisation step above, so create only if missing
    log('Creating directory: %s' % report_dst_path)
    if not os.path.exists(report_dst_path):
        os.makedirs(report_dst_path)
    _copy_files(report_src_path, report_dst_path, j_dict['copy_report_files'])

    # now copy jurisdiction-specific DVD files (wildcard needs dir_copy)
    dir_copy('extra_files/%s/*' % j_name, j_name)

    # copy the extra_files and special directories
    dir_copy('extra_files/browser_files', j_name)
    dir_copy('extra_files/documents', j_name)
    dir_copy('extra_files/autorun.inf', j_name)
    dir_copy('extra_files/.cache', j_name)

    # report the size of the finished staging directory;
    # 'du -sh' output is '<size>\t<path>'
    fd = os.popen('du -sh %s' % j_name, 'r')
    res = fd.read()
    fd.close()
    size = res.split('\t', 1)[0]
    log('Staging directory %s has size %s' % (j_name, size))


def usage():
    """Print command-line usage information to stdout."""
    print('usage: update.py <jurisdiction>')
    print('where <jurisdiction> is one of the jurisdiction '
          'staging directories.')


def main():
    """Parse the command line and update the requested staging directory."""
    # the script shells out to 'cp' and 'du', so Unix only
    if sys.platform == 'win32':
        print('Sorry, you can only run this script under Unix')
        sys.exit(10)

    if len(sys.argv) != 2:
        usage()
        sys.exit(10)

    jurisdiction = sys.argv[1].lower()

    # remove any trailing '/' - from TAB completion
    if jurisdiction.endswith('/'):
        jurisdiction = jurisdiction[:-1]

    # fail cleanly on an unknown jurisdiction instead of a KeyError
    if jurisdiction not in source_jurisdiction_path:
        print("Unknown jurisdiction: '%s'" % sys.argv[1])
        usage()
        sys.exit(10)

    start_time = time.time()
    update_staging(jurisdiction)
    elapsed_time = time.time() - start_time
    log('Elapsed time is %.1fs' % elapsed_time)
    print('completed')


if __name__ == '__main__':
    main()