Commit 7c849c83 authored by Alan De Smet

Allow multiple symlink destinations

aitf relies on this behavior.
parent 3dd3225a
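
For context, the key interface change is making the positional destination argument repeatable via nargs='+', so argparse collects the directories into a list and the first entry doubles as the fallback download target when the cache is not writable. Below is a minimal standalone sketch of that behavior, not the project's actual parser: arg_directory and add_cache_dir_arg are omitted, and the paths and option default are made up for illustration.

    import argparse

    # Minimal sketch: 'destination' now accepts one or more directories.
    ap = argparse.ArgumentParser()
    ap.add_argument('destination', nargs='+', metavar='FINAL_PATH',
                    help='write symbolic links to required files here')
    ap.add_argument('--cache', metavar='DIR', default='ancil-cache',
                    help='directory holding downloaded files')

    # Hypothetical invocation with two destinations:
    args = ap.parse_args(['--cache', 'ancil-cache', 'run_a', 'run_b'])
    print(args.destination)     # ['run_a', 'run_b'] -- always a list with nargs='+'
    print(args.destination[0])  # 'run_a' -- the fallback used when the cache is read-only
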
@@ -53,11 +53,11 @@ def parse_args():
         metavar='DATE',
         help='scan time ancillary data is needed for')
-    ap.add_argument('destination', type=arg_directory,
+    ap.add_argument('destination', type=arg_directory, nargs='+',
         metavar='FINAL_PATH',
-        help='write required files here, possibly as symbolic links')
-    add_cache_dir_arg(ap)
+        help='write symbolic links to required files here (actual files may be placed in the first destination provided)')
+    add_cache_dir_arg(ap, '--cache')
     ap.add_argument('--cache-only', action='store_true',
         help='do not download; only use files already in cache')
@@ -89,10 +89,10 @@ def main():
     aitf.conlog.setup_logging()
     args = parse_args()
-    cache_dir = args.dir
+    cache_dir = args.cache
     if not os.access(cache_dir, os.W_OK):
-        logging.warning(f'I do not appear to be able to have write access to "{cache_dir}". Downloading directly into {args.destination}')
-        args.dir = args.destination
+        logging.warning(f'I do not appear to be able to have write access to "{cache_dir}". Downloading directly into {args.destination[0]}')
+        args.cache = args.destination[0]
     do_download = not args.cache_only
@@ -100,29 +100,30 @@ def main():
         logging.progress("Acquiring SST")
         sststats = aitf.ancil.SST.DownloadStatistics()
-        sst_files = aitf.ancil.SST.download_for_time(args.scan_time, args.dir, do_download = do_download, download_stats = sststats)
+        sst_files = aitf.ancil.SST.download_for_time(args.scan_time, args.cache, do_download = do_download, download_stats = sststats)
         log_download_report("SST Download Summary", sststats)

         # using SST instead of GFS to show that
         # DownloadStatistics are interchangable.
         logging.progress("Acquiring GFS")
         gfsstats = aitf.ancil.SST.DownloadStatistics()
-        gfs_files = aitf.ancil.GFS.download_for_time(args.scan_time, args.dir, do_download = do_download, download_stats = sststats)
+        gfs_files = aitf.ancil.GFS.download_for_time(args.scan_time, args.cache, do_download = do_download, download_stats = sststats)
         log_download_report("GFS Download Summary", gfsstats)

         log_download_report("Total Download Summary", sststats + gfsstats)

         logging.progress("Symbolically linking into place")
         all_files = list(sst_files) + list(gfs_files)
-        for file in all_files:
-            f = os.path.relpath(file, args.dir)
-            src = os.path.join(args.dir, f)
-            dst = hack_oisst_preliminary_filename(os.path.join(args.destination, f))
-            dst_dir = os.path.dirname(dst)
-            os.makedirs(dst_dir, exist_ok=True)
-            logging.info(src + ' -> ' + dst)
-            if src != dst:
-                os.symlink(src, dst)
+        for destination in args.destination:
+            for file in all_files:
+                f = os.path.relpath(file, args.cache)
+                src = os.path.join(args.cache, f)
+                dst = hack_oisst_preliminary_filename(os.path.join(destination, f))
+                dst_dir = os.path.dirname(dst)
+                os.makedirs(dst_dir, exist_ok=True)
+                logging.info(src + ' -> ' + dst)
+                if src != dst: # src==dst if we set args.cache to args.destination[0]
+                    os.symlink(src, dst)
     except csppfetch.DownloadsFailedException as e:
         logging.fatal(str(e))
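
The linking step now fans out over every requested destination instead of a single one. The loop above is roughly equivalent to the following standalone sketch; the function name is hypothetical and hack_oisst_preliminary_filename is left out because its definition is not part of this diff.

    import os
    import logging

    def link_into_destinations(all_files, cache_dir, destinations):
        """Recreate each cached file as a relative-path-preserving symlink
        under every destination directory (sketch of the loop above)."""
        for destination in destinations:
            for file in all_files:
                rel = os.path.relpath(file, cache_dir)
                src = os.path.join(cache_dir, rel)
                dst = os.path.join(destination, rel)
                os.makedirs(os.path.dirname(dst), exist_ok=True)
                logging.info('%s -> %s', src, dst)
                # src == dst when the cache fell back to destinations[0];
                # skip to avoid creating a symlink that points at itself.
                if src != dst:
                    os.symlink(src, dst)

As in the committed loop, os.symlink() raises FileExistsError if a link already exists at the target path.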