Commit c20d0286 authored by Alan De Smet

Add total report on statistics

(And delete a bunch of dead code I could have sworn I removed earlier)
parent 3a837f64
@@ -84,83 +84,33 @@ def parse_args():
     return args
 
+def print_download_report(name, stats):
+    print(name)
+    print("\n".join([" "+x for x in stats.report()]))
+
 def main():
     #add_console_logging()
     args = parse_args()
-    stats = DownloadStatistics()
+    sststats = DownloadStatistics()
     aitfancil.SST.update_cache(args.dir,
             start_time=args.oldest,
             end_time=args.newest,
-            download_stats = stats,
+            download_stats = sststats,
             )
-    print("\n".join(stats.report()))
+    print_download_report("SST Download Summary", sststats)
 
-    stats = DownloadStatistics()
+    gfsstats = DownloadStatistics()
    aitfancil.GFS.update_cache(args.dir,
             start_time=args.oldest,
             end_time=args.newest,
-            download_stats = stats,
+            download_stats = gfsstats,
             )
-    print("\n".join(stats.report()))
+    print_download_report("GFS Download Summary", gfsstats)
+
+    print_download_report("Total Download Summary", sststats + gfsstats)
 
     return 0
 
-    last_time = ""
-    _date = dt.date(2010,1,10)
-    for hour in range(0,24):
-        minute = 0
-        second = 0
-        _time = dt.time(hour,minute,second)
-        d = dt.datetime.combine(_date,_time)
-        fslist = GFS.get_filesets_for_time(d)
-        line = ""
-        for i,v in enumerate(fslist):
-            if len(v.urls_to_files) != 2:
-                raise RuntimeError(f"I expect all of these to be 2 long, is len{v.urls_to_files} long")
-            datetimes = [ parse_datetime_from_gfs_filename(x) for x in v.urls_to_files.values() ]
-            if datetimes[0] != datetimes[1]:
-                raise RuntimeError("I expect all I expect GFS items in a single FileSet to be for the same timestamp. They are {datetimes}")
-            delta = (datetimes[0].date() - _date).days
-            if float(int(delta)) != delta:
-                raise RuntimeError("I expected difference to be in whole hours")
-            f1 = list(v.urls_to_files.values())[0][-2:]
-            f2 = list(v.urls_to_files.values())[1][-2:]
-            line += (f" {int(delta):2d} "+
-                    f"{datetimes[0].hour:2d}:"+
-                    datetimes[0].strftime("%M") +
-                    f" F{f1} F{f2} / "
-                    )
-            #import re
-            #def p(x): " ".join(re.findall(r'pgrb(\d\d):', x))
-            #b = [x for x in v.urls_to_files.values()]
-            #print(" ",i,b)
-        if last_time != line:
-            print(str(d.time())+ line)
-        last_time = line
-
-    sys.stderr.write("early abort for testing\n")
-    sys.exit(1)
-
-    with tempfile.TemporaryDirectory() as example_dir:
-        test_date = dt.datetime(2019,5,27, 12,0,0)
-
-        first = csppfetch.DownloadStatistics()
-        sst.download_for_time(test_date, example_dir, download_stats = first)
-        os.path.exists(example_dir+"/avhrr-only-v2.20190527_preliminary.nc")
-        print("First download:",first)
-
-        second = csppfetch.DownloadStatistics()
-        sst.download_for_time(test_date, example_dir, download_stats = second)
-        print("Second download:",second)
-
-    return 0
 
 if __name__ == '__main__':
     sys.exit(main())
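
For reference, a minimal sketch (not the project's actual implementation) of a statistics object compatible with the usage in this commit: report() returns a list of printable lines, and two instances can be combined with "+" so the new "Total Download Summary" can be built from the per-source SST and GFS statistics. The field names (attempted, succeeded, failed, bytes_downloaded) and the report formatting are assumptions for illustration.

class DownloadStatistics:
    """Sketch of a download-statistics accumulator; internals are assumed."""

    def __init__(self, attempted=0, succeeded=0, failed=0, bytes_downloaded=0):
        self.attempted = attempted
        self.succeeded = succeeded
        self.failed = failed
        self.bytes_downloaded = bytes_downloaded

    def __add__(self, other):
        # Summing two statistics objects yields combined totals; this is
        # what "sststats + gfsstats" in the commit relies on.
        return DownloadStatistics(
            self.attempted + other.attempted,
            self.succeeded + other.succeeded,
            self.failed + other.failed,
            self.bytes_downloaded + other.bytes_downloaded,
        )

    def report(self):
        # One string per line; the caller joins and indents them.
        return [
            f"Files attempted:  {self.attempted}",
            f"Files succeeded:  {self.succeeded}",
            f"Files failed:     {self.failed}",
            f"Bytes downloaded: {self.bytes_downloaded}",
        ]

def print_download_report(name, stats):
    # Same shape as the helper added in this commit: a heading followed by
    # the indented report lines.
    print(name)
    print("\n".join([" "+x for x in stats.report()]))

if __name__ == "__main__":
    sststats = DownloadStatistics(attempted=4, succeeded=4)
    gfsstats = DownloadStatistics(attempted=10, succeeded=9, failed=1)
    print_download_report("Total Download Summary", sststats + gfsstats)

Combining the two objects with "+" (rather than reusing one mutable counter, as the old code did with a single stats variable) keeps the SST and GFS summaries independent while still allowing a grand total.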