Verified Commit f97c2f9c authored by Owen Graham

Refactor redundant overlay code

parent e1d85783
@@ -11,6 +11,7 @@ import datetime
 from io import BytesIO
 import itertools
 import json
+from types import SimpleNamespace
 from wsgiref.handlers import CGIHandler
 from urllib.request import urlopen
@@ -158,69 +159,57 @@ def plot():
 @app.route('/plot/overlay')
 def plot_overlay():
-    name1 = request.args.get('name1')
-    year1 = int(request.args.get('year1'))
-    name2 = request.args.get('name2')
-    year2 = int(request.args.get('year2'))
+    num_datasets = 2
+    datasets = tuple(SimpleNamespace() for _ in range(num_datasets))
+    for n, dset in enumerate(datasets, start=1):
+        dset.name = request.args.get(f'name{n}')
+        dset.year = int(request.args.get(f'year{n}'))
     field = int(request.args.get('field'))
     units = field_units[field]
-    json_link1 = get_link(name1, year1) + '.jsonld'
-    json_link2 = get_link(name2, year2) + '.jsonld'
-    data1 = read_data(name1, year1, json_link1)
-    data2 = read_data(name2, year2, json_link2)
     def ignore_feb_29(rows):
         return [row for row in rows if (row[0].month, row[0].day) != (2, 29)]
-    data1 = np.array(ignore_feb_29(data1))
-    data2 = np.array(ignore_feb_29(data2))
+    for dset in datasets:
+        dset.json_link = get_link(dset.name, dset.year) + '.jsonld'
+        raw_data = read_data(dset.name, dset.year, dset.json_link)
+        dset.data = np.array(ignore_feb_29(raw_data))
     fig, axes = plt.subplots()
     fig.set_figheight(6)
     fig.set_figwidth(12)
-    axes.plot(data1[:, 0],
-              data1[:, field],
-              label=f'{name1} {data1[0][0].year}')
-    axes.plot(data1[:, 0],
-              data2[:, field],
-              alpha=0.6,
-              label=f'{name2} {data2[0][0].year}')
-    avg1 = np.nanmean(data1[:, field], dtype='float32')
-    avg2 = np.nanmean(data2[:, field], dtype='float32')
-    axes.hlines(y=avg1,
-                xmin=data1[:, 0][0],
-                xmax=data1[:, 0][-1],
-                alpha=0.7,
-                label=f'Avg {name1} {data1[0][0].year}')
-    axes.hlines(y=avg2,
-                xmin=data1[:, 0][0],
-                xmax=data1[:, 0][-1],
+    for i, dset in enumerate(datasets):
+        alpha_kw = {'alpha': 0.6} if i else {}
+        axes.plot(datasets[0].data[:, 0],
+                  dset.data[:, field],
+                  **alpha_kw,
+                  label=f'{dset.name} {dset.data[0][0].year}')
+    for dset in datasets:
+        dset.avg = np.nanmean(dset.data[:, field], dtype='float32')
+        axes.hlines(y=dset.avg,
+                    xmin=datasets[0].data[:, 0][0],
+                    xmax=datasets[0].data[:, 0][-1],
                     alpha=0.7,
-                label=f'Avg {name2} {data2[0][0].year}')
+                    label=f'Avg {dset.name} {dset.data[0][0].year}')
     axes.set_ylabel(units)
-    avg1 = np.nanmean(data1[:, field], dtype='float32')
-    avg2 = np.nanmean(data2[:, field], dtype='float32')
-    max1 = max(data1, key=lambda row: row[field])
-    max2 = max(data2, key=lambda row: row[field])
-    maximum, max_name = ((max1, name1) if max1[field] > max2[field]
-                         else (max2, name2))
-    min1 = min(data1, key=lambda row: row[field])
-    min2 = min(data2, key=lambda row: row[field])
-    minimum, min_name = ((min1, name1) if min1[field] <= min2[field]
-                         else (min2, name2))
+    for dset in datasets:
+        dset.max = max(dset.data, key=lambda row: row[field])
+        dset.min = min(dset.data, key=lambda row: row[field])
+    max_dset = max(datasets, key=lambda dset: dset.max[field])
+    min_dset = min(datasets, key=lambda dset: dset.min[field])
     axes.grid(True)
-    axes.set_title((f'Max {units}: {maximum[field]}, {max_name} Station, '
-                    f'Date: ({maximum[0]}). '
-                    f'Min {units}: {minimum[field]}, {min_name} Station, '
-                    f'Date: ({minimum[0]}).'),
+    axes.set_title((f'Max {units}: {max_dset.max[field]}, '
+                    f'{max_dset.name} Station, Date: ({max_dset.max[0]}). '
+                    f'Min {units}: {min_dset.min[field]}, '
+                    f'{min_dset.name} Station, Date: ({min_dset.min[0]}).'),
                    fontsize='small')
     axes.legend()
     axes.tick_params(labelbottom=False)
-    plt.suptitle(f'{units} measurements, '
-                 f'{name1} Station, {data1[0][0].year} / '
-                 f'{name2} Station, {data2[0][0].year}')
+    title_dsets = ' / '.join(f'{dset.name} Station, {dset.data[0][0].year}'
+                             for dset in datasets)
+    plt.suptitle(f'{units} measurements, {title_dsets}')
     buf = BytesIO()
     fig.savefig(buf, format='png')
     png_data = base64.b64encode(buf.getbuffer()).decode('ascii')
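
For readers skimming the diff, a minimal stand-alone sketch of the pattern this commit applies: the duplicated per-dataset variables (name1/year1/data1 versus name2/year2/data2) collapse into a loop over SimpleNamespace records, so each processing step is written once. The load_datasets helper and the params dict below are hypothetical, used only to illustrate the idea outside the Flask handler; the real code reads the same values from request.args.

from types import SimpleNamespace


def load_datasets(params, num_datasets=2):
    # Hypothetical helper (not in the commit): build one record per dataset
    # instead of copy-pasting the name1/year1 and name2/year2 handling.
    datasets = tuple(SimpleNamespace() for _ in range(num_datasets))
    for n, dset in enumerate(datasets, start=1):
        dset.name = params[f'name{n}']
        dset.year = int(params[f'year{n}'])
    return datasets


if __name__ == '__main__':
    # Placeholder values standing in for request.args.
    sample = {'name1': 'StationA', 'year1': '2019',
              'name2': 'StationB', 'year2': '2020'}
    for dset in load_datasets(sample):
        print(dset.name, dset.year)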