Unverified commit d9564cc1 authored by David Hoese

Add quicklook images to file api and move error handlers

parent 9b6fc577
@@ -4,11 +4,10 @@ from xml.dom.minidom import Document
 import numpy as np
 import pandas as pd
-from flask import render_template, jsonify, Response
+from flask import render_template, Response
 from flask_json import as_json_p
 
 from metobsapi.util import data_responses
-from metobsapi.util.error_handlers import ERROR_HANDLERS
 from metobsapi.util.query_influx import build_queries, query
 
 LOG = logging.getLogger(__name__)
@@ -140,11 +139,12 @@ def handle_csv(frame, epoch, sep=',',
     data_lines = []
     line_format = sep.join(["{time}", "{symbols}"])
-    for t, row in frame.iterrows():
-        line = line_format.format(
-            time=t,
-            symbols=sep.join(str(x) for x in row.values))
-        data_lines.append(line)
+    if frame is not None and not frame.empty:
+        for t, row in frame.iterrows():
+            line = line_format.format(
+                time=t,
+                symbols=sep.join(str(x) for x in row.values))
+            data_lines.append(line)
 
     if not epoch:
         epoch_str = '%Y-%m-%dT%H:%M:%SZ'
@@ -156,9 +156,9 @@ def handle_csv(frame, epoch, sep=',',
         status=status,
         code=code,
         message=message,
-        num_results=frame.shape[0],
+        num_results=frame.shape[0] if frame is not None else 0,
         epoch_str=epoch_str,
-        symbol_list=sep.join(frame.columns),
+        symbol_list=sep.join(frame.columns) if frame is not None else '',
         symbol_data="\n".join(data_lines),
     )
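
Note: every handler touched by this commit repeats the same None-guard so that error responses can reuse the normal data handlers. A minimal, self-contained sketch of the pattern (names illustrative, not from the repository):

import pandas as pd

def summarize(frame):
    # frame may be None when an error response reuses a data handler
    num_results = frame.shape[0] if frame is not None else 0
    symbol_list = ','.join(frame.columns) if frame is not None else ''
    return num_results, symbol_list

print(summarize(None))                         # (0, '')
print(summarize(pd.DataFrame({'a': [1, 2]})))  # (2, 'a')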
@@ -168,35 +168,44 @@ def handle_csv(frame, epoch, sep=',',
 @as_json_p(optional=True)
 def handle_json(frame, epoch, order='columns',
                 message='', code=200, status='success', **kwargs):
-    # force conversion to float types so they can be json'd
-    for column, data_type in zip(frame.columns, frame.dtypes.values):
-        if issubclass(data_type.type, np.integer):
-            frame[column] = frame[column].astype(float)
-    # replace NaNs with None
-    frame = frame.where(pd.notnull(frame), None)
-    output = {}
-    output['status'] = status
-    output['message'] = message
-    output['code'] = code
-    output['num_results'] = frame.shape[0]
-    package = {
-        'timestamps': frame.index.values,
-    }
-    if epoch:
-        newStamps = []
-        for stamp in package['timestamps']:
-            newStamps.append(float(stamp))
-        package['timestamps'] = newStamps
-    if order == 'column':
-        package['data'] = dict(frame)
+    package = {}
+    if frame is not None and not frame.empty:
+        # force conversion to float types so they can be json'd
+        for column, data_type in zip(frame.columns, frame.dtypes.values):
+            if issubclass(data_type.type, np.integer):
+                frame[column] = frame[column].astype(float)
+        # replace NaNs with None
+        frame = frame.where(pd.notnull(frame), None)
+        package['timestamps'] = frame.index.values
+        if epoch:
+            newStamps = []
+            for stamp in package['timestamps']:
+                newStamps.append(float(stamp))
+            package['timestamps'] = newStamps
+        if order == 'column':
+            package['data'] = dict(frame)
+        else:
+            package['symbols'] = frame.columns
+            package['data'] = [frame.iloc[i].values for i in range(frame.shape[0])]
+            # package['data'] = frame.values
     else:
-        package['symbols'] = frame.columns
-        package['data'] = [frame.iloc[i].values for i in range(frame.shape[0])]
-        # package['data'] = frame.values
-    output['results'] = package
+        package['timestamps'] = []
+        if order == 'column':
+            package['data'] = {}
+        else:
+            package['data'] = []
+            package['symbols'] = []
+    output = {
+        'status': status,
+        'message': message,
+        'code': code,
+        'num_results': frame.shape[0] if frame is not None else 0,
+        'results': package,
+    }
     return output, code
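
Note: with the guard in place, the JSON handler returns the same envelope whether or not data came back. A rough sketch of the two shapes (simplified to order='column'; a numeric/epoch index is assumed, and this is not the exact API output):

import pandas as pd

def json_shape(frame):
    package = {}
    if frame is not None and not frame.empty:
        package['timestamps'] = [float(t) for t in frame.index]
        package['data'] = {c: list(frame[c]) for c in frame.columns}
    else:
        package['timestamps'] = []
        package['data'] = {}
    return {'num_results': frame.shape[0] if frame is not None else 0,
            'results': package}

print(json_shape(None))
# {'num_results': 0, 'results': {'timestamps': [], 'data': {}}}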
@@ -209,8 +218,9 @@ def handle_xml(frame, epoch, sep=',',
     head.setAttribute('status', status)
     head.setAttribute('code', str(code))
     head.setAttribute('message', message)
-    head.setAttribute('num_results', str(frame.shape[0]))
+    head.setAttribute('num_results', str(frame.shape[0]) if frame is not None else str(0))
     head.setAttribute('seperator', sep)
+    data_elem = doc.createElement('data')
     doc.appendChild(head)
     columns_elem = doc.createElement('symbols')
@@ -224,38 +234,34 @@ def handle_xml(frame, epoch, sep=',',
         time_elem.setAttribute('format', data_responses.epoch_translation[epoch] + ' since epoch (1970-01-01 00:00:00)')
     columns_elem.appendChild(time_elem)
-    for c in frame.columns:
-        col_elem = doc.createElement('symbol')
-        col_elem.setAttribute('name', c)
-        parts = c.split('.')
-        col_elem.setAttribute('short_name', parts[2])
-        col_elem.setAttribute('site', parts[0])
-        col_elem.setAttribute('inst', parts[1])
-        columns_elem.appendChild(col_elem)
+    if frame is not None and not frame.empty:
+        for c in frame.columns:
+            col_elem = doc.createElement('symbol')
+            col_elem.setAttribute('name', c)
+            parts = c.split('.')
+            col_elem.setAttribute('short_name', parts[2])
+            col_elem.setAttribute('site', parts[0])
+            col_elem.setAttribute('inst', parts[1])
+            columns_elem.appendChild(col_elem)
+        for idx, (t, row) in enumerate(frame.iterrows()):
+            row_elem = doc.createElement('row')
+            row_elem.setAttribute('id', str(idx))
+            row_elem.appendChild(doc.createTextNode(str(t)))
+            for point in row:
+                row_elem.appendChild(doc.createTextNode(sep))
+                row_elem.appendChild(doc.createTextNode(str(point)))
+            data_elem.appendChild(row_elem)
     head.appendChild(columns_elem)
-    data_elem = doc.createElement('data')
-    for idx, (t, row) in enumerate(frame.iterrows()):
-        row_elem = doc.createElement('row')
-        row_elem.setAttribute('id', str(idx))
-        row_elem.appendChild(doc.createTextNode(str(t)))
-        for point in row:
-            row_elem.appendChild(doc.createTextNode(sep))
-            row_elem.appendChild(doc.createTextNode(str(point)))
-        data_elem.appendChild(row_elem)
     head.appendChild(data_elem)
 
     # txt = doc.toprettyxml(indent=" ", encoding="utf-8")
     txt = doc.toxml(encoding="utf-8")
     return Response(txt, mimetype='text/xml'), code
 
 def handle_error(fmt, error_str):
-    handler = ERROR_HANDLERS[fmt]
+    handler = RESPONSE_HANDLERS[fmt]
     err_code, err_msg = data_responses.ERROR_MESSAGES.get(error_str, (400, error_str))
-    res = handler(err_code, err_msg)
-    if fmt == 'json':
-        res = jsonify(**res)
+    res = handler(None, None, message=err_msg, code=err_code, status='error')
    return res, err_code
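
Note: this is the "move error handlers" half of the commit. The per-format ERROR_HANDLERS table is deleted (see the removed metobsapi/util/error_handlers.py below) and errors are routed through the normal RESPONSE_HANDLERS with frame=None. A minimal runnable sketch of the dispatch (handler body simplified, names taken from the diff):

def handle_json(frame, epoch, message='', code=200, status='success'):
    # simplified stand-in: the None-guards make frame=None an empty result set
    return {'status': status, 'code': code, 'message': message,
            'num_results': frame.shape[0] if frame is not None else 0}

RESPONSE_HANDLERS = {'json': handle_json}

def handle_error(fmt, error_str, error_messages=None):
    # real table lives in metobsapi.util.data_responses.ERROR_MESSAGES
    handler = RESPONSE_HANDLERS[fmt]
    err_code, err_msg = (error_messages or {}).get(error_str, (400, error_str))
    res = handler(None, None, message=err_msg, code=err_code, status='error')
    return res, err_code

print(handle_error('json', 'missing symbols'))
# ({'status': 'error', 'code': 400, 'message': 'missing symbols', 'num_results': 0}, 400)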
@@ -4,11 +4,10 @@ from datetime import datetime
 from datetime import timedelta as delta
 import pandas as pd
-from flask import render_template, jsonify, Response
+from flask import render_template, Response
 from flask_json import as_json_p
 
 from metobsapi.util import file_responses
-from metobsapi.util.error_handlers import ERROR_HANDLERS
 from metobsapi.data_api import handle_date
 
 LOG = logging.getLogger(__name__)
@@ -92,18 +91,20 @@ def handle_csv(frame, message='', code=200, status='success'):
     """
     # Normalize the frame that was given so we only have expected information
-    frame = frame[columns]
-    data_lines = []
-    if not frame.empty:
+    if frame is not None and not frame.empty:
+        frame = frame[columns]
+        data_lines = []
         for row in frame.values:
             data_lines.append(','.join(str(x) for x in row))
+    else:
+        data_lines = []
     rows = "\n".join(data_lines)
 
     output = output.format(
         status=status,
         code=code,
         message=message,
-        num_results=frame.shape[0],
+        num_results=frame.shape[0] if frame is not None else 0,
         column_list=",".join(columns),
         rows=rows,
     )
@@ -140,8 +141,8 @@ EOF
         status=status,
         code=code,
         message=message,
-        num_results=frame.shape[0],
-        url_list='\n'.join(str(x) for x in frame['url'])
+        num_results=frame.shape[0] if frame is not None else 0,
+        url_list='\n'.join(str(x) for x in frame['url']) if frame is not None else '',
     )
     return Response(output, mimetype='text/plain'), code
@@ -168,7 +169,7 @@ bitsadmin /monitor
     """
-    if not frame.empty:
+    if frame is not None and not frame.empty:
         urls = frame['url']
         directories = []
         commands = []
@@ -193,12 +194,14 @@ if not exist %cd%\\data\\{directory} (
                 relpath=frame['relpath'][idx].replace('/', '\\')
             )
             commands.append(url_str)
+    else:
+        commands = []
     output = output.format(
         status=status,
         code=code,
         message=message,
-        num_results=frame.shape[0],
+        num_results=frame.shape[0] if frame is not None else 0,
         commands="\n".join(commands),
     )
     # windows line endings
@@ -212,28 +215,19 @@ def handle_json(frame, message='', code=200, status='success'):
     output['status'] = status
     output['message'] = message
     output['code'] = code
-    output['num_results'] = (len(list(frame.index)))
+    output['num_results'] = (len(list(frame.index))) if frame is not None else 0
 
-    if not frame.empty:
+    if frame is not None and not frame.empty:
         body = []
         for row in frame.values:
             new_row = dict((k, row[idx]) for idx, k in enumerate(frame.columns))
             body.append(new_row)
         output['data'] = body
     else:
         output['data'] = []
     return output, code
 
 
-def handle_error(fmt, error_str, stream_id=None):
-    handler = ERROR_HANDLERS[fmt]
-    err_code, err_msg = file_responses.ERROR_MESSAGES[error_str]
-    if stream_id is not None:
-        err_msg += ": '{}'".format(stream_id)
-    res = handler(err_code, err_msg)
-    if fmt == 'json':
-        res = jsonify(**res)
-    return res, err_code
-
-
 RESPONSE_HANDLERS = {
     'csv': handle_csv,
     'sh': handle_sh,
@@ -242,6 +236,15 @@ RESPONSE_HANDLERS = {
 }
 
 
+def handle_error(fmt, error_str, stream_id=None):
+    handler = RESPONSE_HANDLERS[fmt]
+    err_code, err_msg = file_responses.ERROR_MESSAGES[error_str]
+    if stream_id is not None:
+        err_msg += ": '{}'".format(stream_id)
+    res = handler(None, message=err_msg, code=err_code, status='error')
+    return res, err_code
+
+
 def find_stream_files(fmt, begin_time, end_time, dates, streams):
     if fmt not in RESPONSE_HANDLERS:
         return render_template('400.html', format=fmt), 400
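
Note: same move in the file API, with the offending stream id appended to the message. A hedged usage sketch (the error key, message table contents, and stream id are invented for illustration; the file-API handlers take the frame as their only positional argument):

ERROR_MESSAGES = {'unknown_stream': (400, 'unknown stream')}  # illustrative

def handle_json(frame, message='', code=200, status='success'):
    # simplified stand-in for the file API's JSON handler
    return {'status': status, 'code': code, 'message': message,
            'num_results': 0 if frame is None else len(frame.index),
            'data': []}

def handle_error(fmt, error_str, stream_id=None):
    err_code, err_msg = ERROR_MESSAGES[error_str]
    if stream_id is not None:
        err_msg += ": '{}'".format(stream_id)
    return handle_json(None, message=err_msg, code=err_code, status='error'), err_code

print(handle_error('json', 'unknown_stream', stream_id='aoss.tower.bad.l00.x'))
# ({'status': 'error', 'code': 400, 'message': "unknown stream: 'aoss.tower.bad.l00.x'",
#   'num_results': 0, 'data': []}, 400)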
@@ -122,7 +122,7 @@ class TestFilesAPI(unittest.TestCase):
         assert res['data'][0]['filename'] == fn
 
     def test_tower_multi_all_patterns(self):
-        res = self.app.get('/api/files.json?streams=aoss.tower.*.l00.*:aoss.tower.nc-1d-1m.lb1.v00')
+        res = self.app.get('/api/files.json?streams=aoss.tower.*.l00.*:aoss.tower.nc-daily.lb1.v00')
         res = json.loads(str(res.data, encoding='utf-8'))
         fn = self._datetimes[0].strftime('rig_tower.%Y-%m-%d.ascii')
         assert res['data'][0]['filename'] == fn
@@ -131,7 +131,7 @@ class TestFilesAPI(unittest.TestCase):
     def test_tower_dates(self):
         dates = tuple(dt.strftime('%Y-%m-%d') for dt in self._datetimes[::2])
-        res = self.app.get('/api/files.json?streams=aoss.tower.nc-1d-1m.lb1.v00&dates={}:{}'.format(*dates))
+        res = self.app.get('/api/files.json?streams=aoss.tower.nc-daily.lb1.v00&dates={}:{}'.format(*dates))
         res = json.loads(str(res.data, encoding='utf-8'))
         fn = self._datetimes[0].strftime('aoss_tower.%Y-%m-%d.nc')
         assert res['data'][0]['filename'] == fn
@@ -25,6 +25,7 @@ def create_fake_archive(archive_info, root=FAKE_ARCHIVE_PATH, datetimes=None):
     if datetimes is None:
         datetimes = [datetime.utcnow()]
     curr_dir = os.getcwd()
+    os.makedirs(root, exist_ok=True)
     os.chdir(root)
     for site, inst_info in archive_info.items():
@@ -65,4 +66,5 @@ def create_fake_archive(archive_info, root=FAKE_ARCHIVE_PATH, datetimes=None):
             os.chdir('..')
         os.chdir('..')
     os.chdir('..')
+    os.chdir(curr_dir)
--- a/metobsapi/util/error_handlers.py
+++ /dev/null
-from xml.dom.minidom import Document
-
-
-def create_xml(code, message):
-    doc = Document()
-    header = 'metobs'
-    head = doc.createElement(header)
-    head.setAttribute('status', 'error')
-    head.setAttribute('code', code)
-    head.setAttribute('message', message)
-    head.setAttribute('num_results', '0')
-    doc.appendChild(head)
-    return doc.toprettyxml(indent=" ", encoding="utf-8")
-
-
-def create_json(code, message):
-    json = {}
-    json['status'] = 'error'
-    json['code'] = code
-    json['num_results'] = 0
-    json['message'] = message
-    return json
-
-
-def create_csv(code, message):
-    returnString = '# status: error<br>'
-    returnString += '# code: ' + str(code) + '<br>'
-    returnString += '# num_results: 0<br># message: ' + message
-    return returnString
-
-
-def create_bat(code, message):
-    returnString = ':: status: error<br>'
-    returnString += ':: code: ' + str(code) + '<br>'
-    returnString += ':: num_results: 0<br># message: ' + message
-    return returnString
-
-
-ERROR_HANDLERS = {
-    'csv': create_csv,
-    'xml': create_xml,
-    'json': create_json,
-    'bat': create_bat,
-}
@@ -31,16 +31,51 @@ ARCHIVE_INFO = {
         'level_b1': {
             'versions': ('version_00',),
             'products': {
-                'nc-1mo-1d': {
+                'nc-monthly': {
                     'frequency': ProductFrequency.MONTHLY_DIR,
                     'pattern': 'aoss_tower.%Y-%m.nc',
                     'display_name': 'Monthly NetCDF file (aoss_tower.YYYY-MM.nc)',
                 },
-                'nc-1d-1m': {
-                    'frequency': ProductFrequency.DAILY_DIR,
+                'nc-daily': {
+                    'frequency': ProductFrequency.DAILY_FILE,
                     'pattern': 'aoss_tower.%Y-%m-%d.nc',
                     'display_name': 'Daily NetCDF file (aoss_tower.YYYY-MM-DD.nc)',
                 },
+                'meteorogram-daily': {
+                    'frequency': ProductFrequency.DAILY_FILE,
+                    'pattern': 'aoss_tower.meteorogram.%Y-%m-%d*.png',
+                    'display_name': 'Daily Meteorogram (aoss_tower.meteorogram.YYYY-MM-DD.png)',
+                },
+                'td-daily': {
+                    'frequency': ProductFrequency.DAILY_FILE,
+                    'pattern': 'aoss_tower.td.%Y-%m-%d*.png',
+                    'display_name': 'Daily Air and Dewpoint Temperature (aoss_tower.td.YYYY-MM-DD.png)',
+                },
+                'pressure-daily': {
+                    'frequency': ProductFrequency.DAILY_FILE,
+                    'pattern': 'aoss_tower.pressure.%Y-%m-%d*.png',
+                    'display_name': 'Daily Pressure (aoss_tower.pressure.YYYY-MM-DD.png)',
+                },
+                'wind-speed-daily': {
+                    'frequency': ProductFrequency.DAILY_FILE,
+                    'pattern': 'aoss_tower.wind_speed.%Y-%m-%d*.png',
+                    'display_name': 'Daily Wind Speed (aoss_tower.wind_speed.YYYY-MM-DD.png)',
+                },
+                'wind-dir-daily': {
+                    'frequency': ProductFrequency.DAILY_FILE,
+                    'pattern': 'aoss_tower.wind_dir.%Y-%m-%d*.png',
+                    'display_name': 'Daily Wind Direction (aoss_tower.wind_dir.YYYY-MM-DD.png)',
+                },
+                'accum-precip-daily': {
+                    'frequency': ProductFrequency.DAILY_FILE,
+                    'pattern': 'aoss_tower.accum_precip.%Y-%m-%d*.png',
+                    'display_name': 'Daily Accumulated Precipitation (aoss_tower.accum_precip.YYYY-MM-DD.png)',
+                },
+                'solar-flux-daily': {
+                    'frequency': ProductFrequency.DAILY_FILE,
+                    'pattern': 'aoss_tower.solar_flux.%Y-%m-%d*.png',
+                    'display_name': 'Daily Solar Flux (aoss_tower.solar_flux.YYYY-MM-DD.png)',
+                },
             },
         },
     },
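
Note: these are the quicklook images from the commit title. Each new entry is a daily file whose strftime pattern ends in '*' so filename suffixes still match. A hedged sketch of how such a pattern might be expanded into a glob (helper name invented; the real lookup lives elsewhere in metobsapi):

from datetime import datetime
from glob import glob

def quicklooks_for(product, when):
    # 'pattern' is strftime-formatted; the '*' wildcard survives strftime
    # untouched and matches any trailing suffix
    return glob(when.strftime(product['pattern']))

product = {'pattern': 'aoss_tower.meteorogram.%Y-%m-%d*.png'}
print(quicklooks_for(product, datetime(2017, 1, 2)))
# [] unless e.g. 'aoss_tower.meteorogram.2017-01-02.png' exists in the cwd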