Commit 77c0e749 authored by David Hoese

Add more tests to metobs api and other reorganization

parent caa4899e
@@ -84,8 +84,12 @@ def handle_symbols(symbols):
 def handle_influxdb_result(result, symbols, interval):
     frames = []
     for si, (req_syms, influx_symbs) in symbols.items():
-        data_points = result.get_points('metobs_' + interval, tags={'site': si[0], 'inst': si[1]})
-        frame = pd.DataFrame(data_points, columns=['time'] + influx_symbs)
+        columns = ['time'] + influx_symbs
+        if not result:
+            frame = pd.DataFrame(columns=columns)
+        else:
+            data_points = result.get_points('metobs_' + interval, tags={'site': si[0], 'inst': si[1]})
+            frame = pd.DataFrame(data_points, columns=['time'] + influx_symbs)
         frame.set_index('time', inplace=True)
         frame.fillna(value=np.nan, inplace=True)
         if influx_symbs[-1] == 'wind_north':
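
The new `if not result` branch lets `handle_influxdb_result` build an empty but correctly shaped frame when no query was run (for example, the over-limit case introduced further down). A minimal sketch of why the empty frame still survives the later `set_index`/`fillna` calls; pandas only, and the column names here are illustrative, not from the API:

    import numpy as np
    import pandas as pd

    # Same shape the handler expects: 'time' plus one column per symbol.
    columns = ['time', 'air_temp', 'rel_hum']

    # With no InfluxDB result, an empty frame with named columns still
    # supports the same downstream calls as a populated one.
    frame = pd.DataFrame(columns=columns)
    frame.set_index('time', inplace=True)   # empty index named 'time'
    frame.fillna(value=np.nan, inplace=True)

    assert frame.shape[0] == 0              # reported as num_results == 0
    assert list(frame.columns) == ['air_temp', 'rel_hum']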
@@ -117,9 +121,10 @@ def calc_num_records(begin, end, interval):
     return diff / data_responses.INTERVALS[interval]
 
 
-def handle_csv(frame, symbols, epoch, sep=',', **kwargs):
-    output = """# status: success
-# code: 200
+def handle_csv(frame, symbols, epoch, sep=',',
+               message='', code=200, status='success', **kwargs):
+    output = """# status: {status}
+# code: {code:d}
 # message: {message}
 # num_results: {num_results:d}
 # fields: {epoch_str},{symbol_list}
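
With `status`, `code`, and `message` threaded into the template, the same CSV header now serves both success and error responses instead of hardcoding `success`/`200`. A small sketch of how the `str.format` fields fill in (the values shown are examples):

    header = """# status: {status}
    # code: {code:d}
    # message: {message}
    # num_results: {num_results:d}"""

    print(header.format(status='fail', code=413,
                        message='Request will return too many values, please use files API',
                        num_results=0))
    # # status: fail
    # # code: 413
    # # message: Request will return too many values, please use files API
    # # num_results: 0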
@@ -140,22 +145,25 @@ def handle_csv(frame, symbols, epoch, sep=',', **kwargs):
     epoch_str = data_responses.epoch_translation[epoch] + ' since epoch (1970-01-01 00:00:00)'
     output = output.format(
-        message='',
+        status=status,
+        code=code,
+        message=message,
         num_results=frame.shape[0],
         epoch_str=epoch_str,
         symbol_list=sep.join(symbols),
         symbol_data="\n".join(data_lines),
     )
-    return Response(output, mimetype='text/csv')
+    return Response(output, mimetype='text/csv'), code
 
 
 @as_json_p(optional=True)
-def handle_json(frame, symbols, epoch, order='columns', **kwargs):
+def handle_json(frame, symbols, epoch, order='columns',
+                message='', code=200, status='success', **kwargs):
     output = {}
-    output['status'] = 'success'
-    output['message'] = ''
-    output['code'] = 200
+    output['status'] = status
+    output['message'] = message
+    output['code'] = code
     output['num_results'] = frame.shape[0]
     package = {
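
Each handler now returns a `(response, code)` pair instead of a bare response. Flask views may return such a tuple, and the second element becomes the HTTP status code, which is how the 413 reaches the client. A sketch of that Flask behavior (the route and body here are hypothetical):

    from flask import Flask, Response

    app = Flask(__name__)

    @app.route('/demo')
    def demo():
        # Flask accepts (body, status) tuples; the client sees HTTP 413.
        return Response('# status: fail\n', mimetype='text/csv'), 413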
@@ -175,17 +183,18 @@ def handle_json(frame, symbols, epoch, order='columns', **kwargs):
         package['data'] = [frame.iloc[i].values for i in range(frame.shape[0])]
         # package['data'] = frame.values
     output['results'] = package
-    return output
+    return output, code
 
 
-def handle_xml(frame, symbols, epoch, sep=',', **kwargs):
+def handle_xml(frame, symbols, epoch, sep=',',
+               message='', code=200, status='success', **kwargs):
     doc = Document()
     header = 'metobs'
     head = doc.createElement(header)
-    head.setAttribute('status', 'success')
-    head.setAttribute('code', '200')
-    head.setAttribute('message', '')
+    head.setAttribute('status', status)
+    head.setAttribute('code', str(code))
+    head.setAttribute('message', message)
     head.setAttribute('num_results', str(frame.shape[0]))
     head.setAttribute('seperator', sep)
@@ -223,7 +232,7 @@ def handle_xml(frame, symbols, epoch, sep=',', **kwargs):
     # txt = doc.toprettyxml(indent=" ", encoding="utf-8")
     txt = doc.toxml(encoding="utf-8")
-    return Response(txt, mimetype='text/xml')
+    return Response(txt, mimetype='text/xml'), code
 
 
 def handle_error(fmt, error_str, stream_id=None):
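
`xml.dom.minidom` attribute values must be strings, hence `str(code)` while `status` and `message` pass through unchanged. A standalone sketch of the envelope the XML handler builds (attribute values are example data):

    from xml.dom.minidom import Document

    doc = Document()
    head = doc.createElement('metobs')
    head.setAttribute('status', 'fail')
    head.setAttribute('code', str(413))  # attribute values must be strings
    head.setAttribute('message', 'Request will return too many values, please use files API')
    doc.appendChild(head)
    print(doc.toxml(encoding='utf-8'))
    # roughly: b'<?xml version="1.0" encoding="utf-8"?><metobs status="fail" code="413" .../>'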
@@ -271,9 +280,6 @@ def modify_data(fmt, begin, end, site, inst, symbols, interval,
     elif interval not in data_responses.INTERVALS:
         return handle_error(fmt, 'bad_interval')
 
-    if calc_num_records(begin, end, interval) > data_responses.RESPONSES_LIMIT:
-        return [False, 413]
-
     if site and inst:
         # shorthand for symbols that all use the same site and inst
         symbols = ["{}.{}.{}".format(site, inst, s) for s in symbols.split(':')]
@@ -288,12 +294,24 @@
     except ValueError as e:
         return handle_error(fmt, str(e))
 
-    queries = build_queries(site, inst, influx_symbols, begin, end, interval)
-    result = query(queries, epoch)
+    if calc_num_records(begin, end, interval) > data_responses.RESPONSES_LIMIT:
+        message = "Request will return too many values, please use files API"
+        code = 413
+        status = 'fail'
+        result = None
+    else:
+        message = ""
+        code = 200
+        status = 'success'
+        queries = build_queries(site, inst, influx_symbols, begin, end, interval)
+        result = query(queries, epoch)
 
     frame = handle_influxdb_result(result, influx_symbols, interval)
     # order the resulting symbols the way the user requested
     # assume time is the first column
     frame = frame[symbols]
     handler = RESPONSE_HANDLERS[fmt]
-    return handler(frame, symbols, epoch, sep=sep, order=order)
+    return handler(frame, symbols, epoch,
+                   sep=sep, order=order,
+                   status=status, code=code, message=message)
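
The over-limit check now runs after symbol parsing and short-circuits the InfluxDB query, but the request still flows through the normal format handler, so the client gets a structured 413 body rather than the old bare `[False, 413]`. The record estimate itself is simple interval arithmetic; a hedged sketch of what `calc_num_records` appears to compute, given the `diff / data_responses.INTERVALS[interval]` return seen above (the interval table here is illustrative):

    from datetime import datetime

    # Assumed interval durations in seconds, mirroring data_responses.INTERVALS.
    INTERVALS = {'1m': 60, '5m': 300, '1h': 3600}
    RESPONSES_LIMIT = 50000

    def calc_num_records(begin, end, interval):
        diff = (end - begin).total_seconds()
        return diff / INTERVALS[interval]

    # A request spanning decades at 1-minute resolution blows the limit,
    # which is exactly what test_too_many_points below exercises:
    n = calc_num_records(datetime(1970, 1, 1), datetime(2017, 1, 1), '1m')
    print(n > RESPONSES_LIMIT)  # True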
@@ -6,6 +6,7 @@ from io import StringIO
 import pandas as pd
 from flask import render_template, jsonify, Response
+from flask_json import as_json_p
 
 from metobsapi.util import file_responses
 from metobsapi.util.error_handlers import ERROR_HANDLERS
@@ -172,6 +173,7 @@ def handle_bat(frame):
     return output.getvalue()
 
 
+@as_json_p(optional=True)
 def handle_json(frame):
     output = {}
     output['status'] = 'success'
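
Decorating the files API's JSON handler with `flask_json.as_json_p` brings it in line with the data API: when the request carries a JSONP callback argument, the returned dict is wrapped in the callback, and with `optional=True` a plain JSON response is returned otherwise. A usage sketch (the endpoint name is hypothetical; the callback parameter name follows flask_json's defaults):

    from flask import Flask
    from flask_json import FlaskJSON, as_json_p

    app = Flask(__name__)
    FlaskJSON(app)

    @app.route('/status.json')
    @as_json_p(optional=True)
    def status():
        # /status.json            -> {"status": "success"}
        # /status.json?callback=f -> f({"status": "success"});
        return {'status': 'success'}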
@@ -219,21 +221,28 @@ RESPONSE_HANDLERS = {
 def find_stream_files(fmt, begin_time, end_time, dates, streams):
+    if fmt not in RESPONSE_HANDLERS:
+        return render_template('400.html', format=fmt), 400
+    if not streams:
+        return handle_error(fmt, 'missing_streams')
+    elif isinstance(streams, str):
+        streams = streams.split(':')
+    if isinstance(dates, str):
+        dates = dates.split(':')
     try:
         dates = handle_begin_end(begin_time, end_time, dates)
     except (TypeError, ValueError):
-        return handle_error(fmt, 'malformed_string')
+        return handle_error(fmt, 'malformed_timestamp')
 
     frame = get_data(dates, streams)
     if isinstance(frame, tuple):
         stream_id, error_type = frame
         return handle_error(fmt, error_type, stream_id=stream_id)
 
-    try:
-        handler = RESPONSE_HANDLERS[fmt]
-    except KeyError:
-        return render_template('400.html', format=fmt), 400
+    handler = RESPONSE_HANDLERS[fmt]
     res = handler(frame)
     if fmt == 'json':
        return res
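
Validating `fmt` and `streams` up front, and normalizing colon-delimited strings into lists, lets the later code assume clean inputs and drops the `try/except KeyError` around handler dispatch. The guard-then-dispatch pattern in isolation (function and handler table below are hypothetical stand-ins):

    def find(fmt, streams, handlers):
        # Reject unknown formats before doing any work.
        if fmt not in handlers:
            return ('400.html', 400)
        # Accept either 'a:b:c' or an already-split list.
        if isinstance(streams, str):
            streams = streams.split(':')
        return handlers[fmt](streams)

    handlers = {'csv': lambda s: ','.join(s)}
    print(find('csv', 'aoss.tower.a:aoss.tower.b', handlers))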
@@ -92,15 +92,17 @@ def get_files(fmt):
     begin_time = request.args.get('begin')
     end_time = request.args.get('end')
     dates = request.args.get('dates')
-    if dates is not None:
-        dates = dates.split(':')
-    streams = request.args.get('streams').split(':')
+    streams = request.args.get('streams')
     return files_api.find_stream_files(fmt, begin_time, end_time, dates, streams)
 
 
 @app.route('/api/archive/info', methods=['GET'])
 def get_archive_info():
-    return jsonify({'sites': file_responses.ARCHIVE_INFO})
+    return jsonify({
+        'code': 200,
+        'message': '',
+        'sites': file_responses.ARCHIVE_INFO,
+    })
 
 
 if __name__ == '__main__':
     app.debug = True
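
Note that splitting `streams` moved out of the route and into `find_stream_files`, so the route no longer crashes with `AttributeError` when the parameter is missing; and `/api/archive/info` now returns the same `code`/`message` envelope as the other JSON endpoints, so clients can check one field shape everywhere. A brief consumer sketch using the Flask test client, mirroring the new test below:

    import json
    import metobsapi

    app = metobsapi.app.test_client()
    res = json.loads(app.get('/api/archive/info').data.decode())
    assert res['code'] == 200 and 'sites' in res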
 import unittest
+import tempfile
+import shutil
+import json
+
+
+# class TestErrorHandlers(unittest.TestCase):
+#     def setUp(self):
+#         import metobsapi
+#         metobsapi.app.config['TESTING'] = True
+#         metobsapi.app.config['DEBUG'] = True
+#         self.app = metobsapi.app.test_client()
+#
+#     def test_internal_server(self):
+#         with unittest.mock.patch('metobsapi.data_api.modify_data') as m:
+#             m.side_effect = ValueError("Random Test Error")
+#             res = self.app.get('/api/data.json')
+#         pass
+
+
+class TestArchiveInfo(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls):
+        from metobsapi.util import create_fake_archive, file_responses
+        from datetime import datetime, timedelta
+        cls.archive_dir = tempfile.mkdtemp(suffix='_metobsapi_files_test')
+        # need now for 'recent' queries
+        now = datetime.utcnow()
+        cls._datetimes = [now, now - timedelta(days=1), now - timedelta(days=2)]
+        create_fake_archive(file_responses.ARCHIVE_INFO, root=cls.archive_dir, datetimes=cls._datetimes)
+        # import subprocess
+        # subprocess.check_call(['/opt/local/bin/tree', cls.archive_dir])
+
+    @classmethod
+    def tearDownClass(cls):
+        shutil.rmtree(cls.archive_dir)
+
+    def setUp(self):
+        import metobsapi
+        metobsapi.app.config['TESTING'] = True
+        metobsapi.app.config['ARCHIVE_ROOT'] = self.archive_dir
+        self.app = metobsapi.app.test_client()
+
+    def tearDown(self):
+        pass
+
+    def test_archive_info(self):
+        res = self.app.get('/api/archive/info')
+        res = json.loads(res.data.decode())
+        self.assertEqual(res['code'], 200)
+        self.assertIn('sites', res)
+
+
 if __name__ == "__main__":
-    unittest.main()
\ No newline at end of file
+    unittest.main()
@@ -37,6 +37,7 @@ def fake_data(interval, symbols, num_vals):
 class TestDataAPI(unittest.TestCase):
     def setUp(self):
         metobsapi.app.config['TESTING'] = True
+        metobsapi.app.config['DEBUG'] = True
         self.app = metobsapi.app.test_client()
 
     def test_doc(self):
@@ -92,6 +93,14 @@ class TestDataAPI(unittest.TestCase):
         self.assertIn('\'site\'', res['message'])
         self.assertIn('\'inst\'', res['message'])
 
+    def test_too_many_points(self):
+        res = self.app.get('/api/data.json?symbols=aoss.tower.air_temp&begin=1970-01-01T00:00:00')
+        self.assertEqual(res.status_code, 413)
+        res = json.loads(res.data.decode())
+        self.assertIn('too many values', res['message'])
+        self.assertEqual(res['code'], 413)
+        self.assertEqual(res['status'], 'fail')
+
     @mock.patch('metobsapi.data_api.query')
     def test_shorthand_one_symbol_json_row(self, query_func):
         r = fake_data('1m', {('aoss', 'tower'): ['time', 'air_temp']}, 9)
 import metobsapi
-from metobsapi.util import ProductFrequency, file_responses
 import unittest
 import tempfile
 import shutil
@@ -9,7 +8,7 @@ import json
 class TestFilesAPI(unittest.TestCase):
     @classmethod
     def setUpClass(cls):
-        from metobsapi.util import create_fake_archive
+        from metobsapi.util import create_fake_archive, file_responses
         from datetime import datetime, timedelta
         cls.archive_dir = tempfile.mkdtemp(suffix='_metobsapi_files_test')
         # need now for 'recent' queries
@@ -25,6 +24,7 @@ class TestFilesAPI(unittest.TestCase):
     def setUp(self):
         metobsapi.app.config['TESTING'] = True
+        metobsapi.app.config['DEBUG'] = True
         metobsapi.app.config['ARCHIVE_ROOT'] = self.archive_dir
         self.app = metobsapi.app.test_client()
 
@@ -35,6 +35,22 @@ class TestFilesAPI(unittest.TestCase):
         res = self.app.get('/api/files')
         assert b'File Request Application' in res.data
 
+    def test_bad_format(self):
+        res = self.app.get('/api/files.fake')
+        self.assertIn(b'No data file format', res.data)
+
+    def test_missing_streams(self):
+        res = self.app.get('/api/files.json')
+        self.assertIn(b'stream', res.data)
+
+    def test_bad_begin(self):
+        res = self.app.get('/api/files.json?streams=test&begin=bad')
+        self.assertIn(b'timestamp', res.data)
+
+    def test_bad_symbol(self):
+        res = self.app.get('/api/files.json?streams=test')
+        self.assertIn(b'stream', res.data)
+
     def test_tower_daily_ascii_csv(self):
         res = self.app.get('/api/files.csv?streams=aoss.tower.ascii.l00.*')
         fn = bytes(self._datetimes[0].strftime('rig_tower.%Y-%m-%d.ascii'), encoding='utf-8')
@@ -70,7 +70,6 @@ api_version = 1.0
 RESPONSES_LIMIT = 50000
 
 ERROR_MESSAGES = {
-    # FIXME: These are the file API error messages
     'bad_order': (400, '\'order\' can only be \'column\' or \'row\' (default)'),
     'bad_epoch': (400, '\'epoch\' can only be unspecified or {}'.format(", ".join(["\'{}\'".format(x) for x in epoch_keys]))),
     'bad_interval': (400, '\'interval\' can only be unspecified or {}'.format(", ".join(["\'{}\'".format(x) for x in INTERVALS.keys()]))),
@@ -178,11 +178,11 @@ for site, site_info in ARCHIVE_INFO.items():
 ERROR_MESSAGES = {
     'datetime_error': (400, 'missing begin or end time parameters'),
-    'malformed_string': (400, 'could not parse timestamp, check format'),
+    'malformed_timestamp': (400, 'could not parse timestamp parameters \'begin\' or \'end\', check format'),
     'missing_inst': (400, 'missing or unknown inst parameter'),
     'missing_site': (400, 'missing or unknown site parameter'),
     'missing_level': (400, 'missing or unknown level parameter'),
-    'missing_stream_pattern': (400, 'missing or unknown stream pattern parameter'),
+    'missing_streams': (400, 'missing or unknown stream pattern parameter'),
     'missing_version': (400, 'missing or unknown version parameter'),
     'unknown_stream': (400, "unknown stream ID, expected 'site'.'inst'.'level=lXX'.'pattern'.'versionXX or \"*\"'"),
 }
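
The renamed keys (`malformed_timestamp`, `missing_streams`) line up with the new `handle_error(fmt, ...)` call sites in `find_stream_files` above, keeping one `(status_code, message)` table per API. A hedged sketch of how such a table is typically consumed; the real `handle_error` in this codebase renders per-format responses and may differ:

    ERROR_MESSAGES = {
        'malformed_timestamp': (400, "could not parse timestamp parameters 'begin' or 'end', check format"),
        'missing_streams': (400, 'missing or unknown stream pattern parameter'),
    }

    def handle_error(fmt, error_str):
        # Look up the canonical status code and message for this error key.
        code, message = ERROR_MESSAGES[error_str]
        # The real handlers format this as csv/json/xml; shown here as a dict.
        return {'status': 'fail', 'code': code, 'message': message}, code

    print(handle_error('json', 'missing_streams'))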