Unverified Commit 42cd608e authored by David Hoese

Add more tests and initial wind speed/direction handling

parent d07993c6
@@ -7,22 +7,10 @@ import pandas as pd
from flask import render_template, jsonify, Response
from flask_json import as_json_p
from metobsapi.util import data_responses
import math
import numpy as np
LOG = logging.getLogger(__name__)
INTERVALS = {
'1m': 60,
'5m': 60 * 5,
'30m': 60 * 30,
'1h': 60 * 60,
'3h': 60 * 60 * 3,
'6h': 60 * 60 * 6,
'12h': 60 * 60 * 12,
'24h': 60 * 60 * 24,
}
ROUNDING = {
'rel_hum': 0,
'wind_direction': 0,
@@ -30,7 +18,7 @@ ROUNDING = {
def round_value(value, symbol):
return round(value, ROUNDING.get(symbol, 1))
return np.round(value, ROUNDING.get(symbol, 1))
def handle_date(date):
@@ -41,7 +29,7 @@ def handle_date(date):
else:
return datetime.strptime(date, "%Y-%m-%dT%H:%M:%S")
except ValueError:
LOG.warning("Malformed date string '%s'", date, exc_info=True)
LOG.warning("Malformed date string '%s'", date)
raise ValueError("Malformed date string '%s'" % (date,))
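Note: combined with handle_time_string's use in modify_data below ("these will be either datetime or timedelta objects"), 'begin' and 'end' appear to accept either an absolute timestamp matching the strptime format above (e.g. 2017-01-01T00:00:00) or a negative offset relative to now (e.g. -00:10:00, the form used throughout the tests).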
@@ -89,41 +77,6 @@ def handle_symbols(symbols):
ret[si][0].extend((None, None))
ret[si][1].extend(('wind_east', 'wind_north'))
# wind_speed = 0
# spd_idx = -1
# wind_dir = 0
# dir_idx = -1
#
# if 'wind_speed' in symbols:
# wind_speed = 1
# spd_idx = symbols.index('wind_speed')
#
# if 'wind_direction' in symbols:
# wind_dir = 1
# dir_idx = symbols.index('wind_direction')
#
# queryParams = []
#
# Check for any unknown symbols (wind_speed and wind_direction are fine)
# for shortForm in symbols:
# if shortForm not in translation:
# if shortForm != 'wind_speed' and shortForm != 'wind_direction':
# return shortForm
# else:
# continue
# queryParams.append(translation[shortForm])
#
# if wind_speed == 1 or wind_dir == 1:
# queryParams.append("wind_east")
# queryParams.append("wind_north")
#
# returnList.append(queryParams)
# returnList.append(wind_speed)
# returnList.append(wind_dir)
# returnList.append(spd_idx)
# returnList.append(dir_idx)
#
# return returnList
return ret
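For orientation, handle_symbols evidently returns a dict keyed by (site, inst) pairs, each value pairing the requested symbol names with the InfluxDB field names to query, where 'wind_east'/'wind_north' are appended (and None placeholders added on the request side) whenever wind speed or direction is asked for. A rough sketch of that shape; the bracketed pieces are assumptions, not something visible in this diff:

# {
#     ('aoss', 'tower'): (
#         [<requested symbol names>, None, None],             # padded when winds are requested
#         [<influx field names>, 'wind_east', 'wind_north'],  # fields actually queried
#     ),
# }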
@@ -139,7 +92,7 @@ def calc_num_records(begin, end, interval):
end = now - end
diff = (end - begin).total_seconds()
return diff / INTERVALS[interval]
return diff / data_responses.INTERVALS[interval]
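As a quick sanity check of this limit calculation: a 24-hour begin/end span at the default '1m' interval gives 86400 / 60 = 1440 records, which modify_data below compares against data_responses.RESPONSES_LIMIT before querying.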
def handle_csv(frame, symbols, epoch, sep=',', **kwargs):
@@ -182,7 +135,7 @@ def handle_json(frame, symbols, epoch, order='columns', **kwargs):
output['num_results'] = frame.shape[0]
package = {
'timestamps': frame['time'].values,
'timestamps': frame.index.values,
}
if epoch:
@@ -195,7 +148,8 @@ def handle_json(frame, symbols, epoch, order='columns', **kwargs):
package['data'] = dict(frame[symbols])
else:
package['symbols'] = symbols
package['data'] = [frame.iloc[i].values[1:] for i in range(frame.shape[0])]
package['data'] = [frame.iloc[i].values for i in range(frame.shape[0])]
# package['data'] = frame.values
output['results'] = package
return output
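For reference, the tests below imply a JSON envelope roughly like the following for order=column. This is a sketch assembled from the test assertions, not a guaranteed schema (the 'status' value on success is an assumption):

example_column_response = {
    'code': 200,
    'status': 'ok',  # tests only assert status == 'error' for failures
    'num_results': 9,
    'results': {
        'timestamps': ['2017-01-01T00:00:00', '...'],
        'data': {'aoss.tower.air_temp': [1.0, '...']},
    },
}
# with order=row, 'results' instead carries a 'symbols' list plus 'data' as
# one list of values per timestamp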
@@ -272,107 +226,25 @@ def handle_xml(frame, symbols, epoch, sep=',', **kwargs):
def handle_influxdb_result(result, symbols, interval):
# dataDict = {}
# dataList = list(result.get_points('metobs_' + interval))
#
# for symbol in symbols:
# pairDict = {}
# for dictionary in dataList:
# if symbol in dictionary:
# pairDict[str(dictionary['time'])] = round_value(dictionary[symbol], symbol)
# else:
# pairDict[str(dictionary['time'])] = -99999.
#
# dataDict[symbol] = pairDict
# frame = pd.DataFrame(dataDict)
frames = []
# FIXME: Handle wind_speed and wind_direction
# TODO: Round symbol values
# TODO: Mask invalid values
for si, (req_syms, influx_symbs) in symbols.items():
data_points = result.get_points('metobs_' + interval, tags={'site': si[0], 'inst': si[1]})
frame = pd.DataFrame(data_points, columns=['time'] + influx_symbs)
frame.set_index('time', inplace=True)
if influx_symbs[-1] == 'wind_north':
# remove winds
frame = frame[:, :-2]
frame.columns = ['time'] + req_syms
if 'wind_speed' in frame.columns:
frame['wind_speed'] = round_value(np.hypot(frame['wind_east'], frame['wind_north']), 'wind_speed')
if 'wind_dir' in frame.columns:
frame['wind_direction'] = round_value(np.rad2deg(np.arctan2(frame['wind_east'], frame['wind_north'])), 'wind_direction')
frame = frame.iloc[:, :-2]
frame.columns = req_syms[:-2]
else:
frame.columns = req_syms
frames.append(frame)
# FIXME: Append everything together
frame = frames[0]
# if(spd or wDir):
# wEast = frame['wind_east']
# wNorth = frame['wind_north']
# rows = list(frame.transpose().columns.values)
#
# del frame['wind_east']
# del frame['wind_north']
#
# speed = {}
# direction = {}
#
# row = 0
# wind_dict = {}
#
# for row in rows:
# if spd:
# if(wEast[row] and wNorth[row]):
# speed[row] = round_value(math.sqrt(wEast[row] * wEast[row] + wNorth[row] * wNorth[row]), 'wind_speed')
#
# else:
# if(wEast[row] == 0 and wNorth[row] == 0):
# speed[row] = 0
#
# elif(wEast[row] == 0):
# speed[row] = math.fabs(wNorth[row])
#
# elif(wNorth[row] == 0):
# speed[row] = math.fabs(wEast[row])
#
# else:
# speed[row] = None
#
# if wDir:
# if(wEast[row] and wNorth[row]):
# direction[row] = round_value(math.atan(wNorth[row] / wEast[row]) * 180 / math.pi, 'wind_direction')
# if(direction[row] < 0):
# direction[row] = 360 + direction[row]
#
# else:
# if(wEast[row] == 0 and wNorth[row] == 0):
# direction[row] = 0
#
# elif(wEast[row] == 0):
# if(wNorth[row] > 0):
# direction[row] = 0
# else:
# direction[row] = 180
#
# elif(wNorth[row] == 0):
# if(wEast[row] > 0):
# direction[row] = 90
# else:
# direction[row] = 270
#
# else:
# direction[row] = None
#
# if spd:
# wSSeries = pd.Series(speed)
# ws_dictSeries = {'wind_speed': wSSeries}
# speedDF = pd.DataFrame(ws_dictSeries)
# frames = [frame, speedDF]
# frame = pd.concat(frames, axis=1, join='inner')
#
# if wDir:
# wDSeries = pd.Series(direction)
# wD_dictSeries = {'wind_direction': wDSeries}
# dirDF = pd.DataFrame(wD_dictSeries)
# frames = [frame, dirDF]
# frame = pd.concat(frames, axis=1, join='inner')
#
# frame = frame.transpose()
frame = pd.concat(frames, axis=1, copy=False)
return frame
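The core of the new wind handling above, pulled out into a standalone sketch for clarity (the function name is illustrative, and the wrap into [0, 360) mirrors what the removed commented-out code did with its "+ 360" branch; the new code as written leaves arctan2's (-180, 180] output unwrapped):

import numpy as np
import pandas as pd

def derive_wind(frame: pd.DataFrame) -> pd.DataFrame:
    # wind_east / wind_north are the vector components stored in InfluxDB
    east = frame['wind_east']
    north = frame['wind_north']
    # speed is simply the magnitude of the (east, north) vector
    frame['wind_speed'] = np.hypot(east, north)
    # arctan2(east, north) is the angle clockwise from north, in radians;
    # convert to degrees and wrap negative angles into [0, 360)
    frame['wind_direction'] = np.rad2deg(np.arctan2(east, north)) % 360.0
    return frame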
@@ -400,27 +272,26 @@ RESPONSE_HANDLERS = {
def modify_data(fmt, begin, end, site, inst, symbols, interval,
sep=',', order='columns', epoch=None):
if fmt not in RESPONSE_HANDLERS:
return handle_error(fmt, 'invalid_format')
try:
# these will be either datetime or timedelta objects
begin = handle_time_string(begin) if begin else None
end = handle_time_string(end) if end else None
except (TypeError, ValueError):
return handle_error(fmt, 'malformed_string')
return handle_error(fmt, 'malformed_timestamp')
if order not in ('column', 'row'):
return handle_error(fmt, 'order')
return handle_error(fmt, 'bad_order')
if epoch and epoch not in data_responses.epoch_translation:
return handle_error(fmt, 'epoch')
if not site:
return handle_error(fmt, 'missing_site')
if not inst:
return handle_error(fmt, 'missing_inst')
return handle_error(fmt, 'bad_epoch')
if not symbols:
return handle_error(fmt, 'missing_symbols')
if not interval:
interval = '1m'
elif interval not in INTERVALS:
return handle_error(fmt, 'invalid_interval')
elif interval not in data_responses.INTERVALS:
return handle_error(fmt, 'bad_interval')
if calc_num_records(begin, end, interval) > data_responses.RESPONSES_LIMIT:
return [False, 413]
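(The bare [False, 413] return maps to HTTP 413, i.e. the requested time span would exceed data_responses.RESPONSES_LIMIT records at the chosen interval.)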
@@ -432,49 +303,18 @@ def modify_data(fmt, begin, end, site, inst, symbols, interval,
# each symbol is fully qualified with site.inst.symbol
symbols = symbols.split(':')
else:
handle_error(fmt, 'both_site_inst')
handle_error(fmt, 'missing_site_inst')
try:
influx_symbols = handle_symbols(symbols)
except ValueError as e:
return handle_error(fmt, str(e.msg))
# symbols = [s[0].split('.')[-1] for s in symbols]
# symbols = unpack[0]
# windSpeed = unpack[1]
# windDirection = unpack[2]
# spd_idx = unpack[3]
# dir_idx = unpack[4]
result = query(site, inst, influx_symbols, begin, end, interval, epoch)
frame = handle_influxdb_result(result, influx_symbols, interval)
#have to alter symbol just a bit
# if(windSpeed or windDirection):
# northIdx = -1
# eastIdx = -1
#
# for idx, symbol in enumerate(symbols):
# if symbol == 'wind_east':
# eastIdx = idx
#
# if symbol == 'wind_north':
# northIdx = idx
#
# if(northIdx > eastIdx):
# del symbols[northIdx]
# del symbols[eastIdx]
#
# else:
# del symbols[eastIdx]
# del symbols[northIdx]
#
# if(windSpeed):
# symbols.insert(spd_idx, 'wind_speed')
#
# if(windDirection):
# symbols.insert(dir_idx,'wind_direction')
# order the resulting symbols the way the user requested
# assume time is the first column
frame = frame[symbols]
try:
handler = RESPONSE_HANDLERS[fmt]
......
@@ -273,23 +273,6 @@ def handleMultiSites(fmt):
return render_template('404.html'), 404
def handle_single_site(fmt):
begin_time = request.args.get('begin')
end_time = request.args.get('end')
site = request.args.get('site')
inst = request.args.get('inst')
symbols = request.args.get('symbols')
interval = request.args.get('interval')
sep = request.args.get('sep', ',')
order = request.args.get('order', 'row')
epoch = request.args.get('epoch')
result = modifyData.modify_data(fmt, begin_time,
end_time, site, inst, symbols, interval,
sep, order, epoch)
return result
#documentation
@app.route('/api/')
def createIndex():
@@ -322,13 +305,21 @@ def apply_header(response):
@app.route('/api/data.<fmt>', methods=['GET'])
def get_data(fmt):
sites = request.args.get('site')
begin_time = request.args.get('begin')
end_time = request.args.get('end')
site = request.args.get('site')
inst = request.args.get('inst')
symbols = request.args.get('symbols')
interval = request.args.get('interval')
sep = request.args.get('sep', ',')
order = request.args.get('order', 'row')
epoch = request.args.get('epoch')
if sites or inst:
return handle_single_site(fmt)
else:
return handleMultiSites(fmt)
result = modifyData.modify_data(fmt, begin_time,
end_time, site, inst, symbols, interval,
sep, order, epoch)
return result
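Example requests now served by this single endpoint, taken verbatim from the tests below:

/api/data.json?site=aoss&inst=tower&symbols=air_temp&begin=-00:10:00&order=column
/api/data.json?symbols=aoss.tower.wind_speed:aoss.tower.wind_direction&begin=-00:10:00&order=column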
@app.route('/api/files.<fmt>', methods=['GET'])
......
@@ -43,8 +43,57 @@ class TestDataAPI(unittest.TestCase):
res = self.app.get('/api/data')
assert b'Data Request Application' in res.data
def test_bad_format(self):
res = self.app.get('/api/data.fake')
self.assertIn(b'No data file format', res.data)
def test_bad_begin_json(self):
res = self.app.get('/api/data.json?symbols=air_temp&begin=blah')
res = json.loads(res.data.decode())
self.assertEqual(res['code'], 400)
self.assertEqual(res['status'], 'error')
self.assertIn('timestamp', res['message'])
def test_bad_order(self):
res = self.app.get('/api/data.json?order=blah&symbols=air_temp')
res = json.loads(res.data.decode())
self.assertIn('column', res['message'])
self.assertIn('row', res['message'])
def test_bad_epoch(self):
res = self.app.get('/api/data.json?epoch=blah&symbols=air_temp')
res = json.loads(res.data.decode())
self.assertIn('\'h\'', res['message'])
self.assertIn('\'m\'', res['message'])
self.assertIn('\'s\'', res['message'])
self.assertIn('\'u\'', res['message'])
def test_bad_interval(self):
res = self.app.get('/api/data.json?interval=blah&symbols=air_temp')
res = json.loads(res.data.decode())
self.assertIn('\'1m\'', res['message'])
self.assertIn('\'5m\'', res['message'])
self.assertIn('\'1h\'', res['message'])
self.assertIn('\'24h\'', res['message'])
def test_missing_inst(self):
res = self.app.get('/api/data.json?site=X,symbols=air_temp&begin=-05:00:00')
res = json.loads(res.data.decode())
self.assertEqual(res['code'], 400)
self.assertEqual(res['status'], 'error')
self.assertIn('\'site\'', res['message'])
self.assertIn('\'inst\'', res['message'])
def test_missing_site(self):
res = self.app.get('/api/data.json?inst=X,symbols=air_temp&begin=-05:00:00')
res = json.loads(res.data.decode())
self.assertEqual(res['code'], 400)
self.assertEqual(res['status'], 'error')
self.assertIn('\'site\'', res['message'])
self.assertIn('\'inst\'', res['message'])
@mock.patch('metobsapi.modifyData.query')
def test_tower_shorthand_one_symbol_json_row(self, query_func):
def test_shorthand_one_symbol_json_row(self, query_func):
r = fake_data('1m', {('aoss', 'tower'): ['time', 'air_temp']}, 9)
query_func.return_value = r
# row should be the default
@@ -58,7 +107,7 @@ class TestDataAPI(unittest.TestCase):
self.assertEqual(len(res['results']['data'][0]), 1)
@mock.patch('metobsapi.modifyData.query')
def test_tower_shorthand_one_symbol_json_column(self, query_func):
def test_shorthand_one_symbol_json_column(self, query_func):
r = fake_data('1m', {('aoss', 'tower'): ['time', 'air_temp']}, 9)
query_func.return_value = r
res = self.app.get('/api/data.json?site=aoss&inst=tower&symbols=air_temp&begin=-00:10:00&order=column')
@@ -68,3 +117,56 @@ class TestDataAPI(unittest.TestCase):
self.assertIn('aoss.tower.air_temp', res['results']['data'])
self.assertEqual(len(res['results']['data']['aoss.tower.air_temp']), 9)
self.assertEqual(len(res['results']['timestamps']), 9)
@mock.patch('metobsapi.modifyData.query')
def test_wind_speed_direction_json(self, query_func):
r = fake_data('1m', {('aoss', 'tower'): ['time', 'wind_speed', 'wind_direction', 'wind_east', 'wind_north']}, 9)
query_func.return_value = r
res = self.app.get('/api/data.json?symbols=aoss.tower.wind_speed:aoss.tower.wind_direction&begin=-00:10:00&order=column')
res = json.loads(res.data.decode())
self.assertEqual(res['code'], 200)
self.assertEqual(res['num_results'], 9)
self.assertIn('aoss.tower.wind_direction', res['results']['data'])
self.assertIn('aoss.tower.wind_speed', res['results']['data'])
self.assertEqual(len(list(res['results']['data'].keys())), 2)
@mock.patch('metobsapi.modifyData.query')
def test_one_symbol_two_insts_json_row(self, query_func):
r = fake_data('1m', {
('aoss', 'tower'): ['time', 'air_temp'],
('mendota', 'buoy'): ['time', 'air_temp'],
}, 9)
query_func.return_value = r
# row should be the default
res = self.app.get('/api/data.json?symbols=aoss.tower.air_temp:mendota.buoy.air_temp&begin=-00:10:00')
res = json.loads(res.data.decode())
self.assertEqual(res['code'], 200)
self.assertEqual(res['num_results'], 9)
self.assertListEqual(res['results']['symbols'], ['aoss.tower.air_temp', 'mendota.buoy.air_temp'])
self.assertEqual(len(res['results']['timestamps']), 9)
self.assertEqual(len(res['results']['data']), 9)
self.assertEqual(len(res['results']['data'][0]), 2)
@mock.patch('metobsapi.modifyData.query')
def test_one_symbol_three_insts_json_row(self, query_func):
r = fake_data('1m', {
('site1', 'inst1'): ['time', 'air_temp'],
('site2', 'inst2'): ['time', 'air_temp'],
('site3', 'inst3'): ['time', 'air_temp'],
}, 9)
query_func.return_value = r
# row should be the default
from metobsapi.util.data_responses import SYMBOL_TRANSLATIONS as st
st = st.copy()
st[('site1', 'inst1')] = st[('aoss', 'tower')]
st[('site2', 'inst2')] = st[('aoss', 'tower')]
st[('site3', 'inst3')] = st[('aoss', 'tower')]
with mock.patch('metobsapi.util.data_responses.SYMBOL_TRANSLATIONS', st):
res = self.app.get('/api/data.json?symbols=site1.inst1.air_temp:site2.inst2.air_temp:site3.inst3.air_temp&begin=-00:10:00')
res = json.loads(res.data.decode())
self.assertEqual(res['code'], 200)
self.assertEqual(res['num_results'], 9)
self.assertListEqual(res['results']['symbols'], ['site1.inst1.air_temp', 'site2.inst2.air_temp', 'site3.inst3.air_temp'])
self.assertEqual(len(res['results']['timestamps']), 9)
self.assertEqual(len(res['results']['data']), 9)
self.assertEqual(len(res['results']['data'][0]), 3)
@@ -85,6 +85,18 @@ epoch_translation = {
'u': 'microseconds',
'ns': 'nanoseconds'
}
epoch_keys = epoch_translation.keys()
INTERVALS = {
'1m': 60,
'5m': 60 * 5,
'30m': 60 * 30,
'1h': 60 * 60,
'3h': 60 * 60 * 3,
'6h': 60 * 60 * 6,
'12h': 60 * 60 * 12,
'24h': 60 * 60 * 24,
}
resources = 'menodota buoy, and the aoss tower.'
@@ -145,10 +157,13 @@ ERROR_HANDLERS = {
ERROR_MESSAGES = {
# FIXME: These are the file API error messages
'bad_order': (400, '\'order\' can only be \'column\' or \'row\' (default)'),
'bad_epoch': (400, '\'epoch\' can only be unspecified or {}'.format(", ".join(["\'{}\'".format(x) for x in epoch_keys]))),
'bad_interval': (400, '\'interval\' can only be unspecified or {}'.format(", ".join(["\'{}\'".format(x) for x in INTERVALS.keys()]))),
'datetime_error': (400, 'missing begin or end time parameters'),
'malformed_string': (400, 'could not parse timestamp, check format'),
'missing_inst': (400, 'missing or unknown inst parameter'),
'missing_site': (400, 'missing or unknown site parameter'),
'malformed_timestamp': (400, 'could not parse timestamp parameters \'begin\' or \'end\', check format'),
'missing_symbols': (400, '\'symbols\' must be specified'),
'missing_site_inst': (400, '\'site\' and \'inst\' must both be specified or not at all'),
'unknown_stream': (400, "unknown stream ID, expected 'site'.'inst'.'level=lXX'.'pattern'.'versionXX or \"*\"'"),
}