Commit caa4899e authored by David Hoese
Overall metobs api code style cleanup and reorganization

parent 53e65941
@@ -5,3 +5,10 @@ if 'SECRET_KEY' not in globals():
ARCHIVE_ROOT = '/mnt/inst-data/cache'
ARCHIVE_URL = 'http://metobs.ssec.wisc.edu/pub/cache'
# InfluxDB Settings
INFLUXDB_HOST = 'metobs01'
INFLUXDB_PORT = 8086
INFLUXDB_USER = 'root'
INFLUXDB_PASS = 'root'
INFLUXDB_DB = 'metobs'
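
These settings replace the credentials that were previously hard-coded inside query() (see the query_influx changes at the bottom of this diff). A minimal sketch of how they get picked up, assuming the settings file is pointed to by METOBSAPI_SETTINGS as in the app setup shown further down; the InfluxDBClient construction mirrors the new query() helper in metobsapi/util/query_influx.py:

import os

from flask import Flask
from influxdb import InfluxDBClient

app = Flask(__name__)
# Assumption: METOBSAPI_SETTINGS is set and points at a settings file like
# the one above; the real app module falls back to defaults when it is not.
app.config.from_pyfile(os.environ['METOBSAPI_SETTINGS'])

# Same construction as the new query() helper.
client = InfluxDBClient(
    app.config['INFLUXDB_HOST'],
    app.config['INFLUXDB_PORT'],
    app.config['INFLUXDB_USER'],
    app.config['INFLUXDB_PASS'],
    app.config['INFLUXDB_DB'])
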
import logging
from xml.dom.minidom import Document
from datetime import datetime, timedelta
from metobsapi.queryInflux import query
from xml.dom.minidom import Document
import numpy as np
import pandas as pd
from flask import render_template, jsonify, Response
from flask_json import as_json_p
from metobsapi.util import data_responses
import numpy as np
from metobsapi.util.error_handlers import ERROR_HANDLERS
from metobsapi.util.query_influx import build_queries, query
LOG = logging.getLogger(__name__)
@@ -77,6 +81,27 @@ def handle_symbols(symbols):
return ret
def handle_influxdb_result(result, symbols, interval):
frames = []
for si, (req_syms, influx_symbs) in symbols.items():
data_points = result.get_points('metobs_' + interval, tags={'site': si[0], 'inst': si[1]})
frame = pd.DataFrame(data_points, columns=['time'] + influx_symbs)
frame.set_index('time', inplace=True)
frame.fillna(value=np.nan, inplace=True)
if influx_symbs[-1] == 'wind_north':
# remove wind components
if 'wind_dir' in frame.columns:
frame['wind_direction'] = np.rad2deg(np.arctan2(frame['wind_east'], frame['wind_north']))
frame = frame.iloc[:, :-2]
frame.columns = req_syms[:-2]
else:
frame.columns = req_syms
frame = frame.round({s: ROUNDING.get(s, 1) for s in frame.columns})
frames.append(frame)
frame = pd.concat(frames, axis=1, copy=False)
return frame
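
The wind handling above recovers a direction from the averaged wind_east/wind_north components instead of averaging reported directions. A standalone sketch of that conversion; the np.rad2deg(np.arctan2(east, north)) expression is the one used in handle_influxdb_result, the sample values are made up, and (as in the diff) no wrapping into [0, 360) is applied:

import numpy as np
import pandas as pd

# Hypothetical averaged wind components for three timestamps.
frame = pd.DataFrame({'wind_east': [1.0, 0.0, -2.0],
                      'wind_north': [1.0, -3.0, 0.0]})

# Direction in degrees clockwise from north, as computed in handle_influxdb_result.
frame['wind_direction'] = np.rad2deg(np.arctan2(frame['wind_east'], frame['wind_north']))
print(frame['wind_direction'].tolist())  # [45.0, 180.0, -90.0]
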
def calc_num_records(begin, end, interval):
now = datetime.utcnow()
if begin is None:
@@ -201,30 +226,9 @@ def handle_xml(frame, symbols, epoch, sep=',', **kwargs):
return Response(txt, mimetype='text/xml')
def handle_influxdb_result(result, symbols, interval):
frames = []
for si, (req_syms, influx_symbs) in symbols.items():
data_points = result.get_points('metobs_' + interval, tags={'site': si[0], 'inst': si[1]})
frame = pd.DataFrame(data_points, columns=['time'] + influx_symbs)
frame.set_index('time', inplace=True)
frame.fillna(value=np.nan, inplace=True)
if influx_symbs[-1] == 'wind_north':
# remove wind components
if 'wind_dir' in frame.columns:
frame['wind_direction'] = np.rad2deg(np.arctan2(frame['wind_east'], frame['wind_north']))
frame = frame.iloc[:, :-2]
frame.columns = req_syms[:-2]
else:
frame.columns = req_syms
frame = frame.round({s: ROUNDING.get(s, 1) for s in frame.columns})
frames.append(frame)
frame = pd.concat(frames, axis=1, copy=False)
return frame
def handle_error(fmt, error_str, stream_id=None):
try:
handler = data_responses.ERROR_HANDLERS[fmt]
handler = ERROR_HANDLERS[fmt]
except KeyError:
return render_template('400.html', format=fmt), 400
@@ -284,7 +288,8 @@ def modify_data(fmt, begin, end, site, inst, symbols, interval,
except ValueError as e:
return handle_error(fmt, str(e))
result = query(site, inst, influx_symbols, begin, end, interval, epoch)
queries = build_queries(site, inst, influx_symbols, begin, end, interval)
result = query(queries, epoch)
frame = handle_influxdb_result(result, influx_symbols, interval)
# order the resulting symbols the way the user requested
# assume time is the first column
......
@@ -8,7 +8,8 @@ import pandas as pd
from flask import render_template, jsonify, Response
from metobsapi.util import file_responses
from metobsapi.modifyData import handle_date
from metobsapi.util.error_handlers import ERROR_HANDLERS
from metobsapi.data_api import handle_date
LOG = logging.getLogger(__name__)
@@ -72,7 +73,7 @@ def get_data(dates, streams, frame=True):
return data
def handleCSV(frame):
def handle_csv(frame):
columns = ['filename', 'url', 'site', 'inst', 'level', 'version', 'size']
body = StringIO()
@@ -96,7 +97,7 @@ def handleCSV(frame):
return output.getvalue()
def handleSH(frame):
def handle_sh(frame):
output = StringIO()
output.write('# !/usr/bin/env bash\n')
output.write('# status: success\n# code: 200\n# message:\n')
@@ -126,8 +127,7 @@ def handleSH(frame):
return output.getvalue()
def handleBAT(frame):
def handle_bat(frame):
output = StringIO()
output.write('@echo off\n\n')
output.write('REM status: success\nREM code: 200\nREM message:\n')
@@ -172,7 +172,7 @@ def handleBAT(frame):
return output.getvalue()
def handleJSON(frame):
def handle_json(frame):
output = {}
output['status'] = 'success'
output['message'] = ''
@@ -197,7 +197,7 @@ def handleJSON(frame):
def handle_error(fmt, error_str, stream_id=None):
try:
handler = file_responses.ERROR_HANDLERS[fmt]
handler = ERROR_HANDLERS[fmt]
except KeyError:
return render_template('400.html', format=fmt), 400
@@ -211,16 +211,16 @@ def handle_error(fmt, error_str, stream_id=None):
RESPONSE_HANDLERS = {
'csv': handleCSV,
'sh': handleSH,
'bat': handleBAT,
'json': handleJSON,
'csv': handle_csv,
'sh': handle_sh,
'bat': handle_bat,
'json': handle_json,
}
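
The renamed file handlers are still selected through RESPONSE_HANDLERS keyed by output format. A self-contained sketch of that dispatch pattern; the handler bodies below are placeholders, not the real CSV/JSON formatting:

import pandas as pd

def handle_csv(frame):
    # Placeholder: the real handler writes commented header lines plus rows.
    return frame.to_csv(index=False)

def handle_json(frame):
    # Placeholder: the real handler builds a status/code/message/results dict.
    return {'status': 'success', 'code': 200, 'num_results': len(frame)}

RESPONSE_HANDLERS = {
    'csv': handle_csv,
    'json': handle_json,
}

def render(fmt, frame):
    handler = RESPONSE_HANDLERS.get(fmt)
    if handler is None:
        raise ValueError('unsupported format: {}'.format(fmt))
    return handler(frame)

frame = pd.DataFrame({'filename': ['example.nc'], 'size': [1024]})
print(render('json', frame))  # {'status': 'success', 'code': 200, 'num_results': 1}
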
def find_stream_files(fmt, beginTime, endTime, dates, streams):
def find_stream_files(fmt, begin_time, end_time, dates, streams):
try:
dates = handle_begin_end(beginTime, endTime, dates)
dates = handle_begin_end(begin_time, end_time, dates)
except (TypeError, ValueError):
return handle_error(fmt, 'malformed_string')
......
# from metobsapi.modifyData import handle_begin_end, handleInterval, siteHandler, instHandler, handle_symbols, handleResult, calc_num_records
from metobsapi.queryInflux import query
from xml.dom.minidom import Document
from io import StringIO
from metobsapi.util import data_responses
def modifyData(fmt, begin, end, site, inst, symbols, interval, sep, callback, epoch):
dates = handle_begin_end(begin, end)
if(dates == 400):
return [False, '400_stamp']
begin = dates[0]
end = dates[1]
if(not inst or not site):
return [False, 500]
site = siteHandler(site)
inst = instHandler(inst)
if(not interval):
interval = '1m'
else:
interval = handleInterval(interval)
if(interval == -1):
return [False, 'interval_error']
if(calc_num_records(begin, end, interval) > data_responses.RESPONSES_LIMIT):
return [False, 413]
unpack = handle_symbols(site, inst, symbols)
if(isinstance(unpack, str)):
return [True, site + '.' + inst + '.' + unpack]
if(isinstance(unpack, int)):
if(unpack == 404):
return [False, 404]
if(not unpack[0]):
return [False, 'unrecognizedSymbol']
symbols = unpack[0]
windSpeed = unpack[1]
windDirection = unpack[2]
spd_idx = unpack[3]
dir_idx = unpack[4]
if(not sep):
sep = ','
result = query(site, inst, symbols, begin, end, interval, epoch)
frame = handleResult(fmt,result, symbols, interval, sep, windSpeed, windDirection)
return [False, frame.transpose()]
def processSymbols(modifiedSymbols):
newSymbols = []
for symbol in modifiedSymbols:
site_inst_list = symbol.split('.')
site = site_inst_list[0]
inst = site_inst_list[1]
actual_symbol = site_inst_list[2]
if(actual_symbol == 'wind_speed'):
newSymbol = 'wind_speed'
elif(actual_symbol == 'wind_direction'):
newSymbol = 'wind_direction'
# if(actual_symbol == 'spd'):
# newSymbol = 'wind_speed'
# elif(actual_symbol == 'dir'):
# newSymbol = 'wind_direction'
else:
if(site == 'aoss' and inst == 'tower'):
newSymbol = data_responses.aoss_translation[actual_symbol]
elif(site == 'mendota' and inst == 'buoy'):
newSymbol = data_responses.buoy_translation[actual_symbol]
newSymbols.append(site + '.' + inst + '.' + newSymbol)
return newSymbols
def process(symbols):
site_inst_obj = {}
for symbol in symbols:
site_inst_list = symbol.split('.')
if(len(site_inst_list) != 3):
return 'unrecognizedSymbol'
site = site_inst_list[0]
inst = site_inst_list[1]
actual_symbol = site_inst_list[2]
#if site isn't in dict, add it
if(site not in site_inst_obj):
site_inst_obj[site] = {inst: [actual_symbol]}
#if the site is in there, but the instrument isn't
elif(inst not in site_inst_obj[site]):
site_inst_obj[site][inst] = [actual_symbol]
# both in there, append symbol
else:
site_inst_obj[site][inst].append(actual_symbol)
return site_inst_obj
def csvReturn(frame, sep, symbols, epoch):
output = StringIO()
frame = frame.transpose()
dates = list(frame.columns.values)
output.write('# status: success<br># code: 200<br># message:<br>')
output.write('# num_results: ' + str(len(dates)) + '<br>')
data = list(frame.iterrows())
if not epoch:
output.write("# Fields: YYYY-MM-DDTHH:MM:SSZ")
else:
output.write("# Fields: " + data_responses.epoch_translation[epoch] + ' since epoch (1970-01-01 00:00:00)')
for symbol in symbols:
output.write(sep + symbol)
output.write('<br/>')
for date in dates:
row = frame[date]
output.write(date)
for symbol in symbols:
output.write(sep + str(row[symbol]))
output.write('<br/>')
return output
def jsonReturn(frame, symbols, jsonFormat, epoch):
output = {}
output['status'] = 'success'
output['message'] = ''
output['code'] = 200
frame = frame.transpose()
output['num_results'] = str(len(list(frame.columns.values)))
package = {}
package['timestamps'] = list(frame.columns.values)
if(epoch):
newStamps = []
for stamp in package['timestamps']:
newStamps.append(float(stamp))
package['timestamps'] = newStamps
if jsonFormat == 'column':
frame = frame.transpose()
data = {}
for symbol in frame:
data[symbol] = frame[symbol]
package['data'] = data
else:
dataList = []
package['symbols'] = symbols
for date in frame:
row = frame[date]
newRow = []
for symbol in symbols:
if(row[symbol]):
newRow.append(float(row[symbol]))
else:
newRow.append(row[symbol])
dataList.append(newRow)
package['data'] = dataList
output['results'] = package
return output
def xmlReturn(frame, symbols, sep, epoch):
doc = Document()
frame = frame.transpose()
if(not sep):
sep = ','
header = 'metobs'
timeStamps = list(frame.columns.values)
head = doc.createElement(header)
head.setAttribute('status', 'success')
head.setAttribute('code', '200')
head.setAttribute('message', '')
head.setAttribute('num_results', str(len(timeStamps)))
head.setAttribute('seperator', sep)
doc.appendChild(head)
stampElt = doc.createElement('timestamp')
if not epoch:
stampElt.setAttribute('format', '%Y-%m-%dT%H:%M:%SZ')
else:
stampElt.setAttribute('format', data_responses.epoch_translation[epoch] + ' since epoch (1970-01-01 00:00:00)')
dateStrings = StringIO()
first = 0
for dateString in timeStamps:
if first == 0:
dateStrings.write(dateString)
first = 1
else:
dateStrings.write(sep + dateString)
stamp_content = doc.createTextNode(dateStrings.getvalue())
stampElt.appendChild(stamp_content)
head.appendChild(stampElt)
frame = frame.transpose()
for symbol in symbols:
first = 0
dataStrings = StringIO()
dataElt = doc.createElement('data')
site_inst_list = symbol.split('.')
site = site_inst_list[0]
inst = site_inst_list[1]
actual_symbol = site_inst_list[2]
dataElt.setAttribute('symbol', actual_symbol)
dataElt.setAttribute('site', site)
dataElt.setAttribute('inst', inst)
dataList = frame[symbol]
for data in dataList:
if first == 0:
dataStrings.write(str(data))
first = 1
else:
dataStrings.write(sep + str(data))
symbol_content = doc.createTextNode(dataStrings.getvalue())
dataElt.appendChild(symbol_content)
head.appendChild(dataElt)
return doc.toprettyxml(indent=" ", encoding="utf-8")
\ No newline at end of file
......@@ -8,7 +8,7 @@ from flask_cors import CORS
from flask_json import FlaskJSON
from enum import Enum
from metobsapi import modifyData, multiData, files
from metobsapi import data_api, files_api
from metobsapi.util import file_responses, data_responses
@@ -20,6 +20,7 @@ if os.environ.get('METOBSAPI_SETTINGS') is None:
else:
app.config.from_pyfile(os.environ.get('METOBSAPI_SETTINGS'))
# Load json handler and add custom enum encoder
json = FlaskJSON(app)
@json.encoder
@@ -31,278 +32,42 @@ def enum_encoder(o):
CORS(app, resources=r'/api/*', allow_headers='Content-Type')
def handleUnsupSymbol(symbol, fmt, callback):
message = 'The symbol ' + symbol + ' is not supported. Please try again.'
code = 400
if(fmt == 'json'):
if(callback):
return modifyData.jsonpReturn(data_responses.createJSON(code, message)), 400
else:
return jsonify(data_responses.createJSON(code, message)), 400
elif(fmt == 'csv'):
return data_responses.createCSV(code, message), 400
elif(fmt == 'xml'):
return Response(data_responses.createXML(str(code), message), mimetype='text/xml'), 400
else:
return render_template('400.html', format=fmt), 400
def handleIntError(statusNo, fmt, callback):
if(statusNo == 500):
if(fmt == 'json'):
if(callback):
return modifyData.jsonpReturn(data_responses.json_500), 500
else:
return jsonify(**data_responses.json_500), 500
elif(fmt == 'csv'):
return data_responses.csv_500, 500
elif(fmt == 'xml'):
return Response(data_responses.xml_500, mimetype='text/xml'), 500
elif(statusNo == 400):
return render_template('400.html', format=fmt), 400
elif(statusNo == 404):
if(fmt == 'json'):
if(callback):
return modifyData.jsonpReturn(data_responses.json_404), 404
else:
return jsonify(**data_responses.json_404), 404
if(fmt == 'csv'):
return data_responses.csv_404, 404
if(fmt == 'xml'):
return Response(data_responses.xml_404, mimetype='text/xml'), 404
elif(statusNo == 413):
if(fmt == 'json'):
if(callback):
return modifyData.jsonpReturn(data_responses.json_413), 413
else:
return jsonify(**data_responses.json_413), 413
if(fmt == 'csv'):
return data_responses.csv_413, 413
if(fmt == 'xml'):
return Response(data_responses.xml_413, mimetype='text/xml'), 413
def handleStringError(statusStr, fmt):
if(statusStr == '400_stamp'):
if(fmt == 'json'):
return modifyData.handle_json(data_responses.json_stamp), 400
elif(fmt == 'csv'):
return data_responses.csv_stamp, 400
elif(fmt == 'xml'):
return Response(data_responses.xml_stamp, mimetype='text/xml'), 400
else:
return render_template('400.html', format=fmt), 400
elif(statusStr == 'malformed'):
if(fmt == 'json'):
return modifyData.handle_json(data_responses.json_mal), 400
elif(fmt == 'csv'):
return data_responses.csv_mal, 400
elif(fmt == 'xml'):
return Response(data_responses.xml_mal, mimetype='text/xml'), 400
else:
return render_template('400.html', format=fmt), 400
elif(statusStr == 'unrecognizedSymbol'):
if(fmt == 'json'):
if(callback):
return modifyData.jsonpReturn(data_responses.json_symbol), 400
else:
return jsonify(**data_responses.json_symbol), 400
elif(fmt == 'csv'):
return data_responses.csv_symbol, 400
elif(fmt == 'xml'):
return Response(data_responses.xml_symbol, mimetype='text/xml'), 400
else:
return render_template('400.html', format=fmt), 400
elif(statusStr == 'interval_error'):
if fmt == 'json':
if(callback):
return modifyData.jsonpReturn(data_responses.json_interval), 400
else:
return jsonify(**data_responses.json_interval), 400
elif(fmt == 'csv'):
return data_responses.csv_interval, 400
elif(fmt == 'xml'):
return data_responses.xml_interval, 400
else:
return render_template('400.html', format=fmt), 400
elif(statusStr == 'symbols'):
if(fmt == 'json'):
if(callback):
return modifyData.jsonpReturn(data_responses.json_missingsymbol), 400
else:
return jsonify(**data_responses.json_missingsymbol), 400
elif(fmt == 'csv'):
return data_responses.csv_missingsymbol, 400
elif(fmt == 'xml'):
return Response(data_responses.xml_missingsymbol, mimetype='text/xml'), 400
else:
return render_template('400.html', format=fmt), 400
elif(statusStr == 'epoch'):
if(fmt == 'json'):
if(callback):
return modifyData.jsonpReturn(data_responses.json_epoch), 400
else:
return jsonify(**data_responses.json_epoch), 400
elif(fmt == 'csv'):
return data_responses.csv_epoch, 400
elif(fmt == 'xml'):
return Response(data_responses.xml_epoch, mimetype='text/xml'), 400
else:
return render_template('400.html', format=fmt), 400
elif(statusStr == 'json_format'):
if(fmt == 'json'):
if(callback):
return modifyData.jsonpReturn(data_responses.json_json_format), 400
else:
return jsonify(**data_responses.json_json_format), 400
elif(fmt == 'csv'):
return data_responses.csv_json_format, 400
elif(fmt == 'xml'):
return Response(data_responses.xml_json_format, mimetype='text/xml'), 400
else:
return render_template('400.html', format=fmt), 400
else:
return statusStr
def handleMultiSites(fmt):
beginTime = request.args.get('begin')
endTime = request.args.get('end')
symbols = request.args.get('symbols')
interval = request.args.get('interval')
sep = request.args.get('sep')
callback = request.args.get('callback')
jsonFormat = request.args.get('jsonFormat')
epoch = request.args.get('epoch')
if(jsonFormat != 'column' and jsonFormat != 'row' and not jsonFormat):
handleStringError('json_format', fmt, callback)
if(epoch not in data_responses.epoch_translation and not epoch):
handleStringError('epoch', fmt, callback)
if(not symbols):
return handleStringError('symbols', fmt, callback)
modifiedSymbols = symbols.split(':')
frames = {}
site_inst_symbolObj = multiData.process(modifiedSymbols)
if(isinstance(site_inst_symbolObj, str)):
return handleStringError(site_inst_symbolObj, fmt, callback)
for site in site_inst_symbolObj :
for inst in site_inst_symbolObj[site] :
getSymbols = site_inst_symbolObj[site][inst]
frame = multiData.modifyData(fmt, beginTime, endTime, site, inst, getSymbols, interval, sep, callback, epoch)
unsupSymbol = frame[0]
frame = frame[1]
if(unsupSymbol == True):
return handleUnsupSymbol(frame, fmt, callback)
if(isinstance(frame, int)):
return handleIntError(frame, fmt, callback)
if(isinstance(frame, str)):
return handleStringError(frame, fmt, callback)
frames[site + '.' + inst] = frame
for identity in frames:
frame = frames[identity]
cols = frame.columns.tolist()
for idx, col in enumerate(cols):
col = identity + '.' + col
cols[idx] = col
frame.columns = cols
frames[identity] = frame
mergeFrames = []
for identity in frames:
mergeFrames.append(frames[identity])
frame = pd.concat(mergeFrames, axis=1)
frame = frame.where((pd.notnull(frame)), None)
modifiedSymbols = multiData.processSymbols(modifiedSymbols)
frame = frame[modifiedSymbols]
if(fmt == 'csv'):
if(not sep):
sep = ','
return multiData.csvReturn(frame, sep, modifiedSymbols, epoch).getvalue()
if(fmt == 'json'):
jsonReturn = multiData.jsonReturn(frame, modifiedSymbols, jsonFormat, epoch)
if(callback):
return modifyData.jsonpReturn(jsonReturn)
else:
return jsonify(**jsonReturn)
if(fmt == 'xml'):
return Response(multiData.xmlReturn(frame, modifiedSymbols, sep, epoch), mimetype='text/xml')
else:
return render_template('404.html'), 404
#documentation
@app.route('/api/')
def createIndex():
def index():
"""Main App Documentation"""
return render_template('index.html')
#Documentation
@app.route('/api/files')
def createFilesIndex():
def files_index():
"""Files API Documentation"""
return render_template('files_index.html',
archive_info=file_responses.ARCHIVE_STREAMS,
instrument_streams=file_responses.INSTRUMENT_STREAMS)
#Documentation
@app.route('/api/data')
def createDataIndex():
def data_index():
"""Data API Documentation"""
return render_template('data_index.html')
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
@app.errorhandler(500)
def internal_server(e):
return render_template('500.html'), 500
@app.after_request
def apply_header(response):
response.headers[data_responses.api_version_header] = data_responses.api_version
return response
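
apply_header stamps every response with the API version header defined in data_responses (X-Metobs-API-VERSION, 1.0). A small sketch of the same after_request hook exercised with Flask's test client; only the header name and value are taken from the code, the rest is illustrative:

from flask import Flask

app = Flask(__name__)

@app.after_request
def apply_header(response):
    # Header name/value as in metobsapi.util.data_responses.
    response.headers['X-Metobs-API-VERSION'] = '1.0'
    return response

@app.route('/api/')
def index():
    return 'ok'

with app.test_client() as client:
    print(client.get('/api/').headers['X-Metobs-API-VERSION'])  # 1.0
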
@app.route('/api/data.<fmt>', methods=['GET'])
def get_data(fmt):
begin_time = request.args.get('begin')
@@ -315,7 +80,7 @@ def get_data(fmt):
order = request.args.get('order', 'row')
epoch = request.args.get('epoch')
result = modifyData.modify_data(fmt, begin_time,
result = data_api.modify_data(fmt, begin_time,
end_time, site, inst, symbols, interval,
sep, order, epoch)
@@ -330,7 +95,7 @@ def get_files(fmt):
if dates is not None:
dates = dates.split(':')
streams = request.args.get('streams').split(':')
return files.find_stream_files(fmt, begin_time, end_time, dates, streams)
return files_api.find_stream_files(fmt, begin_time, end_time, dates, streams)
@app.route('/api/archive/info', methods=['GET'])
@@ -339,5 +104,4 @@ def get_archive_info():
if __name__ == '__main__':
app.debug = True
app.run('0.0.0.0', threaded=True)
@@ -34,7 +34,7 @@
Metobs API
</h1>
<h2 id="Data API Documentation">
<a href="{{ url_for('createDataIndex')}}">
<a href="{{ url_for('data_index')}}">
Data API Documentation
</a>
</h2>
@@ -45,7 +45,7 @@
</p>
<h2 id="#File API Documentation">
<a href="{{ url_for('createFilesIndex')}}">
<a href="{{ url_for('files_index')}}">
Files API Documentation
</a>
</h2>
......
http://localhost:5000/api/data.ascii?symbols=aoss.tower.t:aoss.tower.td:aoss.tower.rh:mendota.buoy.t:mendota.buoy.td:mendota.buoy.rh:aoss.tower.spd&begin=-00:10:00
http://localhost:5000/api/data.json?symbols=aoss.tower.t:aoss.tower.td:aoss.tower.rh:mendota.buoy.t:mendota.buoy.td:mendota.buoy.rh:aoss.tower.spd&begin=-00:10:00&callback=foo
http://localhost:5000/api/data.xml?symbols=aoss.tower.t:aoss.tower.td:aoss.tower.rh:mendota.buoy.t:mendota.buoy.td:mendota.buoy.rh:aoss.tower.spd&begin=-00:10:00
http://localhost:5000/api/data.jsonp?symbols=aoss.tower.t:aoss.tower.td:aoss.tower.rh:mendota.buoy.t:mendota.buoy.td:mendota.buoy.rh:aoss.tower.spd&begin=-00:10:00&callback=foo
http://localhost:5000/api/data.jsonp?site=aoss&inst=tower&symbols=t:td:rh:spd:dir:accum_precip:flux&begin=-00:10:00&callback=foo
http://localhost:5000/api/data.json?site=aoss&inst=tower&symbols=t:td:rh:spd:dir:accum_precip:flux&begin=-00:10:00&callback=foo
http://localhost:5000/api/data.ascii?site=aoss&inst=tower&symbols=t:td:rh:spd:dir:accum_precip:flux&begin=-00:10:00&callback=foo
http://localhost:5000/api/data.xml?site=aoss&inst=tower&symbols=t:td:rh:spd:dir:accum_precip:flux&begin=-00:10:00&callback=foo
//500 error
http://localhost:5000/api/data.xml?site=aoss&symbols=t:td:rh:spd:dir:accum_precip:flux&begin=-00:10:00&callback=foo
//404 error
http://localhost:5000/api/data.xml?site=aoss&inst=%22tower%22&symbols=t:td:rh:spd:dir:accum_precip:flux&begin=-00:10:00&callback=foo
//400 error
http://localhost:5000/api/data.xml?site=aoss&inst=tower&symbols=aoss.t:td:rh:spd:dir:accum_precip:flux&begin=-00:10:00&callback=foo
http://localhost:5000/api/data.jsonp?site=aoss&inst=tower&symbols=t:td:rh:spd:dir:accum_precip:flux&begin=-00:10:00&callback=foo&jsonFormat=column
http://localhost:5000/api/data.json?site=aoss&inst=tower&symbols=t:td:rh:spd:dir:accum_precip:flux&begin=-00:10:00&callback=foo&jsonFormat=column
http://localhost:5000/api/data.json?symbols=aoss.tower.t:aoss.tower.td:aoss.tower.rh:mendota.buoy.t:mendota.buoy.td:mendota.buoy.rh:aoss.tower.spd&begin=-00:10:00&callback=foo&jsonFormat=column
http://localhost:5000/api/data.jsonp?symbols=aoss.tower.t:aoss.tower.td:aoss.tower.rh:mendota.buoy.t:mendota.buoy.td:mendota.buoy.rh:aoss.tower.spd&begin=-00:10:00&callback=foo&jsonFormat=column
http://localhost:5000/api/data.ascii?site=aoss&inst=tower&symbols=t:td:rh:spd:dir:accum_precip:flux&begin=-00:10:00&callback=foo&jsonFormat=column&epoch=ms
http://localhost:5000/api/data.xml?site=aoss&inst=tower&symbols=t:td:rh:spd:dir:accum_precip:flux&begin=-00:10:00&callback=foo&jsonFormat=column&epoch=ms
http://localhost:5000/api/data.ascii?symbols=aoss.tower.t:aoss.tower.td:aoss.tower.rh:mendota.buoy.t:mendota.buoy.td:mendota.buoy.rh:aoss.tower.spd&begin=-00:10:00&callback=foo&jsonFormat=column&epoch=ms
http://localhost:5000/api/data.xml?symbols=aoss.tower.t:aoss.tower.td:aoss.tower.rh:mendota.buoy.t:mendota.buoy.td:mendota.buoy.rh:aoss.tower.spd&begin=-00:10:00&callback=foo&jsonFormat=column&epoch=ms
//new test cases
http://localhost:5000/api/data.ascii?symbols=aoss.tower.air_temp:aoss.tower.dewpoint:aoss.tower.rel_hum:mendota.buoy.air_temp:mendota.buoy.dewpoint:mendota.buoy.rel_hum:aoss.tower.wind_speed&begin=-00:10:00
http://localhost:5000/api/data.json?symbols=aoss.tower.air_temp:aoss.tower.dewpoint:aoss.tower.rel_hum:mendota.buoy.air_temp:mendota.buoy.dewpoint:mendota.buoy.rel_hum:aoss.tower.wind_speed&begin=-00:10:00&callback=foo
http://localhost:5000/api/data.xml?symbols=aoss.tower.air_temp:aoss.tower.dewpoint:aoss.tower.rel_hum:mendota.buoy.air_temp:mendota.buoy.dewpoint:mendota.buoy.rel_hum:aoss.tower.wind_speed&begin=-00:10:00
http://localhost:5000/api/data.jsonp?symbols=aoss.tower.air_temp:aoss.tower.dewpoint:aoss.tower.rel_hum:mendota.buoy.air_temp:mendota.buoy.dewpoint:mendota.buoy.rel_hum:aoss.tower.wind_speed&begin=-00:10:00&callback=foo
http://localhost:5000/api/data.jsonp?site=aoss&inst=tower&symbols=air_temp:dewpoint:rel_hum:wind_speed:wind_direction:accum_precip:solar_flux&begin=-00:10:00&callback=foo
http://localhost:5000/api/data.json?site=aoss&inst=tower&symbols=air_temp:dewpoint:rel_hum:wind_speed:wind_direction:accum_precip:solar_flux&begin=-00:10:00&callback=foo
http://localhost:5000/api/data.ascii?site=aoss&inst=tower&symbols=air_temp:dewpoint:rel_hum:wind_speed:wind_direction:accum_precip:solar_flux&begin=-00:10:00&callback=foo
http://localhost:5000/api/data.xml?site=aoss&inst=tower&symbols=air_temp:dewpoint:rel_hum:wind_speed:wind_direction:accum_precip:solar_flux&begin=-00:10:00&callback=foo
//400 error
http://localhost:5000/api/data.xml?site=aoss&symbols=air_temp:dewpoint:rel_hum:wind_speed:wind_direction:accum_precip:solar_flux&begin=-00:10:00&callback=foo
//404 error
http://localhost:5000/api/data.xml?site=aoss&inst=buoy&symbols=air_temp:dewpoint:rel_hum:wind_speed:wind_direction:accum_precip:solar_flux&begin=-00:10:00&callback=foo
//400 error
http://localhost:5000/api/data.xml?site=aoss&inst=tower&symbols=aoss.air_temp:dewpoint:rel_hum:wind_speed:wind_direction:accum_precip:solar_flux&begin=-00:10:00&callback=foo
http://localhost:5000/api/data.jsonp?site=aoss&inst=tower&symbols=air_temp:dewpoint:rel_hum:wind_speed:wind_direction:accum_precip:solar_flux&begin=-00:10:00&callback=foo&jsonFormat=column
http://localhost:5000/api/data.json?site=aoss&inst=tower&symbols=air_temp:dewpoint:rel_hum:wind_speed:wind_direction:accum_precip:solar_flux&begin=-00:10:00&callback=foo&jsonFormat=column
http://localhost:5000/api/data.json?symbols=aoss.tower.air_temp:aoss.tower.dewpoint:aoss.tower.rel_hum:mendota.buoy.air_temp:mendota.buoy.dewpoint:mendota.buoy.rel_hum:aoss.tower.wind_speed&begin=-00:10:00&callback=foo&jsonFormat=column
http://localhost:5000/api/data.jsonp?symbols=aoss.tower.air_temp:aoss.tower.dewpoint:aoss.tower.rel_hum:mendota.buoy.air_temp:mendota.buoy.dewpoint:mendota.buoy.rel_hum:aoss.tower.wind_speed&begin=-00:10:00&callback=foo&jsonFormat=column
http://localhost:5000/api/data.ascii?site=aoss&inst=tower&symbols=air_temp:dewpoint:rel_hum:wind_speed:wind_direction:accum_precip:solar_flux&begin=-00:10:00&callback=foo&jsonFormat=column&epoch=ms
http://localhost:5000/api/data.xml?site=aoss&inst=tower&symbols=air_temp:dewpoint:rel_hum:wind_speed:wind_direction:accum_precip:solar_flux&begin=-00:10:00&callback=foo&jsonFormat=column&epoch=ms
http://localhost:5000/api/data.ascii?symbols=aoss.tower.air_temp:aoss.tower.dewpoint:aoss.tower.rel_hum:mendota.buoy.air_temp:mendota.buoy.dewpoint:mendota.buoy.rel_hum:aoss.tower.wind_speed&begin=-00:10:00&callback=foo&jsonFormat=column&epoch=ms
http://localhost:5000/api/data.xml?symbols=aoss.tower.air_temp:aoss.tower.dewpoint:aoss.tower.rel_hum:mendota.buoy.air_temp:mendota.buoy.dew_point:mendota.buoy.rel_hum:aoss.tower.wind_speed&begin=-00:10:00&callback=foo&jsonFormat=column&epoch=ms
\ No newline at end of file
@@ -92,7 +92,7 @@ class TestDataAPI(unittest.TestCase):
self.assertIn('\'site\'', res['message'])
self.assertIn('\'inst\'', res['message'])
@mock.patch('metobsapi.modifyData.query')
@mock.patch('metobsapi.data_api.query')
def test_shorthand_one_symbol_json_row(self, query_func):
r = fake_data('1m', {('aoss', 'tower'): ['time', 'air_temp']}, 9)
query_func.return_value = r
@@ -106,7 +106,7 @@ class TestDataAPI(unittest.TestCase):
self.assertEqual(len(res['results']['data']), 9)
self.assertEqual(len(res['results']['data'][0]), 1)
@mock.patch('metobsapi.modifyData.query')
@mock.patch('metobsapi.data_api.query')
def test_shorthand_one_symbol_json_column(self, query_func):
r = fake_data('1m', {('aoss', 'tower'): ['time', 'air_temp']}, 9)
query_func.return_value = r
@@ -118,7 +118,7 @@ class TestDataAPI(unittest.TestCase):
self.assertEqual(len(res['results']['data']['aoss.tower.air_temp']), 9)
self.assertEqual(len(res['results']['timestamps']), 9)
@mock.patch('metobsapi.modifyData.query')
@mock.patch('metobsapi.data_api.query')
def test_wind_speed_direction_json(self, query_func):
r = fake_data('1m', {('aoss', 'tower'): ['time', 'wind_speed', 'wind_direction', 'wind_east', 'wind_north']}, 9)
query_func.return_value = r
@@ -130,7 +130,7 @@ class TestDataAPI(unittest.TestCase):
self.assertIn('aoss.tower.wind_speed', res['results']['data'])
self.assertEqual(len(list(res['results']['data'].keys())), 2)
@mock.patch('metobsapi.modifyData.query')
@mock.patch('metobsapi.data_api.query')
def test_one_symbol_two_insts_json_row(self, query_func):
r = fake_data('1m', {
('aoss', 'tower'): ['time', 'air_temp'],
@@ -147,7 +147,7 @@ class TestDataAPI(unittest.TestCase):
self.assertEqual(len(res['results']['data']), 9)
self.assertEqual(len(res['results']['data'][0]), 2)
@mock.patch('metobsapi.modifyData.query')
@mock.patch('metobsapi.data_api.query')
def test_one_symbol_three_insts_json_row(self, query_func):
r = fake_data('1m', {
('site1', 'inst1'): ['time', 'air_temp'],
@@ -171,7 +171,7 @@ class TestDataAPI(unittest.TestCase):
self.assertEqual(len(res['results']['data']), 9)
self.assertEqual(len(res['results']['data'][0]), 3)
@mock.patch('metobsapi.modifyData.query')
@mock.patch('metobsapi.data_api.query')
def test_one_symbol_csv(self, query_func):
r = fake_data('1m', {('aoss', 'tower'): ['time', 'air_temp']}, 9)
query_func.return_value = r
@@ -182,7 +182,7 @@ class TestDataAPI(unittest.TestCase):
self.assertEqual(len(res.split('\n')), 5 + 9 + 1)
self.assertIn("# code: 200", res)
@mock.patch('metobsapi.modifyData.query')
@mock.patch('metobsapi.data_api.query')
def test_one_symbol_xml(self, query_func):
from xml.dom.minidom import parseString
r = fake_data('1m', {('aoss', 'tower'): ['time', 'air_temp']}, 9)
......
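
The tests now patch metobsapi.data_api.query because the InfluxDB call is imported into data_api from metobsapi.util.query_influx; patching the old metobsapi.modifyData path would no longer intercept anything. A schematic test showing that pattern, assuming the metobsapi package is importable; the fake return value is invented:

import unittest
from unittest import mock


class TestQueryPatch(unittest.TestCase):
    # Patch the name where it is looked up (data_api), not where it is defined.
    @mock.patch('metobsapi.data_api.query')
    def test_query_is_intercepted(self, query_func):
        from metobsapi import data_api
        query_func.return_value = 'fake-influx-result'
        self.assertEqual(data_api.query('SELECT ...', 'ms'), 'fake-influx-result')


if __name__ == '__main__':
    unittest.main()
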
from xml.dom.minidom import Document
def createXML(code, message):
doc = Document()
header = 'metobs'
head = doc.createElement(header)
head.setAttribute('status', 'error')
head.setAttribute('code', code)
head.setAttribute('message', message)
head.setAttribute('num_results', '0')
doc.appendChild(head)
return doc.toprettyxml(indent=" ", encoding="utf-8")
def createJSON(code, message):
json = {}
json['status'] = 'error'
json['code'] = code
json['num_results'] = 0
json['message'] = message
return json
def createCSV(code, message):
returnString = '# status: error<br>'
returnString += '# code: ' + str(code) + '<br>'
returnString += '# num_results: 0<br># message: ' + message
return returnString
SYMBOL_TRANSLATIONS = {
('aoss', 'tower'): {
'air_temp': 'air_temp',
@@ -100,63 +65,10 @@ INTERVALS = {
'24h': 60 * 60 * 24,
}
resources = 'menodota buoy, and the aoss tower.'
json_500 = createJSON(400, 'missing site or instrument parameter')
csv_500 = createCSV(400, 'missing site or instrument parameter')
xml_500 = createXML('400', 'missing site or instrument parameter')
json_404 = createJSON(404, 'the resource could not be found - The current resources supported are the ' + resources)
csv_404 = createCSV(404, 'the resource could not be found - The current resources supported are the ' + resources)
xml_404 = createXML('404', 'the resource could not be found - The current resources supported are the ' + resources)
json_stamp = createJSON(400, 'could not parse timestamp, check format')
csv_stamp = createCSV(400, 'could not parse timestamp, check format')
xml_stamp = createXML('400', 'could not parse timestamp, check format')
json_mal = createJSON(400, 'server could not recognize if request is single-site or multi-site request')
csv_mal = createCSV(400, 'server could not recognize if request is single-site or multi-site request')
xml_mal = createXML('400', 'server could not recognize if request is single-site or multi-site request')
json_symbol = createJSON(400, 'server could not recognize symbol')
csv_symbol = createCSV(400, 'server could not recognize symbol')
xml_symbol = createXML('400', 'server could not recognize symbol')
json_413 = createJSON(413, 'too many records requested. Please pick an earlier end date, a later start date, or a bigger interval')
csv_413 = createCSV(413, 'too many records requested. Please pick an earlier end date, a later start date, or a bigger interval')
xml_413 = createXML('413', 'too many records requested. Please pick an earlier end date, a later start date, or a bigger interval')
json_interval = createJSON(400, 'the interval given was not recognized')
csv_interval = createCSV(400, 'the interval given was not recognized')
xml_interval = createXML('400', 'the interval given was not recognized')
json_missingsymbol = createJSON(400, 'missing symbols parameter')
csv_misingsymbol = createCSV(400, 'missing symbols parameter')
xml_missingsymbol = createXML('400', 'missing symbols parameter')
json_unsupported_symbol = createJSON(400, 'missing symbols parameter')
csv__unsupported_symbol = createCSV(400, 'missing symbols parameter')
xml__unsupported_symbol = createXML('400', 'missing symbols parameter')
json_epoch = createJSON(400, 'the epoch value given is not supported - currently only h, m, s, ms, u, and ns are supported')
csv_epoch = createCSV(400, 'the epoch value given is not supported - currently only h, m, s, ms, u, and ns are supported')
xml_epoch = createXML('400', 'the epoch value given is not supported - currently only h, m, s, ms, u, and ns are supported')
json_json_format = createJSON(400, 'the json format given is not supported - currently only row and column are supported')
csv_json_format = createCSV(400, 'the json format given is not supported - currently only row and column are supported')
xml_json_format = createXML('400', 'the json format given is not supported - currently only row and column are supported')
api_version_header = "X-Metobs-API-VERSION"
api_version = 1.0
RESPONSES_LIMIT = 50000
ERROR_HANDLERS = {
'csv': createCSV,
'sh': createCSV,
'json': createJSON,
}
ERROR_MESSAGES = {
# FIXME: These are the file API error messages
'bad_order': (400, '\'order\' can only be \'column\' or \'row\' (default)'),
......
from xml.dom.minidom import Document
def create_xml(code, message):
doc = Document()
header = 'metobs'
head = doc.createElement(header)
head.setAttribute('status', 'error')
head.setAttribute('code', code)
head.setAttribute('message', message)
head.setAttribute('num_results', '0')
doc.appendChild(head)
return doc.toprettyxml(indent=" ", encoding="utf-8")
def create_json(code, message):
json = {}
json['status'] = 'error'
json['code'] = code
json['num_results'] = 0
json['message'] = message
return json
def create_csv(code, message):
returnString = '# status: error<br>'
returnString += '# code: ' + str(code) + '<br>'
returnString += '# num_results: 0<br># message: ' + message
return returnString
def create_bat(code, message):
returnString = ':: status: error<br>'
returnString += ':: code: ' + str(code) + '<br>'
returnString += ':: num_results: 0<br># message: ' + message
return returnString
ERROR_HANDLERS = {
'csv': create_csv,
'xml': create_xml,
'json': create_json,
'bat': create_bat,
}
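
Both APIs now import their error formatters from this shared metobsapi.util.error_handlers module, and each handle_error() looks the requested format up in ERROR_HANDLERS, falling back to the 400.html template. A minimal sketch of that lookup outside of Flask; create_csv/create_json follow the definitions above, and the fallback string merely stands in for render_template:

def create_json(code, message):
    return {'status': 'error', 'code': code, 'num_results': 0, 'message': message}

def create_csv(code, message):
    return ('# status: error<br># code: {}<br>'
            '# num_results: 0<br># message: {}'.format(code, message))

ERROR_HANDLERS = {
    'csv': create_csv,
    'json': create_json,
}

def handle_error(fmt, error_str):
    try:
        handler = ERROR_HANDLERS[fmt]
    except KeyError:
        # The real code returns render_template('400.html', format=fmt), 400
        return 'unsupported format: {}'.format(fmt), 400
    return handler(400, error_str), 400

print(handle_error('json', 'could not parse timestamp, check format'))
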
import os
from collections import defaultdict
from metobsapi.util.data_responses import createCSV, createJSON
from metobsapi.util import ProductFrequency
def createBAT(code, message):
returnString = ':: status: error<br>'
returnString += ':: code: ' + str(code) + '<br>'
returnString += ':: num_results: 0<br># message: ' + message
return returnString
# Directory format for the type of data file frequency
FREQUENCY_DIR_FMT = {
ProductFrequency.DAILY_DIR: os.path.join('%Y', '%m', '%d'),
@@ -186,14 +176,6 @@ for site, site_info in ARCHIVE_INFO.items():
ARCHIVE_STREAMS[all_patterns_recent_id] = all_products
INSTRUMENT_STREAMS[inst_name].append(all_patterns_recent_id)
ERROR_HANDLERS = {
'csv': createCSV,
'sh': createCSV,
'bat': createBAT,
'json': createJSON,
}
ERROR_MESSAGES = {
'datetime_error': (400, 'missing begin or end time parameters'),
'malformed_string': (400, 'could not parse timestamp, check format'),
......
from influxdb import InfluxDBClient
from io import StringIO
from datetime import datetime, timedelta
from datetime import timedelta
from flask import current_app
QUERY_FORMAT = "SELECT {symbol_list} FROM metobs.one_year.metobs_{interval} WHERE {where_clause} GROUP BY site,inst"
@@ -14,13 +14,7 @@ def parse_dt(d):
return d.strftime('%Y-%m-%dT%H:%M:%SZ')
def query(site, inst, symbols, begin, end, value, epoch):
host = 'metobs01'
port = 8086
username = 'root'
password = 'root'
DB = 'metobs'
def build_queries(site, inst, symbols, begin, end, value):
if begin is None:
begin = timedelta(minutes=2)
begin = parse_dt(begin)
@@ -42,11 +36,14 @@ def query(site, inst, symbols, begin, end, value, epoch):
)
queries.append(query)
queries = "; ".join(queries)
return queries
# query = StringIO(query)
print(queries)
client = InfluxDBClient(host, port, username, password, DB)
result = client.query(queries, epoch=epoch)
return result
\ No newline at end of file
def query(query_str, epoch):
client = InfluxDBClient(
current_app.config['INFLUXDB_HOST'],
current_app.config['INFLUXDB_PORT'],
current_app.config['INFLUXDB_USER'],
current_app.config['INFLUXDB_PASS'],
current_app.config['INFLUXDB_DB'])
return client.query(query_str, epoch=epoch)
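
With this split, callers first build the "; "-joined SELECT statements and then hand them to query(), which now reads its connection settings from the Flask app config instead of hard-coded credentials, so it must run inside an application or request context. A sketch of the two-step call as used in modify_data, assuming the metobsapi package is importable; the argument values are placeholders, and the exact symbols structure is whatever handle_symbols produces upstream:

from flask import Flask
from metobsapi.util.query_influx import build_queries, query

app = Flask(__name__)
app.config.update(INFLUXDB_HOST='metobs01', INFLUXDB_PORT=8086,
                  INFLUXDB_USER='root', INFLUXDB_PASS='root',
                  INFLUXDB_DB='metobs')

def run_query(site, inst, influx_symbols, begin, end, interval, epoch='ms'):
    # site/inst/influx_symbols/begin/end/interval come from the request
    # parsing in data_api.modify_data; they are not reproduced here.
    with app.app_context():
        queries = build_queries(site, inst, influx_symbols, begin, end, interval)
        return query(queries, epoch)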