From 2ff638b56ae72e4b6a606cdb28a230d2c3a4802a Mon Sep 17 00:00:00 2001
From: David Hoese <david.hoese@ssec.wisc.edu>
Date: Sat, 25 Feb 2023 15:04:41 -0600
Subject: [PATCH] Add pyproject.toml and .pre-commit-config.yaml

---
 .bandit                               |   3 +
 .git_archival.txt                     |   1 +
 .gitattributes                        |   1 +
 .gitignore                            |   2 +
 .pre-commit-config.yaml               |  49 +++++
 etc/systemd/system/metobs_api.service |   2 +-
 etc/systemd/system/metobs_api.socket  |   2 +-
 etc/tmpfiles.d/metobs_api.conf        |   2 +-
 metobsapi/__init__.py                 |   4 +-
 metobsapi/common_config.py            |  16 +-
 metobsapi/data_api.py                 | 187 ++++++++--------
 metobsapi/files_api.py                | 110 +++++-----
 metobsapi/orderForm/hidden.css        |   2 +-
 metobsapi/orderForm/main.js           | 118 +++++------
 metobsapi/orderForm/order_form.html   |  20 +-
 metobsapi/server.py                   | 128 +++++------
 metobsapi/static/sidebar.css          |   2 +-
 metobsapi/templates/400.html          |   2 +-
 metobsapi/templates/404.html          |   4 +-
 metobsapi/templates/500.html          |   2 +-
 metobsapi/templates/data_index.html   |  58 ++---
 metobsapi/templates/files_index.html  |  54 ++---
 metobsapi/templates/index.html        |  14 +-
 metobsapi/templates/status_index.html |  14 +-
 metobsapi/tests/test_data_api.py      | 282 ++++++++++++------------
 metobsapi/tests/test_files_api.py     | 115 +++++-----
 metobsapi/tests/test_misc.py          |  35 +--
 metobsapi/util/__init__.py            |  45 ++--
 metobsapi/util/data_responses.py      | 141 ++++++------
 metobsapi/util/file_responses.py      | 294 +++++++++++++-------------
 metobsapi/util/query_influx.py        |  22 +-
 metobsapi_development.py              |   1 +
 pyproject.toml                        |  68 ++++++
 setup.cfg                             |   9 +
 setup.py                              |  22 --
 35 files changed, 990 insertions(+), 841 deletions(-)
 create mode 100644 .bandit
 create mode 100644 .git_archival.txt
 create mode 100644 .gitattributes
 create mode 100644 .pre-commit-config.yaml
 create mode 100644 pyproject.toml
 create mode 100644 setup.cfg
 delete mode 100644 setup.py

diff --git a/.bandit b/.bandit
new file mode 100644
index 0000000..9ccd928
--- /dev/null
+++ b/.bandit
@@ -0,0 +1,3 @@
+[bandit]
+skips: B506
+exclude: metobsapi/tests
diff --git a/.git_archival.txt b/.git_archival.txt
new file mode 100644
index 0000000..95cb3ee
--- /dev/null
+++ b/.git_archival.txt
@@ -0,0 +1 @@
+ref-names: $Format:%D$
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000..00a7b00
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1 @@
+.git_archival.txt  export-subst
diff --git a/.gitignore b/.gitignore
index e2ba1a9..439f1e6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -110,3 +110,5 @@ sftp-config.json
 #all DS stores
 *.DS_Store
 .idea/
+
+metobsapi/version.py
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..faf43a6
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,49 @@
+exclude: '^$'
+fail_fast: false
+repos:
+  - repo: https://github.com/psf/black
+    rev: 23.1.0
+    hooks:
+      - id: black
+        language_version: python3
+        args:
+          - --target-version=py310
+  - repo: https://github.com/pycqa/isort
+    rev: 5.12.0
+    hooks:
+      - id: isort
+        language_version: python3
+  - repo: https://github.com/PyCQA/flake8
+    rev: 6.0.0
+    hooks:
+      - id: flake8
+        additional_dependencies: [flake8-docstrings, flake8-debugger, flake8-bugbear, mccabe]
+        args: [--max-complexity, "10"]
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.4.0
+    hooks:
+      - id: trailing-whitespace
+      - id: end-of-file-fixer
+      - id: check-yaml
+  - repo: https://github.com/scop/pre-commit-shfmt
+    rev: v3.6.0-1
+    hooks:
+      # Choose one of:
+      - id: shfmt         # native (requires Go to build)
+        args: ["-i", "4"]
+      #- id: shfmt-docker  # Docker image (requires Docker to run)
+      #
+  - repo: https://github.com/PyCQA/bandit
+    rev: '1.7.4'
+    hooks:
+      - id: bandit
+        args: [--ini, .bandit]
+  - repo: https://github.com/pre-commit/mirrors-mypy
+    rev: 'v1.0.1'  # Use the sha / tag you want to point at
+    hooks:
+      - id: mypy
+        additional_dependencies:
+          - types-docutils
+          - types-pkg-resources
+          - types-PyYAML
+          - types-requests
diff --git a/etc/systemd/system/metobs_api.service b/etc/systemd/system/metobs_api.service
index 2199108..bb7fb11 100644
--- a/etc/systemd/system/metobs_api.service
+++ b/etc/systemd/system/metobs_api.service
@@ -18,4 +18,4 @@ PrivateTmp=true
 Restart=always
 
 [Install]
-WantedBy=multi-user.target
\ No newline at end of file
+WantedBy=multi-user.target
diff --git a/etc/systemd/system/metobs_api.socket b/etc/systemd/system/metobs_api.socket
index 69166bf..dcf1c1e 100644
--- a/etc/systemd/system/metobs_api.socket
+++ b/etc/systemd/system/metobs_api.socket
@@ -6,4 +6,4 @@ Description=Socket used by the MetObs API application
 ListenStream=8090
 
 [Install]
-WantedBy=sockets.target
\ No newline at end of file
+WantedBy=sockets.target
diff --git a/etc/tmpfiles.d/metobs_api.conf b/etc/tmpfiles.d/metobs_api.conf
index e969fae..3dddc72 100644
--- a/etc/tmpfiles.d/metobs_api.conf
+++ b/etc/tmpfiles.d/metobs_api.conf
@@ -1 +1 @@
-d /run/metobs_api 0755 daemon metobsgrp -
\ No newline at end of file
+d /run/metobs_api 0755 daemon metobsgrp -
diff --git a/metobsapi/__init__.py b/metobsapi/__init__.py
index e67a141..e9c169d 100644
--- a/metobsapi/__init__.py
+++ b/metobsapi/__init__.py
@@ -1 +1,3 @@
-from metobsapi.server import app
\ No newline at end of file
+from metobsapi.server import app
+
+from .version import __version__  # noqa
diff --git a/metobsapi/common_config.py b/metobsapi/common_config.py
index 5ae3c3e..63280cb 100644
--- a/metobsapi/common_config.py
+++ b/metobsapi/common_config.py
@@ -1,15 +1,15 @@
 JSONIFY_PRETTYPRINT_REGULAR = False
 
-if 'SECRET_KEY' not in globals():
+if "SECRET_KEY" not in globals():
     # we don't do anything with cookies or sessions, set this somewhere secret in the future
-    SECRET_KEY = 'secret!'
+    SECRET_KEY = "secret!"
 
-ARCHIVE_ROOT = '/data1/cache'
-ARCHIVE_URL = 'http://metobs.ssec.wisc.edu/pub/cache'
+ARCHIVE_ROOT = "/data1/cache"
+ARCHIVE_URL = "http://metobs.ssec.wisc.edu/pub/cache"
 
 # InfluxDB Settings
-INFLUXDB_HOST = 'rain01'
+INFLUXDB_HOST = "rain01"
 INFLUXDB_PORT = 8086
-INFLUXDB_USER = 'root'
-INFLUXDB_PASS = 'root'
-INFLUXDB_DB = 'metobs'
+INFLUXDB_USER = "root"
+INFLUXDB_PASS = "root"
+INFLUXDB_DB = "metobs"
diff --git a/metobsapi/data_api.py b/metobsapi/data_api.py
index 49e682a..a753ebd 100644
--- a/metobsapi/data_api.py
+++ b/metobsapi/data_api.py
@@ -4,7 +4,7 @@ from xml.dom.minidom import Document
 
 import numpy as np
 import pandas as pd
-from flask import render_template, Response
+from flask import Response, render_template
 from flask_json import as_json_p
 
 from metobsapi.util import data_responses
@@ -14,13 +14,13 @@ LOG = logging.getLogger(__name__)
 
 
 ROUNDING = {
-    'aoss.tower.rel_hum': 0,
-    'aoss.tower.wind_direction': 0,
-    'aoss.tower.accum_precip': 2,
-    'mendota.buoy.rel_hum': 0,
-    'mendota.buoy.wind_direction': 0,
-    'mendota.buoy.chlorophyll_ysi': 2,
-    'mendota.buoy.phycocyanin_ysi': 2,
+    "aoss.tower.rel_hum": 0,
+    "aoss.tower.wind_direction": 0,
+    "aoss.tower.accum_precip": 2,
+    "mendota.buoy.rel_hum": 0,
+    "mendota.buoy.wind_direction": 0,
+    "mendota.buoy.chlorophyll_ysi": 2,
+    "mendota.buoy.phycocyanin_ysi": 2,
 }
 
 
@@ -42,11 +42,8 @@ def handle_date(date):
 
 def handle_time_string(date_string):
     if date_string[0] == "-":
-        times = [float(x) for x in date_string[1:].split(':')]
-        diff = timedelta(
-            hours=times[0],
-            minutes=times[1],
-            seconds=times[2])
+        times = [float(x) for x in date_string[1:].split(":")]
+        diff = timedelta(hours=times[0], minutes=times[1], seconds=times[2])
         return diff
     return handle_date(date_string)
 
@@ -62,7 +59,7 @@ def handle_symbols(symbols):
         handled_symbols.add(symbol)
 
         try:
-            site, inst, s = symbol.split('.')
+            site, inst, s = symbol.split(".")
             si = (site, inst)
         except ValueError:
             raise ValueError("Symbols must have 3 period-separated parts: {}".format(symbol))
@@ -85,7 +82,7 @@ def handle_symbols(symbols):
     # Add the symbols needed to compute the wind_speed and wind_direction
     for si in add_winds:
         ret[si][0].extend((None, None))
-        ret[si][1].extend(('wind_east', 'wind_north'))
+        ret[si][1].extend(("wind_east", "wind_north"))
 
     return ret
 
@@ -100,18 +97,20 @@ def handle_influxdb_result(result, symbols, interval):
             # single query statement results in a single ResultSet
             res = result
 
-        columns = ['time'] + influx_symbs
+        columns = ["time"] + influx_symbs
         if not res:
             frame = pd.DataFrame(columns=columns)
         else:
-            data_points = res.get_points('metobs_' + interval, tags={'site': si[0], 'inst': si[1]})
-            frame = pd.DataFrame(data_points, columns=['time'] + influx_symbs)
-        frame.set_index('time', inplace=True)
+            data_points = res.get_points("metobs_" + interval, tags={"site": si[0], "inst": si[1]})
+            frame = pd.DataFrame(data_points, columns=["time"] + influx_symbs)
+        frame.set_index("time", inplace=True)
         frame.fillna(value=np.nan, inplace=True)
         # remove wind components
-        if influx_symbs[-1] == 'wind_north' and 'wind_direction' in frame.columns:
-            frame['wind_direction'] = np.rad2deg(np.arctan2(frame['wind_east'], frame['wind_north']))
-            frame['wind_direction'] = frame['wind_direction'].where(frame['wind_direction'] > 0, frame['wind_direction'] + 360.)
+        if influx_symbs[-1] == "wind_north" and "wind_direction" in frame.columns:
+            frame["wind_direction"] = np.rad2deg(np.arctan2(frame["wind_east"], frame["wind_north"]))
+            frame["wind_direction"] = frame["wind_direction"].where(
+                frame["wind_direction"] > 0, frame["wind_direction"] + 360.0
+            )
             frame = frame.iloc[:, :-2]
             frame.columns = req_syms[:-2]
         else:
@@ -143,8 +142,7 @@ def calc_file_size(num_records, num_streams):
     return num_records * num_streams * 7.0
 
 
-def handle_csv(frame, epoch, sep=',',
-               message='', code=200, status='success', **kwargs):
+def handle_csv(frame, epoch, sep=",", message="", code=200, status="success", **kwargs):
     output = """# status: {status}
 # code: {code:d}
 # message: {message}
@@ -157,15 +155,13 @@ def handle_csv(frame, epoch, sep=',',
     line_format = sep.join(["{time}", "{symbols}"])
     if frame is not None and not frame.empty:
         for t, row in frame.iterrows():
-            line = line_format.format(
-                time=t,
-                symbols=sep.join(str(x) for x in row.values))
+            line = line_format.format(time=t, symbols=sep.join(str(x) for x in row.values))
             data_lines.append(line)
 
     if not epoch:
-        epoch_str = '%Y-%m-%dT%H:%M:%SZ'
+        epoch_str = "%Y-%m-%dT%H:%M:%SZ"
     else:
-        epoch_str = data_responses.epoch_translation[epoch] + ' since epoch (1970-01-01 00:00:00)'
+        epoch_str = data_responses.epoch_translation[epoch] + " since epoch (1970-01-01 00:00:00)"
 
     output = output.format(
         sep=sep,
@@ -174,18 +170,17 @@ def handle_csv(frame, epoch, sep=',',
         message=message,
         num_results=frame.shape[0] if frame is not None else 0,
         epoch_str=epoch_str,
-        symbol_list=sep.join(frame.columns) if frame is not None else '',
+        symbol_list=sep.join(frame.columns) if frame is not None else "",
         symbol_data="\n".join(data_lines),
     )
 
-    res = Response(output, mimetype='text/csv')
-    res.headers.set('Content-Disposition', 'attachment', filename='data.csv')
+    res = Response(output, mimetype="text/csv")
+    res.headers.set("Content-Disposition", "attachment", filename="data.csv")
     return res, code
 
 
 @as_json_p(optional=True)
-def handle_json(frame, epoch, order='columns',
-                message='', code=200, status='success', **kwargs):
+def handle_json(frame, epoch, order="columns", message="", code=200, status="success", **kwargs):
     package = {}
 
     if frame is not None and not frame.empty:
@@ -196,75 +191,74 @@ def handle_json(frame, epoch, order='columns',
         # replace NaNs with None
         frame = frame.where(pd.notnull(frame), None)
 
-        package['timestamps'] = frame.index.values
+        package["timestamps"] = frame.index.values
         if epoch:
             newStamps = []
-            for stamp in package['timestamps']:
+            for stamp in package["timestamps"]:
                 newStamps.append(float(stamp))
-            package['timestamps'] = newStamps
+            package["timestamps"] = newStamps
 
-        if order == 'column':
-            package['data'] = dict(frame)
+        if order == "column":
+            package["data"] = dict(frame)
         else:
-            package['symbols'] = frame.columns
-            package['data'] = [frame.iloc[i].values for i in range(frame.shape[0])]
+            package["symbols"] = frame.columns
+            package["data"] = [frame.iloc[i].values for i in range(frame.shape[0])]
             # package['data'] = frame.values
     else:
-        package['timestamps'] = []
-        if order == 'column':
-            package['data'] = {}
+        package["timestamps"] = []
+        if order == "column":
+            package["data"] = {}
         else:
-            package['data'] = []
-            package['symbols'] = []
+            package["data"] = []
+            package["symbols"] = []
 
     output = {
-        'status': status,
-        'message': message,
-        'code': code,
-        'num_results': frame.shape[0] if frame is not None else 0,
-        'results': package,
+        "status": status,
+        "message": message,
+        "code": code,
+        "num_results": frame.shape[0] if frame is not None else 0,
+        "results": package,
     }
     return output, code
 
 
-def handle_xml(frame, epoch, sep=',',
-               message='', code=200, status='success', **kwargs):
+def handle_xml(frame, epoch, sep=",", message="", code=200, status="success", **kwargs):
     doc = Document()
-    header = 'metobs'
+    header = "metobs"
 
     head = doc.createElement(header)
-    head.setAttribute('status', status)
-    head.setAttribute('code', str(code))
-    head.setAttribute('message', message)
-    head.setAttribute('num_results', str(frame.shape[0]) if frame is not None else str(0))
-    head.setAttribute('seperator', sep)
-    data_elem = doc.createElement('data')
+    head.setAttribute("status", status)
+    head.setAttribute("code", str(code))
+    head.setAttribute("message", message)
+    head.setAttribute("num_results", str(frame.shape[0]) if frame is not None else str(0))
+    head.setAttribute("seperator", sep)
+    data_elem = doc.createElement("data")
 
     doc.appendChild(head)
-    columns_elem = doc.createElement('symbols')
+    columns_elem = doc.createElement("symbols")
 
-    time_elem = doc.createElement('symbol')
-    time_elem.setAttribute('name', 'time')
-    time_elem.setAttribute('short_name', 'time')
+    time_elem = doc.createElement("symbol")
+    time_elem.setAttribute("name", "time")
+    time_elem.setAttribute("short_name", "time")
     if not epoch:
-        time_elem.setAttribute('format', '%Y-%m-%dT%H:%M:%SZ')
+        time_elem.setAttribute("format", "%Y-%m-%dT%H:%M:%SZ")
     else:
-        time_elem.setAttribute('format', data_responses.epoch_translation[epoch] + ' since epoch (1970-01-01 00:00:00)')
+        time_elem.setAttribute("format", data_responses.epoch_translation[epoch] + " since epoch (1970-01-01 00:00:00)")
     columns_elem.appendChild(time_elem)
 
     if frame is not None and not frame.empty:
         for c in frame.columns:
-            col_elem = doc.createElement('symbol')
-            col_elem.setAttribute('name', c)
-            parts = c.split('.')
-            col_elem.setAttribute('short_name', parts[2])
-            col_elem.setAttribute('site', parts[0])
-            col_elem.setAttribute('inst', parts[1])
+            col_elem = doc.createElement("symbol")
+            col_elem.setAttribute("name", c)
+            parts = c.split(".")
+            col_elem.setAttribute("short_name", parts[2])
+            col_elem.setAttribute("site", parts[0])
+            col_elem.setAttribute("inst", parts[1])
             columns_elem.appendChild(col_elem)
 
         for idx, (t, row) in enumerate(frame.iterrows()):
-            row_elem = doc.createElement('row')
-            row_elem.setAttribute('id', str(idx))
+            row_elem = doc.createElement("row")
+            row_elem.setAttribute("id", str(idx))
             row_elem.appendChild(doc.createTextNode(str(t)))
             for point in row:
                 row_elem.appendChild(doc.createTextNode(sep))
@@ -273,58 +267,57 @@ def handle_xml(frame, epoch, sep=',',
     head.appendChild(columns_elem)
     head.appendChild(data_elem)
     txt = doc.toxml(encoding="utf-8")
-    res = Response(txt, mimetype='text/xml')
-    res.headers.set('Content-Disposition', 'attachment', filename='data.xml')
+    res = Response(txt, mimetype="text/xml")
+    res.headers.set("Content-Disposition", "attachment", filename="data.xml")
     return res, code
 
 
 def handle_error(fmt, error_str):
     handler = RESPONSE_HANDLERS[fmt]
     err_code, err_msg = data_responses.ERROR_MESSAGES.get(error_str, (400, error_str))
-    res = handler(None, None, message=err_msg, code=err_code, status='error')
+    res = handler(None, None, message=err_msg, code=err_code, status="error")
     return res
 
 
 RESPONSE_HANDLERS = {
-    'csv': handle_csv,
-    'xml': handle_xml,
-    'json': handle_json,
+    "csv": handle_csv,
+    "xml": handle_xml,
+    "json": handle_json,
 }
 
 
-def modify_data(fmt, begin, end, site, inst, symbols, interval,
-                sep=',', order='columns', epoch=None):
+def modify_data(fmt, begin, end, site, inst, symbols, interval, sep=",", order="columns", epoch=None):
     if fmt not in RESPONSE_HANDLERS:
-        return render_template('400.html', format=fmt), 400
+        return render_template("400.html", format=fmt), 400
 
     try:
         # these will be either datetime or timedelta objects
         begin = handle_time_string(begin) if begin else None
         end = handle_time_string(end) if end else None
     except (TypeError, ValueError):
-        return handle_error(fmt, 'malformed_timestamp')
+        return handle_error(fmt, "malformed_timestamp")
 
-    if order not in ('column', 'row'):
-        return handle_error(fmt, 'bad_order')
+    if order not in ("column", "row"):
+        return handle_error(fmt, "bad_order")
     if epoch and epoch not in data_responses.epoch_translation:
-        return handle_error(fmt, 'bad_epoch')
+        return handle_error(fmt, "bad_epoch")
     if not symbols:
-        return handle_error(fmt, 'missing_symbols')
+        return handle_error(fmt, "missing_symbols")
     if not interval:
-        interval = '1m'
+        interval = "1m"
     elif interval not in data_responses.INTERVALS:
-        return handle_error(fmt, 'bad_interval')
+        return handle_error(fmt, "bad_interval")
 
     if site and inst:
         # shorthand for symbols that all use the same site and inst
-        short_symbols = symbols.split(':')
+        short_symbols = symbols.split(":")
         symbols = ["{}.{}.{}".format(site, inst, s) for s in short_symbols]
     elif not site and not inst:
         # each symbol is fully qualified with site.inst.symbol
         short_symbols = None
-        symbols = symbols.split(':')
+        symbols = symbols.split(":")
     else:
-        return handle_error(fmt, 'missing_site_inst')
+        return handle_error(fmt, "missing_site_inst")
 
     try:
         influx_symbols = handle_symbols(symbols)
@@ -334,12 +327,12 @@ def modify_data(fmt, begin, end, site, inst, symbols, interval,
     if calc_num_records(begin, end, interval) > data_responses.RESPONSES_LIMIT:
         message = "Request will return too many values, please use files API"
         code = 413
-        status = 'fail'
+        status = "fail"
         result = None
     else:
         message = ""
         code = 200
-        status = 'success'
+        status = "success"
         queries = build_queries(influx_symbols, begin, end, interval)
         result = query(queries, epoch)
 
@@ -351,6 +344,4 @@ def modify_data(fmt, begin, end, site, inst, symbols, interval,
         frame.columns = short_symbols
 
     handler = RESPONSE_HANDLERS[fmt]
-    return handler(frame, epoch,
-                   sep=sep, order=order,
-                   status=status, code=code, message=message)
+    return handler(frame, epoch, sep=sep, order=order, status=status, code=code, message=message)
diff --git a/metobsapi/files_api.py b/metobsapi/files_api.py
index bfae336..92d813a 100644
--- a/metobsapi/files_api.py
+++ b/metobsapi/files_api.py
@@ -1,14 +1,14 @@
-import os
 import logging
+import os
 from datetime import datetime
 from datetime import timedelta as delta
 
 import pandas as pd
-from flask import render_template, Response
+from flask import Response, render_template
 from flask_json import as_json_p
 
-from metobsapi.util import file_responses
 from metobsapi.data_api import handle_date
+from metobsapi.util import file_responses
 
 LOG = logging.getLogger(__name__)
 
@@ -21,7 +21,7 @@ def handle_begin_end(begin, end, dates):
     if not end:
         end = now
         end = end.replace(hour=0, minute=0, second=0, microsecond=0)
-    elif end[0] == '-':
+    elif end[0] == "-":
         end = int(end[1:])
         # FIXME: Can a relative time be more complex than just days?
         end = now - delta(days=end)
@@ -32,7 +32,7 @@ def handle_begin_end(begin, end, dates):
     if not begin:
         begin = now
         begin = begin.replace(hour=0, minute=0, second=0, microsecond=0)
-    elif begin[0] == '-':
+    elif begin[0] == "-":
         # begin is now days
         begin = int(begin[1:])
         # FIXME: Can a relative time be more complex than just days?
@@ -46,13 +46,14 @@ def handle_begin_end(begin, end, dates):
 
 def get_data(dates, streams, frame=True):
     from flask import current_app as app
+
     data = []
 
     for stream_id in streams:
         stream_info = file_responses.ARCHIVE_STREAMS.get(stream_id)
         if stream_info is None:
             # an error occurred
-            return stream_id, 'unknown_stream'
+            return stream_id, "unknown_stream"
 
         if isinstance(stream_info, (list, tuple)):
             # special wildcard stream_id
@@ -60,21 +61,21 @@ def get_data(dates, streams, frame=True):
             data.extend(res)
             continue
 
-        relpath = stream_info['relpath']
+        relpath = stream_info["relpath"]
         f_set = set()
         for dt in dates:
             path = dt.strftime(relpath)
-            pathname = os.path.join(app.config['ARCHIVE_ROOT'], path)
+            pathname = os.path.join(app.config["ARCHIVE_ROOT"], path)
             if os.path.exists(pathname) and pathname not in f_set:
                 f_set.add(pathname)
                 file_info = stream_info.copy()
-                file_info['filename'] = os.path.basename(pathname)
-                file_info['url'] = os.path.join(app.config['ARCHIVE_URL'], path)
-                file_info['size'] = os.stat(pathname).st_size
-                file_info['relpath'] = path
-                if file_info['thumbnail'] is not None:
-                    file_info['thumbnail'] = dt.strftime(file_info['thumbnail'])
-                    file_info['thumbnail'] = os.path.join(app.config['ARCHIVE_URL'], file_info['thumbnail'])
+                file_info["filename"] = os.path.basename(pathname)
+                file_info["url"] = os.path.join(app.config["ARCHIVE_URL"], path)
+                file_info["size"] = os.stat(pathname).st_size
+                file_info["relpath"] = path
+                if file_info["thumbnail"] is not None:
+                    file_info["thumbnail"] = dt.strftime(file_info["thumbnail"])
+                    file_info["thumbnail"] = os.path.join(app.config["ARCHIVE_URL"], file_info["thumbnail"])
                 data.append(file_info)
 
     if frame:
@@ -83,8 +84,8 @@ def get_data(dates, streams, frame=True):
         return data
 
 
-def handle_csv(frame, message='', code=200, status='success'):
-    columns = ['filename', 'url', 'site', 'inst', 'level', 'version', 'size']
+def handle_csv(frame, message="", code=200, status="success"):
+    columns = ["filename", "url", "site", "inst", "level", "version", "size"]
     output = """# status: {status}
 # code: {code:d}
 # message: {message}
@@ -98,7 +99,7 @@ def handle_csv(frame, message='', code=200, status='success'):
         frame = frame[columns]
         data_lines = []
         for row in frame.values:
-            data_lines.append(','.join(str(x) for x in row))
+            data_lines.append(",".join(str(x) for x in row))
     else:
         data_lines = []
     rows = "\n".join(data_lines)
@@ -112,12 +113,12 @@ def handle_csv(frame, message='', code=200, status='success'):
         rows=rows,
     )
 
-    res = Response(output, mimetype='text/csv')
-    res.headers.set('Content-Disposition', 'attachment', filename='files.csv')
+    res = Response(output, mimetype="text/csv")
+    res.headers.set("Content-Disposition", "attachment", filename="files.csv")
     return res, code
 
 
-def handle_sh(frame, message='', code=200, status='success'):
+def handle_sh(frame, message="", code=200, status="success"):
     output = """#!/usr/bin/env bash
 # status: {status}
 # code: {code:d}
@@ -147,14 +148,14 @@ EOF
         code=code,
         message=message,
         num_results=frame.shape[0] if frame is not None else 0,
-        url_list='\n'.join(str(x) for x in frame['url']) if frame is not None else '',
+        url_list="\n".join(str(x) for x in frame["url"]) if frame is not None else "",
     )
-    res = Response(output, mimetype='text/plain')
-    res.headers.set('Content-Disposition', 'attachment', filename='files.sh')
+    res = Response(output, mimetype="text/plain")
+    res.headers.set("Content-Disposition", "attachment", filename="files.sh")
     return res, code
 
 
-def handle_bat(frame, message='', code=200, status='success'):
+def handle_bat(frame, message="", code=200, status="success"):
     output = """@echo off
 
 REM status: {status}
@@ -177,12 +178,12 @@ bitsadmin /monitor
 """
 
     if frame is not None and not frame.empty:
-        urls = frame['url']
+        urls = frame["url"]
         directories = []
         commands = []
         for idx, url in enumerate(urls):
-            relpath = frame['relpath'][idx]
-            directory = os.path.dirname(relpath).replace('/', '\\')
+            relpath = frame["relpath"][idx]
+            directory = os.path.dirname(relpath).replace("/", "\\")
 
             if directory not in directories:
                 directories.append(directory)
@@ -190,16 +191,15 @@ bitsadmin /monitor
 if not exist %cd%\\data\\{directory} (
     mkdir %cd%\\{directory}
 )
-""".format(directory=directory)
+""".format(
+                    directory=directory
+                )
                 commands.append(dir_str)
             commands.append("bitsadmin /create myDownloadJob\n")
 
         for idx, url in enumerate(urls):
             url_str = "bitsadmin /addfile myDownloadJob {url} %cd%\\data\\{relpath}"
-            url_str = url_str.format(
-                url=url,
-                relpath=frame['relpath'][idx].replace('/', '\\')
-            )
+            url_str = url_str.format(url=url, relpath=frame["relpath"][idx].replace("/", "\\"))
             commands.append(url_str)
     else:
         commands = []
@@ -212,36 +212,36 @@ if not exist %cd%\\data\\{directory} (
         commands="\n".join(commands),
     )
     # windows line endings
-    output = output.replace('\n', '\r\n')
-    res = Response(output, mimetype='text/plain')
-    res.headers.set('Content-Disposition', 'attachment', filename='files.bat')
+    output = output.replace("\n", "\r\n")
+    res = Response(output, mimetype="text/plain")
+    res.headers.set("Content-Disposition", "attachment", filename="files.bat")
     return res, code
 
 
 @as_json_p(optional=True)
-def handle_json(frame, message='', code=200, status='success'):
+def handle_json(frame, message="", code=200, status="success"):
     output = {}
-    output['status'] = status
-    output['message'] = message
-    output['code'] = code
-    output['num_results'] = (len(list(frame.index))) if frame is not None else 0
+    output["status"] = status
+    output["message"] = message
+    output["code"] = code
+    output["num_results"] = (len(list(frame.index))) if frame is not None else 0
 
     if frame is not None and not frame.empty:
         body = []
         for row in frame.values:
             new_row = dict((k, row[idx]) for idx, k in enumerate(frame.columns))
             body.append(new_row)
-        output['data'] = body
+        output["data"] = body
     else:
-        output['data'] = []
+        output["data"] = []
     return output, code
 
 
 RESPONSE_HANDLERS = {
-    'csv': handle_csv,
-    'sh': handle_sh,
-    'bat': handle_bat,
-    'json': handle_json,
+    "csv": handle_csv,
+    "sh": handle_sh,
+    "bat": handle_bat,
+    "json": handle_json,
 }
 
 
@@ -250,34 +250,34 @@ def handle_error(fmt, error_str, stream_id=None):
     err_code, err_msg = file_responses.ERROR_MESSAGES[error_str]
     if stream_id is not None:
         err_msg += ": '{}'".format(stream_id)
-    res = handler(None, message=err_msg, code=err_code, status='error')
+    res = handler(None, message=err_msg, code=err_code, status="error")
     return res, err_code
 
 
 def find_stream_files(fmt, begin_time, end_time, dates, streams):
     if fmt not in RESPONSE_HANDLERS:
-        return render_template('400.html', format=fmt), 400
+        return render_template("400.html", format=fmt), 400
 
     if not streams:
-        return handle_error(fmt, 'missing_streams')
+        return handle_error(fmt, "missing_streams")
     elif isinstance(streams, str):
-        streams = streams.split(':')
+        streams = streams.split(":")
 
     if isinstance(dates, str):
-        dates = dates.split(':')
+        dates = dates.split(":")
 
     try:
         dates = handle_begin_end(begin_time, end_time, dates)
     except (TypeError, ValueError):
-        return handle_error(fmt, 'malformed_timestamp')
+        return handle_error(fmt, "malformed_timestamp")
 
     frame = get_data(dates, streams)
     if isinstance(frame, tuple):
         stream_id, error_type = frame
         return handle_error(fmt, error_type, stream_id=stream_id)
 
-    message = ''
-    status = 'success'
+    message = ""
+    status = "success"
     code = 200
     handler = RESPONSE_HANDLERS[fmt]
     return handler(frame, message=message, status=status, code=code)
diff --git a/metobsapi/orderForm/hidden.css b/metobsapi/orderForm/hidden.css
index 775a0e8..28d37fb 100644
--- a/metobsapi/orderForm/hidden.css
+++ b/metobsapi/orderForm/hidden.css
@@ -1,3 +1,3 @@
 .hidden {
 	display: none
-}
\ No newline at end of file
+}
diff --git a/metobsapi/orderForm/main.js b/metobsapi/orderForm/main.js
index 0a6636d..a3e4d67 100644
--- a/metobsapi/orderForm/main.js
+++ b/metobsapi/orderForm/main.js
@@ -5,29 +5,29 @@ var instruments = ['aoss_tower', 'mendota_buoy', 'aoss_aeri'];
 
 var inst_levels = {'aoss_tower': ['l00', 'la1'], 'mendota_buoy': ['l00', 'la1'], 'aoss_aeri': ['l00']};
 
-var streams = 
+var streams =
 {
-	'aoss_tower': 
+	'aoss_tower':
 	{
-	    'l00': ['ascii'], 
+	    'l00': ['ascii'],
 	    'la1': ['nc']
-	}, 
-	'mendota_buoy': 
+	},
+	'mendota_buoy':
 	{
-		'l00': ['ascii'], 
+		'l00': ['ascii'],
 		'la1': ['nc']
-	}, 
-    'aoss_aeri': 
+	},
+    'aoss_aeri':
     {
-    	'l00': 
+    	'l00':
     	[
-    	    'par', 
-    	    'qc', 
-    	    'sum', 
-    	    'b1_cxs', 
-    	    'b1_uvs', 
-    	    'b2_cxs', 
-    	    'b2_uvs', 
+    	    'par',
+    	    'qc',
+    	    'sum',
+    	    'b1_cxs',
+    	    'b1_uvs',
+    	    'b2_cxs',
+    	    'b2_uvs',
     	    'c1_rnc',
     	    'c2_rnc',
     	    'f1_rnc',
@@ -38,7 +38,7 @@ var streams =
             'y_scr'
     	]
     }
-}    
+}
 
 module.exports.instruments = instruments;
 module.exports.inst_levels = inst_levels;
@@ -85,7 +85,7 @@ var LevelDiv = React.createClass(
          	checkboxes.push(React.createElement(Checkbox, {key: this.props.levels[i], level: this.props.levels[i]}));
         }
 
-        return React.createElement("div", {id: "checkboxes"}, " ", checkboxes, " ") 	
+        return React.createElement("div", {id: "checkboxes"}, " ", checkboxes, " ")
 	}
 })
 
@@ -99,26 +99,26 @@ var fileDiv = React.createClass(
             checkboxes.push(React.createElement(Checkbox, {key: this.props.streams[i], level: this.props.streams[i]}));
         }
 
-        return React.createElement("div", {id: "files"}, " ", checkboxes, " ")     
+        return React.createElement("div", {id: "files"}, " ", checkboxes, " ")
     }
-})  
+})
 
 var levelStore = stores.levelStore;
 
 //when store recieves signal, update component
-levelStore.on('add', function(instName) 
+levelStore.on('add', function(instName)
 {
     levelStore.addLevels(constants.inst_levels[instName]);
-    
+
     var levels = levelStore.getLevels();
 
     ReactDOM.render(React.createElement(LevelDiv, {levels: levels}), document.getElementById('levels'));
 });
 
-levelStore.on('remove', function(instName) 
+levelStore.on('remove', function(instName)
 {
     levelStore.removeLevels(constants.inst_levels[instName]);
-    
+
     var levels = levelStore.getLevels();
 
     ReactDOM.render(React.createElement(LevelDiv, {levels: levels}), document.getElementById('levels'));
@@ -127,19 +127,19 @@ levelStore.on('remove', function(instName)
 var streamStore = stores.fileStore
 
 //when store recieves signal, update component
-streamStore.on('add', function(names) 
+streamStore.on('add', function(names)
 {
     streamStore.addNames(names);
-    
+
     var newNames = streamStore.getNames();
 
     ReactDOM.render(React.createElement(fileDiv, {streams: newNames}), document.getElementById('streams'));
 });
 
-streamStore.on('remove', function(names) 
+streamStore.on('remove', function(names)
 {
     streamStore.removeNames(names);
-    
+
     var names = streamStore.getNames();
 
     ReactDOM.render(React.createElement(fileDiv, {streams: names}), document.getElementById('streams'));
@@ -269,7 +269,7 @@ function removeStream(level)
             }
         }
     }
-    
+
     $('#' + level).unbind('click');
     $('#' + level).click(function() {createStream(level);});
 }
@@ -293,7 +293,7 @@ function createStream(level)
             }
         }
     }
-    
+
     $('#' + level).unbind('click');
     $('#' + level).click(function() {removeStream(level);});
 }
@@ -315,7 +315,7 @@ function dispatchAdd(instName)
 
     $('#checkboxes').find(':checkbox').each(function () {
         $(this).unbind('click');
-        $(this).click(function() 
+        $(this).click(function()
             {
                 createStream($(this).attr('id'));
             });
@@ -375,7 +375,7 @@ function check(levels, instruments, startDate, endDate, format, streams)
 	}
 }
 
-levelStore.on('check', function() 
+levelStore.on('check', function()
 {
     var levels = [];
 	$('#checkboxes input:checked').each(function () {
@@ -433,7 +433,7 @@ levelStore.on('check', function()
 
         if(levels.length <= 0)
         {
-            informUser += product 
+            informUser += product
         }
 
         if(instrument.length <= 0)
@@ -465,19 +465,19 @@ var ee = require('event-emitter');
 //creates checkbox store
 var levelEmitter = ee(
 {
-    init() 
+    init()
     {
         //keeps trace of where each plot is
         this.levels = [];
-        this.levelCounter = {};  
+        this.levelCounter = {};
     },
 
-    getLevels() 
+    getLevels()
     {
         return this.levels
     },
 
-    addLevels(levelArray) 
+    addLevels(levelArray)
     {
         for(var i = 0; i < levelArray.length; i++)
         {
@@ -536,26 +536,26 @@ var levelEmitter = ee(
             {
                 this.emit(action.status);
             }
-        }    
+        }
     }
 });
 
 //creates checkbox store
 var fileEmitter = ee(
 {
-    init() 
+    init()
     {
         //keeps trace of where each plot is
         this.names = [];
-        this.nameCounter = {};  
+        this.nameCounter = {};
     },
 
-    getNames() 
+    getNames()
     {
         return this.names;
     },
 
-    addNames(filenames) 
+    addNames(filenames)
     {
         for(var i = 0; i < filenames.length; i++)
         {
@@ -606,7 +606,7 @@ var fileEmitter = ee(
         if(action.store == 'file')
         {
             this.emit(action.status, action.names);
-        }    
+        }
     }
 });
 
@@ -1147,7 +1147,7 @@ module.exports.Dispatcher = require('./lib/Dispatcher');
  * of patent rights can be found in the PATENTS file in the same directory.
  *
  * @providesModule Dispatcher
- * 
+ *
  * @preventMunge
  */
 
@@ -29919,7 +29919,7 @@ module.exports = HTMLDOMPropertyConfig;
  * of patent rights can be found in the PATENTS file in the same directory.
  *
  * @providesModule KeyEscapeUtils
- * 
+ *
  */
 
 'use strict';
@@ -37608,7 +37608,7 @@ module.exports = ReactEventListener;
  * of patent rights can be found in the PATENTS file in the same directory.
  *
  * @providesModule ReactFeatureFlags
- * 
+ *
  */
 
 'use strict';
@@ -39062,7 +39062,7 @@ module.exports = ReactMultiChildUpdateTypes;
  * of patent rights can be found in the PATENTS file in the same directory.
  *
  * @providesModule ReactNodeTypes
- * 
+ *
  */
 
 'use strict';
@@ -40287,7 +40287,7 @@ module.exports = ReactServerRenderingTransaction;
  * of patent rights can be found in the PATENTS file in the same directory.
  *
  * @providesModule ReactServerUpdateQueue
- * 
+ *
  */
 
 'use strict';
@@ -43172,7 +43172,7 @@ module.exports = ViewportMetrics;
  * of patent rights can be found in the PATENTS file in the same directory.
  *
  * @providesModule accumulateInto
- * 
+ *
  */
 
 'use strict';
@@ -43232,7 +43232,7 @@ module.exports = accumulateInto;
  * of patent rights can be found in the PATENTS file in the same directory.
  *
  * @providesModule adler32
- * 
+ *
  */
 
 'use strict';
@@ -43682,7 +43682,7 @@ module.exports = findDOMNode;
  * of patent rights can be found in the PATENTS file in the same directory.
  *
  * @providesModule flattenChildren
- * 
+ *
  */
 
 'use strict';
@@ -43745,7 +43745,7 @@ module.exports = flattenChildren;
  * of patent rights can be found in the PATENTS file in the same directory.
  *
  * @providesModule forEachAccumulated
- * 
+ *
  */
 
 'use strict';
@@ -44042,7 +44042,7 @@ module.exports = getHostComponentFromComposite;
  * of patent rights can be found in the PATENTS file in the same directory.
  *
  * @providesModule getIteratorFn
- * 
+ *
  */
 
 'use strict';
@@ -44505,7 +44505,7 @@ module.exports = isEventSupported;
  * of patent rights can be found in the PATENTS file in the same directory.
  *
  * @providesModule isTextInputElement
- * 
+ *
  */
 
 'use strict';
@@ -44625,7 +44625,7 @@ module.exports = quoteAttributeValueForBrowser;
  * of patent rights can be found in the PATENTS file in the same directory.
  *
  * @providesModule reactProdInvariant
- * 
+ *
  */
 'use strict';
 
@@ -45603,7 +45603,7 @@ module.exports = camelizeStyleName;
  * LICENSE file in the root directory of this source tree. An additional grant
  * of patent rights can be found in the PATENTS file in the same directory.
  *
- * 
+ *
  */
 
 var isTextNode = require('./isTextNode');
@@ -45858,7 +45858,7 @@ module.exports = createNodesFromMarkup;
  * LICENSE file in the root directory of this source tree. An additional grant
  * of patent rights can be found in the PATENTS file in the same directory.
  *
- * 
+ *
  */
 
 function makeEmptyFunction(arg) {
@@ -46423,7 +46423,7 @@ module.exports = mapObject;
  * LICENSE file in the root directory of this source tree. An additional grant
  * of patent rights can be found in the PATENTS file in the same directory.
  *
- * 
+ *
  * @typechecks static-only
  */
 
@@ -46511,7 +46511,7 @@ module.exports = performanceNow;
  * of patent rights can be found in the PATENTS file in the same directory.
  *
  * @typechecks
- * 
+ *
  */
 
 /*eslint-disable no-self-compare */
diff --git a/metobsapi/orderForm/order_form.html b/metobsapi/orderForm/order_form.html
index 7ead33f..b9c77ba 100644
--- a/metobsapi/orderForm/order_form.html
+++ b/metobsapi/orderForm/order_form.html
@@ -5,7 +5,7 @@
 	<link rel="stylesheet" href="http://ajax.googleapis.com/ajax/libs/jqueryui/1.11.3/themes/smoothness/jquery-ui.css" type="text/css">
 	<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/jquery-timepicker/1.8.10/jquery.timepicker.min.css" type="text/css">
     <link rel="stylesheet" href="hidden.css" type="text/css">
-</head>    
+</head>
 
 <body>
     <div id="main" class="container-fluid">
@@ -26,13 +26,13 @@
                     	    </div>
                     	    <div class="col-md-6" style="float: right">
 						        &nbsp End Date: &nbsp  <input id="End-date-picker" type="text"/>
-						    </div>    
+						    </div>
 				        </p>
                     </div>
                 </div>
             </div>
-        </div>    
-                
+        </div>
+
         <div class="row" id="row2">
             <div class="col-md-8 col-md-offset-2">
                 <div class="panel panel-default">
@@ -85,7 +85,7 @@
         </div>
         <div class="row" id="row3">
             <div class="col-md-8 col-md-offset-2">
-                <p>Format: 
+                <p>Format:
                     <select id="format">
                         <option selected>--</option>
                         <option> CSV </option>
@@ -98,17 +98,17 @@
                     <select id='version'>
                         <option selected>Most Recent</option>
                         <option>v00</option>
-                    </select>    
+                    </select>
                 </p>
             </div>
-        </div>            
+        </div>
         <div class="row" id="row4">
             <div class="col-md-8 col-md-offset-2">
                 <button class="btn btn-default" id="submit">Submit</button>
             </div>
         </div>
-    </div> 
+    </div>
 
-    <script src="main.js"></script>   
+    <script src="main.js"></script>
 </body>
-</html>
\ No newline at end of file
+</html>
diff --git a/metobsapi/server.py b/metobsapi/server.py
index 9b14183..5a95b95 100644
--- a/metobsapi/server.py
+++ b/metobsapi/server.py
@@ -1,30 +1,32 @@
-import os
 import json as builtin_json
 import logging
+import os
 from datetime import datetime
-from urllib.request import urlopen, URLError
+from enum import Enum
+from urllib.request import URLError, urlopen
 
-from flask import Flask, render_template, request, jsonify
+from flask import Flask, jsonify, render_template, request
 from flask_cors import CORS
 from flask_json import FlaskJSON
 
-from enum import Enum
 from metobsapi import data_api, files_api
-from metobsapi.util import file_responses, data_responses
+from metobsapi.util import data_responses, file_responses
 
 LOG = logging.getLogger(__name__)
 
 app = Flask(__name__)
 
 # Load custom configuration file is specified
-if os.environ.get('METOBSAPI_SETTINGS') is None:
-    app.config.from_object('metobsapi.common_config')
+if os.environ.get("METOBSAPI_SETTINGS") is None:
+    app.config.from_object("metobsapi.common_config")
 else:
-    app.config.from_pyfile(os.environ.get('METOBSAPI_SETTINGS'))
+    app.config.from_pyfile(os.environ.get("METOBSAPI_SETTINGS"))
 
 
 # Load json handler and add custom enum encoder
 json = FlaskJSON(app)
+
+
 @json.encoder
 def enum_encoder(o):
     if isinstance(o, Enum):
@@ -32,37 +34,39 @@ def enum_encoder(o):
 
 
 # Allow for cross-domain access to the API using CORS
-CORS(app, resources=r'/api/*', allow_headers='Content-Type')
+CORS(app, resources=r"/api/*", allow_headers="Content-Type")
 
 
-@app.route('/api/')
+@app.route("/api/")
 def index():
     """Main App Documentation"""
-    return render_template('index.html')
+    return render_template("index.html")
 
 
-@app.route('/api/files')
+@app.route("/api/files")
 def files_index():
     """Files API Documentation"""
-    return render_template('files_index.html',
-                           archive_info=file_responses.ARCHIVE_STREAMS,
-                           instrument_streams=file_responses.INSTRUMENT_STREAMS)
+    return render_template(
+        "files_index.html",
+        archive_info=file_responses.ARCHIVE_STREAMS,
+        instrument_streams=file_responses.INSTRUMENT_STREAMS,
+    )
 
 
-@app.route('/api/data')
+@app.route("/api/data")
 def data_index():
     """Data API Documentation"""
-    return render_template('data_index.html')
+    return render_template("data_index.html")
 
 
 @app.errorhandler(404)
 def page_not_found(e):
-    return render_template('404.html'), 404
+    return render_template("404.html"), 404
 
 
 @app.errorhandler(500)
-def	internal_server(e):
-    return render_template('500.html'), 500        
+def internal_server(e):
+    return render_template("500.html"), 500
 
 
 @app.after_request
@@ -71,46 +75,46 @@ def apply_header(response):
     return response
 
 
-@app.route('/api/data.<fmt>', methods=['GET'])
+@app.route("/api/data.<fmt>", methods=["GET"])
 def get_data(fmt):
-    begin_time = request.args.get('begin')
-    end_time = request.args.get('end')
-    site = request.args.get('site')
-    inst = request.args.get('inst')
-    symbols = request.args.get('symbols')
-    interval = request.args.get('interval')
-    sep = request.args.get('sep', ',')
-    order = request.args.get('order', 'row')
-    epoch = request.args.get('epoch')
-
-    result = data_api.modify_data(fmt, begin_time,
-                                  end_time, site, inst, symbols, interval,
-                                  sep, order, epoch)
+    begin_time = request.args.get("begin")
+    end_time = request.args.get("end")
+    site = request.args.get("site")
+    inst = request.args.get("inst")
+    symbols = request.args.get("symbols")
+    interval = request.args.get("interval")
+    sep = request.args.get("sep", ",")
+    order = request.args.get("order", "row")
+    epoch = request.args.get("epoch")
+
+    result = data_api.modify_data(fmt, begin_time, end_time, site, inst, symbols, interval, sep, order, epoch)
 
     return result
 
 
-@app.route('/api/files.<fmt>', methods=['GET'])
+@app.route("/api/files.<fmt>", methods=["GET"])
 def get_files(fmt):
-    begin_time = request.args.get('begin')
-    end_time = request.args.get('end')
-    dates = request.args.get('dates')
-    streams = request.args.get('streams')
+    begin_time = request.args.get("begin")
+    end_time = request.args.get("end")
+    dates = request.args.get("dates")
+    streams = request.args.get("streams")
     return files_api.find_stream_files(fmt, begin_time, end_time, dates, streams)
 
 
-@app.route('/api/archive/info', methods=['GET'])
+@app.route("/api/archive/info", methods=["GET"])
 def get_archive_info():
-    return jsonify({
-        'code': 200,
-        'message': '',
-        'sites': file_responses.ARCHIVE_INFO,
-    })
+    return jsonify(
+        {
+            "code": 200,
+            "message": "",
+            "sites": file_responses.ARCHIVE_INFO,
+        }
+    )
 
 
-@app.route('/api/status', methods=['GET'])
+@app.route("/api/status", methods=["GET"])
 def status_index():
-    return render_template('status_index.html')
+    return render_template("status_index.html")
 
 
 def _status_dict_to_html(response):
@@ -119,20 +123,22 @@ def _status_dict_to_html(response):
 <body>
 {}
 </body>
-</html>""".format(items)
+</html>""".format(
+        items
+    )
 
 
 def _status_render(response, fmt):
-    if fmt == 'json':
+    if fmt == "json":
         return jsonify(response)
     else:
         return _status_dict_to_html(response)
 
 
-@app.route('/api/status/<site>/<inst>.<fmt>', methods=['GET'])
-@app.route('/api/status/<site>/<inst>', methods=['GET'])
-@app.route('/api/status/<site>.<fmt>', methods=['GET'])
-@app.route('/api/status/<site>', methods=['GET'])
+@app.route("/api/status/<site>/<inst>.<fmt>", methods=["GET"])
+@app.route("/api/status/<site>/<inst>", methods=["GET"])
+@app.route("/api/status/<site>.<fmt>", methods=["GET"])
+@app.route("/api/status/<site>", methods=["GET"])
 def get_instrument_status(site, inst=None, fmt=None):
     """See `/api/status/` for more information."""
     # defaults:
@@ -147,8 +153,8 @@ def get_instrument_status(site, inst=None, fmt=None):
 
     if fmt is None:
         fmt = "html"
-    if fmt not in ['html', 'json']:
-        return render_template('400.html', format=fmt), 400
+    if fmt not in ["html", "json"]:
+        return render_template("400.html", format=fmt), 400
 
     if inst is None:
         json_subpath = os.path.join(site, "status.json")
@@ -165,16 +171,16 @@ def get_instrument_status(site, inst=None, fmt=None):
         try:
             json_str = urlopen(json_url).read()
         except URLError:
-            response['status_message'] = "Could not retrieve configured status: {}".format(json_url)
+            response["status_message"] = "Could not retrieve configured status: {}".format(json_url)
             json_str = None
     else:
         base_path = app.config.get("ARCHIVE_ROOT")
         json_path = os.path.join(base_path, json_subpath)
         try:
-            json_str = open(json_path, 'r').read()
+            json_str = open(json_path, "r").read()
             mod_time = datetime.fromtimestamp(os.path.getmtime(json_path))
         except FileNotFoundError:
-            response['status_message'] = "No status information found."
+            response["status_message"] = "No status information found."
             json_str = None
 
     if json_str is None:
@@ -182,11 +188,11 @@ def get_instrument_status(site, inst=None, fmt=None):
         return _status_render(response, fmt)
 
     json_dict = builtin_json.loads(json_str)
-    response['last_updated'] = mod_time.strftime("%Y-%m-%d %H:%M:%SZ")
+    response["last_updated"] = mod_time.strftime("%Y-%m-%d %H:%M:%SZ")
     response.update(json_dict)
     return _status_render(response, fmt)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     app.debug = True
-    app.run('0.0.0.0', threaded=True)
+    app.run("0.0.0.0", threaded=True)
diff --git a/metobsapi/static/sidebar.css b/metobsapi/static/sidebar.css
index 26e3570..98a6df2 100644
--- a/metobsapi/static/sidebar.css
+++ b/metobsapi/static/sidebar.css
@@ -1,3 +1,3 @@
 .fixed {
 	position: fixed;
-}
\ No newline at end of file
+}
diff --git a/metobsapi/templates/400.html b/metobsapi/templates/400.html
index f1fbafa..07aff2a 100755
--- a/metobsapi/templates/400.html
+++ b/metobsapi/templates/400.html
@@ -5,4 +5,4 @@
 
 {% if format %}
 <p>No data file format found of type '{{ format }}'</p>
-{% endif %}
\ No newline at end of file
+{% endif %}
diff --git a/metobsapi/templates/404.html b/metobsapi/templates/404.html
index 386db23..e79d898 100755
--- a/metobsapi/templates/404.html
+++ b/metobsapi/templates/404.html
@@ -2,7 +2,7 @@
 	<head>
 		<title>404 Not Found</title>
 	</head>
-	
+
 	<body>
 	<h1>404 Not Found</h1>
 	"
@@ -10,4 +10,4 @@
 	<br>
 	<br>
 	</body>
-</html>	 	
\ No newline at end of file
+</html>
diff --git a/metobsapi/templates/500.html b/metobsapi/templates/500.html
index 0c094ad..bfea71f 100755
--- a/metobsapi/templates/500.html
+++ b/metobsapi/templates/500.html
@@ -9,4 +9,4 @@
 		 (generated by waitress)"
 		</pre>
 	</body>
-</html>		 
\ No newline at end of file
+</html>
diff --git a/metobsapi/templates/data_index.html b/metobsapi/templates/data_index.html
index 38e1ff4..08f6204 100644
--- a/metobsapi/templates/data_index.html
+++ b/metobsapi/templates/data_index.html
@@ -6,7 +6,7 @@
         <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css">
         <link rel="stylesheet" href="{{ url_for('static', filename='sidebar.css') }}">
 </head>
-<body>  
+<body>
 <div class="container-fluid">
     <div class="row" id="row1">
         <nav class="col-md-2" id="sidebar-wrapper">
@@ -14,7 +14,7 @@
                     <li>
                         <a href="#Metobs Data Request Application">
                             Metobs Data Request Application
-                        </a>    
+                        </a>
                     </li>
                     <li>
                         <a href="#Data available">
@@ -29,7 +29,7 @@
                     <li>
                         <a href="#Instrument Symbols">
                             Instrument symbols
-                        </a>    
+                        </a>
                     </li>
                     <li>
                         <a href="#Example Queries">
@@ -45,7 +45,7 @@
                         <a href="#Getting Help">
                             Getting Help
                         </a>
-                    </li>                    
+                    </li>
                 </ul>
         </nav>
         <div class="col-md-8" style='border-left: solid;border-color: #A0A0A0;'>
@@ -65,11 +65,11 @@
                         <p>
                              The data are provided courtesy of SSEC/AOSS UW-Madison. Clients causing issues due to size or
                              quantity of queries may be blocked from further access until the issue
-                             is resolved 
+                             is resolved
                         </p>
                         <br>
-                        <a href='http://www.ssec.wisc.edu/disclaimer.html'> 
-                            Disclaimer 
+                        <a href='http://www.ssec.wisc.edu/disclaimer.html'>
+                            Disclaimer
                         </a>
                     </div>
             </div>
@@ -98,7 +98,7 @@
                     </h3>
 
                     <p>
-                        The base URL of a data query specifies the requested data return format:   
+                        The base URL of a data query specifies the requested data return format:
                     </p>
                     <p style='text-indent: 50px'>
                         <a href='http://metobs.ssec.wisc.edu/api/data'>http://metobs.ssec.wisc.edu/api/data</a>.&#60fmt&#62
@@ -125,7 +125,7 @@
                         format for a HTTP GET query string can be found at <a href="http://en.wikipedia.org/wiki/Query_string">
                         http://en.wikipedia.org/wiki/query_string</a>. Essentially, separate the URL from the query string using a ?
                         and the <cite style='font-style: italic;'>key</cite>=<cite style='font-style: italic'>value</cite> parameters
-                        with a &#38. Depending on the client, you may need to use URL character encoding in the URL, such as a + in 
+                        with a &#38. Depending on the client, you may need to use URL character encoding in the URL, such as a + in
                         place of any spaces. For more information see <a href='#Example Queries'>Example queries</a>.
                     </p>
                     <p>
@@ -134,7 +134,7 @@
                     <table>
                         <tr>
                             <td style='padding: 6px;'>
-                                <b>symbols:</b> 
+                                <b>symbols:</b>
                             </td>
                             <td style='padding: 6px;'>
                                 (Required) Colon separated list of data quantities to query for. These are specified
@@ -167,7 +167,7 @@
                         </tr>
                         <tr>
                             <td style='padding: 6px;'>
-                                <b>begin:</b> 
+                                <b>begin:</b>
                             </td>
                             <td style='padding: 6px;'>
                                 Start of the query interval in UTC as YYYY-MM-DDTHH:MM:SS or -HH:MM:SS.
@@ -178,7 +178,7 @@
                         </tr>
                         <tr>
                             <td style='padding: 6px;'>
-                                <b>end:</b> 
+                                <b>end:</b>
                             </td>
                             <td style='padding: 6px;'>
                                 End of the query interval in UTC as YYYY-MM-DDTHH:MM:SS or -HH:MM:SS.
@@ -189,7 +189,7 @@
                         </tr>
                         <tr>
                             <td style='padding: 6px;'>
-                                <b>sep:</b> 
+                                <b>sep:</b>
                             </td>
                             <td style='padding: 6px;'>
                                 The separator to use between each data point. Works with CSV and XML formats. By default, the separator is a ','.
@@ -197,7 +197,7 @@
                         </tr>
                         <tr>
                             <td style='padding: 6px;'>
-                                <b>interval:</b> 
+                                <b>interval:</b>
                             </td>
                             <td style='padding: 6px;'>
                                 The interval for the requested data. The
@@ -208,20 +208,20 @@
                         </tr>
                         <tr>
                             <td style='padding: 6px;'>
-                                <b>callback:</b> 
+                                <b>callback:</b>
                             </td>
                             <td style='padding: 6px;'>
                                 (Creates a JSONP response) Name of the call back function to wrap the JSON response data.
-                                <a href='https://en.wikipedia.org/wiki/JSON#JSONP'>JSONP</a> is a <a href='http://www.json.org'>JSON</a> 
-                                structure wrapped in a javascript callback. This is one method if you want to use the 
-                                data for display using Javascript or AJAX because it avoids the 
+                                <a href='https://en.wikipedia.org/wiki/JSON#JSONP'>JSONP</a> is a <a href='http://www.json.org'>JSON</a>
+                                structure wrapped in a javascript callback. This is one method if you want to use the
+                                data for display using Javascript or AJAX because it avoids the
                                 <a href='https://en.wikipedia.org/wiki/Same-origin_policy'>SOP issue.</a> For the second method, look at the <a href="#Data Formats">JSON</a> Format Below. Note: it looks like
                                 'mycallback(JSON);'.
                             </td>
                         </tr>
                         <tr>
                             <td style='padding: 6px;'>
-                                <b>epoch:</b> 
+                                <b>epoch:</b>
                             </td>
                             <td style='padding: 6px;'>
                                 Only included in the query string if the user wants the returned timestamps in a Unix epoch format.
@@ -241,7 +241,7 @@
                             </td>
                         </tr>
                     </table>
-                </div>    
+                </div>
             </div>
 
             <!-- Instrument symbols div -->
@@ -777,7 +777,7 @@
                         <ul>
                             <a href="{{ url_for('get_data', fmt='csv', symbols='aoss.tower.air_temp:aoss.tower.dewpoint:aoss.tower.pressure') }}" }}>
                                 {{ url_for('get_data', fmt='csv', symbols='aoss.tower.air_temp:aoss.tower.dewpoint:aoss.tower.pressure') | replace('%3A', ':') }}
-                            </a>    
+                            </a>
                         </ul>
                     </li>
                     <li style='font-size: 15px'>
@@ -785,7 +785,7 @@
                         <ul>
                             <a href="{{ url_for('get_data', fmt='csv', symbols='aoss.tower.air_temp:aoss.tower.dewpoint:aoss.tower.pressure', epoch='ms') }}">
                                 {{ url_for('get_data', fmt='csv', symbols='aoss.tower.air_temp:aoss.tower.dewpoint:aoss.tower.pressure', epoch='ms') | replace('%3A', ':') }}
-                            </a>    
+                            </a>
                         </ul>
                     </li>
                     <li style='font-size: 15px'>
@@ -856,7 +856,7 @@
                     JSON
                 </h3>
                 <p>
-                    For displaying data using javascript or AJAX, there are two methods the API uses. 
+                    For displaying data using javascript or AJAX, there are two methods the API uses.
                     The user could set the callback parameter to return a JSONP format which avoids
                     the SOP issue, or use a JSON format without a callback parameter, which the api automatically returns as a <a href='https://en.wikipedia.org/wiki/Cross-origin_resource_sharing'>CORS</a> HTTP request;
                     this particular CORS request allows cross origin get requests.
@@ -911,7 +911,7 @@
                     <li>
                         When the callback parameter is set to 'mycallback', the <a href='http://www.json.org'>JSON</a> structure
                         is transformed into a <a href='https://en.wikipedia.org/wiki/JSON#JSONP'>jsonp</a> format. This format looks
-                        like: 
+                        like:
                         <pre style='padding: 0px; padding-top: 15px; background: none; border: none'>
 mycallback({
     "code":200,
@@ -936,7 +936,7 @@ mycallback({
 });
                         </pre>
                     </li>
-                </ul>    
+                </ul>
                 <h3>
                     XML
                 </h3>
@@ -955,7 +955,7 @@ mycallback({
     &lt;/data&gt;
 &lt;/metobs&gt;
                         </pre>
-                    </li>        
+                    </li>
             </div>
 
             <!-- Getting help -->
@@ -964,13 +964,13 @@ mycallback({
                     Getting Help
                 </h2>
                 <p>
-                    If you have comments, suggestions, or require help, please contact the 
+                    If you have comments, suggestions, or require help, please contact the
                     <a href='http://www.ssec.wisc.edu/contact-form/index.php?name=MetObs%20Webmaster'>MetObs Webmaster</a>.
                 </p>
             </div>
         </div>
-    </div>    
+    </div>
 </div>
 
 <!-- <script type="text/javascript" src="index.js"></script> -->
-</body>
\ No newline at end of file
+</body>
diff --git a/metobsapi/templates/files_index.html b/metobsapi/templates/files_index.html
index 8557b5f..d5f7de8 100644
--- a/metobsapi/templates/files_index.html
+++ b/metobsapi/templates/files_index.html
@@ -6,7 +6,7 @@
         <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css">
         <link rel="stylesheet" href="{{ url_for('static', filename='sidebar.css') }}">
 </head>
-<body>  
+<body>
 <div class="container-fluid">
     <div class="row" id="row1">
         <nav class="col-md-2" id="sidebar-wrapper">
@@ -14,7 +14,7 @@
                     <li>
                         <a href="#Metobs File Request Application">
                             Metobs File Request Application
-                        </a>    
+                        </a>
                     </li>
                     <li>
                         <a href="#Products available">
@@ -29,7 +29,7 @@
                     <li>
                         <a href="#Stream Quantifiers">
                             Stream Quantifiers
-                        </a>    
+                        </a>
                     </li>
                     <li>
                         <a href="#Example Queries">
@@ -45,7 +45,7 @@
                         <a href="#Getting Help">
                             Getting Help
                         </a>
-                    </li>                    
+                    </li>
                 </ul>
         </nav>
         <div class="col-md-8" style='border-left: solid;border-color: #A0A0A0;'>
@@ -65,11 +65,11 @@
                         <p>
                              The files are provided courtesy of SSEC/AOSS UW-Madison. Clients causing issues due to size or
                              quantity of queries may be blocked from further access until the issue
-                             is resolved 
+                             is resolved
                         </p>
                         <br>
-                        <a href='http://www.ssec.wisc.edu/disclaimer.html'> 
-                            Disclaimer 
+                        <a href='http://www.ssec.wisc.edu/disclaimer.html'>
+                            Disclaimer
                         </a>
                     </div>
             </div>
@@ -99,7 +99,7 @@
                     </h3>
 
                     <p>
-                        The base URL of a files query specifies the requested file return format:   
+                        The base URL of a files query specifies the requested file return format:
                     </p>
                     <p style='text-indent: 50px'>
                         <a href='http://metobs.ssec.wisc.edu/api/files'>http://metobs.ssec.wisc.edu/api/files</a>.&#60fmt&#62
@@ -127,7 +127,7 @@
                         format for a HTTP GET query string can be found at <a href="http://en.wikipedia.org/wiki/Query_string">
                         http://en.wikipedia.org/wiki/query_string</a>. Essentially, separate the URL from the query string using a ?
                         and the <cite style='font-style: italic;'>key</cite>=<cite style='font-style: italic'>value</cite> parameters
-                        with a &#38. Depending on the client, you may need to use URL character encoding in the URL, such as a + in 
+                        with a &#38. Depending on the client, you may need to use URL character encoding in the URL, such as a + in
                         place of any spaces. For more information see <a href='#Example Queries'>Example queries</a>.
                     </p>
                     <p>
@@ -136,7 +136,7 @@
                     <table>
                         <tr>
                             <td style='padding: 6px;'>
-                                <b>begin:</b> 
+                                <b>begin:</b>
                             </td>
                             <td style='padding: 6px;'>
                                 Start of the query interval in UTC as YYYY-MM-DD or -&#60number of days&#62.
@@ -147,21 +147,21 @@
                         </tr>
                         <tr>
                             <td style='padding: 6px;'>
-                                <b>streams:</b> 
+                                <b>streams:</b>
                             </td>
                             <td style='padding: 6px;'>
-                                (Required) Colon separated list of product attributes to query for. 
+                                (Required) Colon separated list of product attributes to query for.
                                 streams are specified as a colon separated list of '&#60site&#62.&#60instrument&#62.&#60product_id&#62.&#60product_level&#62.&#60version_number&#62'.
                                 The version number may be specificed as '*' to choose the most recent version.
                                 The product ID may also be specified as '*' to choose all products for a given level.
                                 <br><b>Note</b>: level and version is abbreviated
                                 (i.e. level_00 becomes l00 and version_00 becomes v00).
-                                Valid streams quantifiers for each site/instrument are documented below. 
+                                Valid stream quantifiers for each site/instrument are documented below.
                             </td>
                         </tr>
                         <tr>
                             <td style='padding: 6px;'>
-                                <b>end:</b> 
+                                <b>end:</b>
                             </td>
                             <td style='padding: 6px;'>
                                 End of the query interval in UTC as YYYY-MM-DD. If not provided, it defaults to today.
@@ -177,7 +177,7 @@
                             </td>
                         </tr>
                     </table>
-                </div>    
+                </div>
             </div>
 
             <!-- Stream Quantifiers div -->
@@ -216,7 +216,7 @@
                         </tr>
                     {% endfor %}
                 </table>
-                
+
                 <h3>
                     AOSS AERI:
                 </h3>
@@ -269,7 +269,7 @@
                         <ul>
                             <a href="{{ url_for('get_files', fmt='csv', streams='aoss.tower.ascii.l00.*') }}" }}>
                                 {{ url_for('get_files', fmt='csv', streams='aoss.tower.ascii.level_00.*') | replace('%3A', ':') | replace('%2A', '*') }}
-                            </a>    
+                            </a>
                         </ul>
                     </li>
                     <li style='font-size: 15px'>
@@ -277,7 +277,7 @@
                         <ul>
                             <a href="{{ url_for('get_files', fmt='csv', streams='aoss.aeri.scr-aesitter.l00.*:aoss.aeri.scr-radiance.l00.*:aoss.aeri.scr-summary.l00.*') }}">
                                 {{ url_for('get_files', fmt='csv', streams='aoss.aeri.scr-aesitter.l00.*:aoss.aeri.scr-radiance.l00.*:aoss.aeri.scr-summary.l00.*') | replace('%3A', ':') | replace('%2A', '*') }}
-                            </a>    
+                            </a>
                         </ul>
                     </li>
                     <li style='font-size: 15px'>
@@ -366,7 +366,7 @@ rig_tower.2016-08-04.ascii,http://metobs.ssec.wisc.edu/pub/cache/aoss/tower/leve
 }
                         </pre>
                     </li>
-                </ul>    
+                </ul>
                 <h3>
                     SH
                 </h3>
@@ -379,8 +379,8 @@ rig_tower.2016-08-04.ascii,http://metobs.ssec.wisc.edu/pub/cache/aoss/tower/leve
 # code: 200
 # message:
 # num_results: 2
-# 
-# Instructions for post-download: 
+#
+# Instructions for post-download:
 # 1. Using the command line, go to the directory where this script is saved
 # 2. Make this script executable 'chmod +x &#60name of script&#62'
 # 3. Run the script 'bash &#60name of script&#62'
@@ -416,9 +416,9 @@ REM code: 200
 REM message:
 REM num_results: 2
 
-REM Instructions for post-download: 
+REM Instructions for post-download:
 REM 1. Using windows' command line, go to the directory where this script is saved
-REM 2. Tap in 
+REM 2. Tap in
 REM 3. Hold ctrl + c when download are done and hit Y, then enter
 REM 4. Finally, files will be in a data directory
 
@@ -437,7 +437,7 @@ bitsadmin /resume myDownloadJob
 bitsadmin /SetNotifyCmdLine myDownloadJob "%SystemRoot%\system32\bitsadmin.exe" "%SystemRoot%\syste,32\bitsadmin.exe /complete myDownloadJob"
 bitsadmin /monitor
                         </pre>
-                    </li>                
+                    </li>
             </div>
 
             <!-- Getting help -->
@@ -446,13 +446,13 @@ bitsadmin /monitor
                     Getting Help
                 </h2>
                 <p>
-                    If you have comments, suggestions, or require help, please contact the 
+                    If you have comments, suggestions, or require help, please contact the
                     <a href='http://www.ssec.wisc.edu/contact-form/index.php?name=MetObs%20Webmaster'>MetObs Webmaster</a>.
                 </p>
             </div>
         </div>
-    </div>    
+    </div>
 </div>
 
 <!-- <script type="text/javascript" src="index.js"></script> -->
-</body>
\ No newline at end of file
+</body>
diff --git a/metobsapi/templates/index.html b/metobsapi/templates/index.html
index 1eb67e5..aa6b93d 100644
--- a/metobsapi/templates/index.html
+++ b/metobsapi/templates/index.html
@@ -5,7 +5,7 @@
         <title>Metobs Data Request Application</title>
         <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css">
 </head>
-<body> 
+<body>
     <div class="row" id="row1">
     <div class="col-md-2">
         <nav class="col-md-2" id="sidebar-wrapper">
@@ -13,7 +13,7 @@
                     <li>
                         <a href="#Metobs API">
                             Metobs API
-                        </a>    
+                        </a>
                     </li>
                     <li>
                         <a href="#Data API Documentation">
@@ -24,11 +24,11 @@
                         <a href="#File API Documentation">
                             File API Documentation
                         </a>
-                    </li>                    
+                    </li>
                 </ul>
         </nav>
     </div>
-                
+
     <div class="col-md-8" style='border-left: solid;border-color: #A0A0A0;height: 1280px'>
         <h1 id="Metobs API">
             Metobs API
@@ -39,10 +39,10 @@
             </a>
         </h2>
         <p>
-            This API returns Aoss Tower and Mendota Buoy data. The user can query for data using 
+            This API returns AOSS Tower and Mendota Buoy data. The user can query for data using
             the variable names, intervals, and times that they desire. The user will get a CSV, json, or
             XML file with the data that they asked for. For more information, click the link above.
-        </p>    
+        </p>
 
         <h2 id="#File API Documentation">
             <a href="{{ url_for('files_index')}}">
@@ -68,4 +68,4 @@
     </div>
 <!-- <script type="text/javascript" src="index.js"></script> -->
 </body>
-</html>
\ No newline at end of file
+</html>
diff --git a/metobsapi/templates/status_index.html b/metobsapi/templates/status_index.html
index eed7220..89ed482 100644
--- a/metobsapi/templates/status_index.html
+++ b/metobsapi/templates/status_index.html
@@ -6,7 +6,7 @@
         <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css">
         <link rel="stylesheet" href="{{ url_for('static', filename='sidebar.css') }}">
 </head>
-<body>  
+<body>
 <div class="container-fluid">
     <div class="row" id="row1">
         <nav class="col-md-2" id="sidebar-wrapper">
@@ -14,7 +14,7 @@
                     <li>
                         <a href="#Instrument Status API">
                             Instrument Status API
-                        </a>    
+                        </a>
                     </li>
                     <li>
                         <a href="#Status Information">
@@ -52,11 +52,11 @@
                         <p>
                              The files are provided courtesy of SSEC/AOSS UW-Madison. Clients causing issues due to size or
                              quantity of queries may be blocked from further access until the issue
-                             is resolved 
+                             is resolved
                         </p>
                         <br>
-                        <a href='http://www.ssec.wisc.edu/disclaimer.html'> 
-                            Disclaimer 
+                        <a href='http://www.ssec.wisc.edu/disclaimer.html'>
+                            Disclaimer
                         </a>
                     </div>
             </div>
@@ -180,8 +180,8 @@
                 </ul>
             </div>
         </div>
-    </div>    
+    </div>
 </div>
 
 <!-- <script type="text/javascript" src="index.js"></script> -->
-</body>
\ No newline at end of file
+</body>
diff --git a/metobsapi/tests/test_data_api.py b/metobsapi/tests/test_data_api.py
index 84647df..c6ff10d 100644
--- a/metobsapi/tests/test_data_api.py
+++ b/metobsapi/tests/test_data_api.py
@@ -1,49 +1,53 @@
 import json
 import unittest
 from unittest import mock
+
 import metobsapi
 
 
 def fake_data(interval, symbols, num_vals, single_result=False):
     import random
     from datetime import datetime, timedelta
+
     from influxdb.resultset import ResultSet
+
     now = datetime(2017, 3, 5, 19, 0, 0)
     t_format = "%Y-%m-%dT%H:%M:%SZ"
     measurement_name = "metobs_" + interval
     series = []
     for (site, inst), columns in symbols.items():
-        tags = {'site': site, 'inst': inst}
+        tags = {"site": site, "inst": inst}
         vals = []
         for i in range(num_vals):
-            vals.append(
-                [(now + timedelta(minutes=i)).strftime(t_format)] + \
-                [random.random()] * (len(columns) - 1)
-            )
+            vals.append([(now + timedelta(minutes=i)).strftime(t_format)] + [random.random()] * (len(columns) - 1))
             # make up some Nones/nulls (but not all the time)
             r_idx = int(random.random() * len(columns) * 3)
             # don't include time (index 0)
             if 0 < r_idx < len(columns):
                 vals[-1][r_idx] = None
         s = {
-            'name': measurement_name,
-            'columns': columns,
-            'tags': tags,
-            'values': vals,
+            "name": measurement_name,
+            "columns": columns,
+            "tags": tags,
+            "values": vals,
         }
         if single_result:
             series.append(s)
         else:
-            series.append(ResultSet({
-                'series': [s],
-                'statement_id': 0,
-            }))
+            series.append(
+                ResultSet(
+                    {
+                        "series": [s],
+                        "statement_id": 0,
+                    }
+                )
+            )
 
     if single_result:
         ret = {
-                  'series': series,
-                  'statement_id': 0,
-              }
+            "series": series,
+            "statement_id": 0,
+        }
         return ResultSet(ret)
     else:
         return series
@@ -51,214 +55,232 @@ def fake_data(interval, symbols, num_vals, single_result=False):
 
 class TestDataAPI(unittest.TestCase):
     def setUp(self):
-        metobsapi.app.config['TESTING'] = True
-        metobsapi.app.config['DEBUG'] = True
+        metobsapi.app.config["TESTING"] = True
+        metobsapi.app.config["DEBUG"] = True
         self.app = metobsapi.app.test_client()
 
     def test_doc(self):
-        res = self.app.get('/api/data')
-        assert b'Data Request Application' in res.data
+        res = self.app.get("/api/data")
+        assert b"Data Request Application" in res.data
 
     def test_bad_format(self):
-        res = self.app.get('/api/data.fake')
-        self.assertIn(b'No data file format', res.data)
+        res = self.app.get("/api/data.fake")
+        self.assertIn(b"No data file format", res.data)
 
     def test_bad_begin_json(self):
-        res = self.app.get('/api/data.json?symbols=air_temp&begin=blah')
+        res = self.app.get("/api/data.json?symbols=air_temp&begin=blah")
         res = json.loads(res.data.decode())
-        self.assertEqual(res['code'], 400)
-        self.assertEqual(res['status'], 'error')
-        self.assertIn('timestamp', res['message'])
+        self.assertEqual(res["code"], 400)
+        self.assertEqual(res["status"], "error")
+        self.assertIn("timestamp", res["message"])
 
     def test_bad_order(self):
-        res = self.app.get('/api/data.json?order=blah&symbols=air_temp')
+        res = self.app.get("/api/data.json?order=blah&symbols=air_temp")
         res = json.loads(res.data.decode())
-        self.assertIn('column', res['message'])
-        self.assertIn('row', res['message'])
+        self.assertIn("column", res["message"])
+        self.assertIn("row", res["message"])
 
     def test_bad_epoch(self):
-        res = self.app.get('/api/data.json?epoch=blah&symbols=air_temp')
+        res = self.app.get("/api/data.json?epoch=blah&symbols=air_temp")
         res = json.loads(res.data.decode())
-        self.assertIn('\'h\'', res['message'])
-        self.assertIn('\'m\'', res['message'])
-        self.assertIn('\'s\'', res['message'])
-        self.assertIn('\'u\'', res['message'])
+        self.assertIn("'h'", res["message"])
+        self.assertIn("'m'", res["message"])
+        self.assertIn("'s'", res["message"])
+        self.assertIn("'u'", res["message"])
 
     def test_bad_interval(self):
-        res = self.app.get('/api/data.json?interval=blah&symbols=air_temp')
+        res = self.app.get("/api/data.json?interval=blah&symbols=air_temp")
         res = json.loads(res.data.decode())
-        self.assertIn('\'1m\'', res['message'])
-        self.assertIn('\'5m\'', res['message'])
-        self.assertIn('\'1h\'', res['message'])
+        self.assertIn("'1m'", res["message"])
+        self.assertIn("'5m'", res["message"])
+        self.assertIn("'1h'", res["message"])
 
     def test_missing_inst(self):
-        res = self.app.get('/api/data.json?site=X&symbols=air_temp&begin=-05:00:00')
+        res = self.app.get("/api/data.json?site=X&symbols=air_temp&begin=-05:00:00")
         res = json.loads(res.data.decode())
-        self.assertEqual(res['code'], 400)
-        self.assertEqual(res['status'], 'error')
-        self.assertIn('\'site\'', res['message'])
-        self.assertIn('\'inst\'', res['message'])
+        self.assertEqual(res["code"], 400)
+        self.assertEqual(res["status"], "error")
+        self.assertIn("'site'", res["message"])
+        self.assertIn("'inst'", res["message"])
 
     def test_missing_site(self):
-        res = self.app.get('/api/data.json?inst=X&symbols=air_temp&begin=-05:00:00')
+        res = self.app.get("/api/data.json?inst=X&symbols=air_temp&begin=-05:00:00")
         res = json.loads(res.data.decode())
-        self.assertEqual(res['code'], 400)
-        self.assertEqual(res['status'], 'error')
-        self.assertIn('\'site\'', res['message'])
-        self.assertIn('\'inst\'', res['message'])
+        self.assertEqual(res["code"], 400)
+        self.assertEqual(res["status"], "error")
+        self.assertIn("'site'", res["message"])
+        self.assertIn("'inst'", res["message"])
 
     def test_missing_symbols(self):
-        res = self.app.get('/api/data.json?begin=-05:00:00')
+        res = self.app.get("/api/data.json?begin=-05:00:00")
         res = json.loads(res.data.decode())
-        self.assertEqual(res['code'], 400)
-        self.assertEqual(res['status'], 'error')
-        self.assertIn('\'symbols\'', res['message'])
+        self.assertEqual(res["code"], 400)
+        self.assertEqual(res["status"], "error")
+        self.assertIn("'symbols'", res["message"])
 
     def test_too_many_points(self):
-        res = self.app.get('/api/data.json?symbols=aoss.tower.air_temp&begin=1970-01-01T00:00:00')
+        res = self.app.get("/api/data.json?symbols=aoss.tower.air_temp&begin=1970-01-01T00:00:00")
         self.assertEqual(res.status_code, 413)
         res = json.loads(res.data.decode())
-        self.assertIn('too many values', res['message'])
-        self.assertEqual(res['code'], 413)
-        self.assertEqual(res['status'], 'fail')
+        self.assertIn("too many values", res["message"])
+        self.assertEqual(res["code"], 413)
+        self.assertEqual(res["status"], "fail")
 
-    @mock.patch('metobsapi.data_api.query')
+    @mock.patch("metobsapi.data_api.query")
     def test_shorthand_one_symbol_json_row(self, query_func):
-        r = fake_data('1m', {('aoss', 'tower'): ['time', 'air_temp']}, 9)
+        r = fake_data("1m", {("aoss", "tower"): ["time", "air_temp"]}, 9)
         query_func.return_value = r
         # row should be the default
-        res = self.app.get('/api/data.json?site=aoss&inst=tower&symbols=air_temp&begin=-00:10:00')
+        res = self.app.get("/api/data.json?site=aoss&inst=tower&symbols=air_temp&begin=-00:10:00")
         res = json.loads(res.data.decode())
-        self.assertEqual(res['code'], 200)
-        self.assertEqual(res['num_results'], 9)
-        self.assertListEqual(res['results']['symbols'], ['air_temp'])
-        self.assertEqual(len(res['results']['timestamps']), 9)
-        self.assertEqual(len(res['results']['data']), 9)
-        self.assertEqual(len(res['results']['data'][0]), 1)
+        self.assertEqual(res["code"], 200)
+        self.assertEqual(res["num_results"], 9)
+        self.assertListEqual(res["results"]["symbols"], ["air_temp"])
+        self.assertEqual(len(res["results"]["timestamps"]), 9)
+        self.assertEqual(len(res["results"]["data"]), 9)
+        self.assertEqual(len(res["results"]["data"][0]), 1)
 
-    @mock.patch('metobsapi.data_api.query')
+    @mock.patch("metobsapi.data_api.query")
     def test_shorthand_one_symbol_json_column(self, query_func):
-        r = fake_data('1m', {('aoss', 'tower'): ['time', 'air_temp']}, 9)
+        r = fake_data("1m", {("aoss", "tower"): ["time", "air_temp"]}, 9)
         query_func.return_value = r
-        res = self.app.get('/api/data.json?site=aoss&inst=tower&symbols=air_temp&begin=-00:10:00&order=column')
+        res = self.app.get("/api/data.json?site=aoss&inst=tower&symbols=air_temp&begin=-00:10:00&order=column")
         res = json.loads(res.data.decode())
-        self.assertEqual(res['code'], 200)
-        self.assertEqual(res['num_results'], 9)
-        self.assertIn('air_temp', res['results']['data'])
-        self.assertEqual(len(res['results']['data']['air_temp']), 9)
-        self.assertEqual(len(res['results']['timestamps']), 9)
+        self.assertEqual(res["code"], 200)
+        self.assertEqual(res["num_results"], 9)
+        self.assertIn("air_temp", res["results"]["data"])
+        self.assertEqual(len(res["results"]["data"]["air_temp"]), 9)
+        self.assertEqual(len(res["results"]["timestamps"]), 9)
 
-    @mock.patch('metobsapi.data_api.query')
+    @mock.patch("metobsapi.data_api.query")
     def test_wind_speed_direction_json(self, query_func):
-        r = fake_data('1m', {('aoss', 'tower'): ['time', 'wind_speed', 'wind_direction', 'wind_east', 'wind_north']}, 9)
+        r = fake_data("1m", {("aoss", "tower"): ["time", "wind_speed", "wind_direction", "wind_east", "wind_north"]}, 9)
         query_func.return_value = r
-        res = self.app.get('/api/data.json?symbols=aoss.tower.wind_speed:aoss.tower.wind_direction&begin=-00:10:00&order=column')
+        res = self.app.get(
+            "/api/data.json?symbols=aoss.tower.wind_speed:aoss.tower.wind_direction&begin=-00:10:00&order=column"
+        )
         res = json.loads(res.data.decode())
-        self.assertEqual(res['code'], 200)
-        self.assertEqual(res['num_results'], 9)
-        self.assertIn('aoss.tower.wind_direction', res['results']['data'])
-        self.assertIn('aoss.tower.wind_speed', res['results']['data'])
-        self.assertEqual(len(list(res['results']['data'].keys())), 2)
+        self.assertEqual(res["code"], 200)
+        self.assertEqual(res["num_results"], 9)
+        self.assertIn("aoss.tower.wind_direction", res["results"]["data"])
+        self.assertIn("aoss.tower.wind_speed", res["results"]["data"])
+        self.assertEqual(len(list(res["results"]["data"].keys())), 2)
 
-    @mock.patch('metobsapi.data_api.query')
+    @mock.patch("metobsapi.data_api.query")
     def test_one_symbol_two_insts_json_row(self, query_func):
-        r = fake_data('1m', {
-            ('aoss', 'tower'): ['time', 'air_temp'],
-            ('mendota', 'buoy'): ['time', 'air_temp'],
-        }, 9)
+        r = fake_data(
+            "1m",
+            {
+                ("aoss", "tower"): ["time", "air_temp"],
+                ("mendota", "buoy"): ["time", "air_temp"],
+            },
+            9,
+        )
         query_func.return_value = r
         # row should be the default
-        res = self.app.get('/api/data.json?symbols=aoss.tower.air_temp:mendota.buoy.air_temp&begin=-00:10:00')
+        res = self.app.get("/api/data.json?symbols=aoss.tower.air_temp:mendota.buoy.air_temp&begin=-00:10:00")
         res = json.loads(res.data.decode())
-        self.assertEqual(res['code'], 200)
-        self.assertEqual(res['num_results'], 9)
-        self.assertListEqual(res['results']['symbols'], ['aoss.tower.air_temp', 'mendota.buoy.air_temp'])
-        self.assertEqual(len(res['results']['timestamps']), 9)
-        self.assertEqual(len(res['results']['data']), 9)
-        self.assertEqual(len(res['results']['data'][0]), 2)
+        self.assertEqual(res["code"], 200)
+        self.assertEqual(res["num_results"], 9)
+        self.assertListEqual(res["results"]["symbols"], ["aoss.tower.air_temp", "mendota.buoy.air_temp"])
+        self.assertEqual(len(res["results"]["timestamps"]), 9)
+        self.assertEqual(len(res["results"]["data"]), 9)
+        self.assertEqual(len(res["results"]["data"][0]), 2)
 
-    @mock.patch('metobsapi.data_api.query')
+    @mock.patch("metobsapi.data_api.query")
     def test_one_symbol_three_insts_json_row(self, query_func):
-        r = fake_data('1m', {
-            ('site1', 'inst1'): ['time', 'air_temp'],
-            ('site2', 'inst2'): ['time', 'air_temp'],
-            ('site3', 'inst3'): ['time', 'air_temp'],
-        }, 9)
+        r = fake_data(
+            "1m",
+            {
+                ("site1", "inst1"): ["time", "air_temp"],
+                ("site2", "inst2"): ["time", "air_temp"],
+                ("site3", "inst3"): ["time", "air_temp"],
+            },
+            9,
+        )
         query_func.return_value = r
         # row should be the default
         from metobsapi.util.data_responses import SYMBOL_TRANSLATIONS as st
+
         st = st.copy()
-        st[('site1', 'inst1')] = st[('aoss', 'tower')]
-        st[('site2', 'inst2')] = st[('aoss', 'tower')]
-        st[('site3', 'inst3')] = st[('aoss', 'tower')]
-        with mock.patch('metobsapi.util.data_responses.SYMBOL_TRANSLATIONS', st):
-            res = self.app.get('/api/data.json?symbols=site1.inst1.air_temp:site2.inst2.air_temp:site3.inst3.air_temp&begin=-00:10:00')
+        st[("site1", "inst1")] = st[("aoss", "tower")]
+        st[("site2", "inst2")] = st[("aoss", "tower")]
+        st[("site3", "inst3")] = st[("aoss", "tower")]
+        with mock.patch("metobsapi.util.data_responses.SYMBOL_TRANSLATIONS", st):
+            res = self.app.get(
+                "/api/data.json?symbols=site1.inst1.air_temp:site2.inst2.air_temp:site3.inst3.air_temp&begin=-00:10:00"
+            )
             res = json.loads(res.data.decode())
-            self.assertEqual(res['code'], 200)
-            self.assertEqual(res['num_results'], 9)
-            self.assertListEqual(res['results']['symbols'], ['site1.inst1.air_temp', 'site2.inst2.air_temp', 'site3.inst3.air_temp'])
-            self.assertEqual(len(res['results']['timestamps']), 9)
-            self.assertEqual(len(res['results']['data']), 9)
-            self.assertEqual(len(res['results']['data'][0]), 3)
+            self.assertEqual(res["code"], 200)
+            self.assertEqual(res["num_results"], 9)
+            self.assertListEqual(
+                res["results"]["symbols"], ["site1.inst1.air_temp", "site2.inst2.air_temp", "site3.inst3.air_temp"]
+            )
+            self.assertEqual(len(res["results"]["timestamps"]), 9)
+            self.assertEqual(len(res["results"]["data"]), 9)
+            self.assertEqual(len(res["results"]["data"][0]), 3)
 
-    @mock.patch('metobsapi.data_api.query')
+    @mock.patch("metobsapi.data_api.query")
     def test_one_symbol_csv(self, query_func):
-        r = fake_data('1m', {('aoss', 'tower'): ['time', 'air_temp']}, 9)
+        r = fake_data("1m", {("aoss", "tower"): ["time", "air_temp"]}, 9)
         query_func.return_value = r
         # row should be the default
-        res = self.app.get('/api/data.csv?symbols=aoss.tower.air_temp&begin=-00:10:00')
+        res = self.app.get("/api/data.csv?symbols=aoss.tower.air_temp&begin=-00:10:00")
         res = res.data.decode()
         # header, data, newline at end
-        lines = res.split('\n')
+        lines = res.split("\n")
         self.assertEqual(len(lines), 5 + 9 + 1)
         # time + 1 channel
-        self.assertEqual(len(lines[5].split(',')), 2)
+        self.assertEqual(len(lines[5].split(",")), 2)
         self.assertIn("# code: 200", res)
 
-    @mock.patch('metobsapi.data_api.query')
+    @mock.patch("metobsapi.data_api.query")
     def test_one_symbol_xml(self, query_func):
         from xml.dom.minidom import parseString
-        r = fake_data('1m', {('aoss', 'tower'): ['time', 'air_temp']}, 9)
+
+        r = fake_data("1m", {("aoss", "tower"): ["time", "air_temp"]}, 9)
         query_func.return_value = r
         # row should be the default
-        res = self.app.get('/api/data.xml?symbols=aoss.tower.air_temp&begin=-00:10:00')
+        res = self.app.get("/api/data.xml?symbols=aoss.tower.air_temp&begin=-00:10:00")
         res = parseString(res.data.decode())
         # symbols: time and air_temp
         self.assertEqual(len(res.childNodes[0].childNodes[0].childNodes), 2)
         # data rows
         self.assertEqual(len(res.childNodes[0].childNodes[1].childNodes), 9)
 
-    @mock.patch('metobsapi.data_api.query')
+    @mock.patch("metobsapi.data_api.query")
     def test_three_symbol_csv(self, query_func):
         """Test that multiple channels in a CSV file are structured properly."""
-        r = fake_data('1m', {('aoss', 'tower'): ['time', 'air_temp', 'rel_hum', 'wind_speed']}, 9)
+        r = fake_data("1m", {("aoss", "tower"): ["time", "air_temp", "rel_hum", "wind_speed"]}, 9)
         query_func.return_value = r
         # row should be the default
-        res = self.app.get('/api/data.csv?symbols=aoss.tower.air_temp:'
-                           'aoss.tower.rel_hum:aoss.tower.wind_speed&begin=-00:10:00')
+        res = self.app.get(
+            "/api/data.csv?symbols=aoss.tower.air_temp:" "aoss.tower.rel_hum:aoss.tower.wind_speed&begin=-00:10:00"
+        )
         res = res.data.decode()
         # header, data, newline at end
-        lines = res.split('\n')
+        lines = res.split("\n")
         self.assertEqual(len(lines), 5 + 9 + 1)
         # time + 3 channels
-        self.assertEqual(len(lines[5].split(',')), 4)
+        self.assertEqual(len(lines[5].split(",")), 4)
         self.assertIn("# code: 200", res)
 
-    @mock.patch('metobsapi.data_api.query')
+    @mock.patch("metobsapi.data_api.query")
     def test_three_symbol_csv_repeat(self, query_func):
         """Test that multiple channels in a CSV file are structured properly."""
-        r = fake_data('1m', {('aoss', 'tower'): ['time', 'air_temp', 'rel_hum', 'wind_speed']}, 9)
+        r = fake_data("1m", {("aoss", "tower"): ["time", "air_temp", "rel_hum", "wind_speed"]}, 9)
         query_func.return_value = r
         # row should be the default
-        res = self.app.get('/api/data.csv?symbols=aoss.tower.air_temp:'
-                           'aoss.tower.air_temp:aoss.tower.air_temp&begin=-00:10:00')
+        res = self.app.get(
+            "/api/data.csv?symbols=aoss.tower.air_temp:" "aoss.tower.air_temp:aoss.tower.air_temp&begin=-00:10:00"
+        )
         res = res.data.decode()
         # header, data, newline at end
-        lines = res.split('\n')
+        lines = res.split("\n")
         # header, data (one empty line), newline at end
         self.assertEqual(len(lines), 5 + 1 + 1)
         # time + 1 channel
-        self.assertEqual(len(lines[5].split(',')), 1)
+        self.assertEqual(len(lines[5].split(",")), 1)
         self.assertIn("# code: 400", res)
 
     # @mock.patch('metobsapi.data_api.query')
diff --git a/metobsapi/tests/test_files_api.py b/metobsapi/tests/test_files_api.py
index b82f269..5ae3310 100644
--- a/metobsapi/tests/test_files_api.py
+++ b/metobsapi/tests/test_files_api.py
@@ -1,16 +1,19 @@
-import metobsapi
-import unittest
-import tempfile
-import shutil
 import json
+import shutil
+import tempfile
+import unittest
+
+import metobsapi
 
 
 class TestFilesAPI(unittest.TestCase):
     @classmethod
     def setUpClass(cls):
-        from metobsapi.util import create_fake_archive, file_responses
         from datetime import datetime, timedelta
-        cls.archive_dir = tempfile.mkdtemp(suffix='_metobsapi_files_test')
+
+        from metobsapi.util import create_fake_archive, file_responses
+
+        cls.archive_dir = tempfile.mkdtemp(suffix="_metobsapi_files_test")
         # need now for 'recent' queries
         now = datetime.utcnow()
         cls._datetimes = [now, now - timedelta(days=1), now - timedelta(days=2)]
@@ -23,59 +26,59 @@ class TestFilesAPI(unittest.TestCase):
         shutil.rmtree(cls.archive_dir)
 
     def setUp(self):
-        metobsapi.app.config['TESTING'] = True
-        metobsapi.app.config['DEBUG'] = True
-        metobsapi.app.config['ARCHIVE_ROOT'] = self.archive_dir
+        metobsapi.app.config["TESTING"] = True
+        metobsapi.app.config["DEBUG"] = True
+        metobsapi.app.config["ARCHIVE_ROOT"] = self.archive_dir
         self.app = metobsapi.app.test_client()
 
     def tearDown(self):
         pass
 
     def test_doc(self):
-        res = self.app.get('/api/files')
-        assert b'File Request Application' in res.data
+        res = self.app.get("/api/files")
+        assert b"File Request Application" in res.data
 
     def test_bad_format(self):
-        res = self.app.get('/api/files.fake')
-        self.assertIn(b'No data file format', res.data)
+        res = self.app.get("/api/files.fake")
+        self.assertIn(b"No data file format", res.data)
 
     def test_missing_streams(self):
-        res = self.app.get('/api/files.json')
-        self.assertIn(b'stream', res.data)
+        res = self.app.get("/api/files.json")
+        self.assertIn(b"stream", res.data)
 
     def test_bad_begin(self):
-        res = self.app.get('/api/files.json?streams=test&begin=bad')
-        self.assertIn(b'timestamp', res.data)
+        res = self.app.get("/api/files.json?streams=test&begin=bad")
+        self.assertIn(b"timestamp", res.data)
 
     def test_bad_symbol(self):
-        res = self.app.get('/api/files.json?streams=test')
-        self.assertIn(b'stream', res.data)
+        res = self.app.get("/api/files.json?streams=test")
+        self.assertIn(b"stream", res.data)
 
     def test_tower_daily_ascii_csv(self):
-        res = self.app.get('/api/files.csv?streams=aoss.tower.ascii.l00.*')
-        fn = bytes(self._datetimes[0].strftime('rig_tower.%Y-%m-%d.ascii'), encoding='utf-8')
+        res = self.app.get("/api/files.csv?streams=aoss.tower.ascii.l00.*")
+        fn = bytes(self._datetimes[0].strftime("rig_tower.%Y-%m-%d.ascii"), encoding="utf-8")
         assert fn in res.data
 
     def test_tower_daily_ascii_json(self):
-        res = self.app.get('/api/files.json?streams=aoss.tower.ascii.l00.*')
-        fn = bytes(self._datetimes[0].strftime('rig_tower.%Y-%m-%d.ascii'), encoding='utf-8')
+        res = self.app.get("/api/files.json?streams=aoss.tower.ascii.l00.*")
+        fn = bytes(self._datetimes[0].strftime("rig_tower.%Y-%m-%d.ascii"), encoding="utf-8")
         assert fn in res.data
 
     def test_tower_daily_ascii_sh(self):
-        res = self.app.get('/api/files.sh?streams=aoss.tower.ascii.l00.*')
-        fn = bytes(self._datetimes[0].strftime('rig_tower.%Y-%m-%d.ascii'), encoding='utf-8')
+        res = self.app.get("/api/files.sh?streams=aoss.tower.ascii.l00.*")
+        fn = bytes(self._datetimes[0].strftime("rig_tower.%Y-%m-%d.ascii"), encoding="utf-8")
         assert fn in res.data
 
     def test_tower_daily_ascii_bat(self):
-        res = self.app.get('/api/files.bat?streams=aoss.tower.ascii.l00.*')
-        fn = bytes(self._datetimes[0].strftime('rig_tower.%Y-%m-%d.ascii'), encoding='utf-8')
+        res = self.app.get("/api/files.bat?streams=aoss.tower.ascii.l00.*")
+        fn = bytes(self._datetimes[0].strftime("rig_tower.%Y-%m-%d.ascii"), encoding="utf-8")
         assert fn in res.data
 
     def test_tower_daily_ascii_dated_json(self):
         dt = self._datetimes[1]
-        begin = dt.strftime('%Y-%m-%d')
-        res = self.app.get('/api/files.json?streams=aoss.tower.ascii.l00.*&begin={}'.format(begin))
-        fn = bytes(dt.strftime('rig_tower.%Y-%m-%d.ascii'), encoding='utf-8')
+        begin = dt.strftime("%Y-%m-%d")
+        res = self.app.get("/api/files.json?streams=aoss.tower.ascii.l00.*&begin={}".format(begin))
+        fn = bytes(dt.strftime("rig_tower.%Y-%m-%d.ascii"), encoding="utf-8")
         assert fn in res.data
 
     def test_tower_daily_ascii_relative_json(self):
@@ -86,12 +89,12 @@ class TestFilesAPI(unittest.TestCase):
         two days of files.
         """
         dt = self._datetimes[1]
-        begin = dt.strftime('%Y-%m-%d')
-        res = self.app.get('/api/files.json?streams=aoss.tower.ascii.l00.*&begin=-2&end={}'.format(begin))
+        begin = dt.strftime("%Y-%m-%d")
+        res = self.app.get("/api/files.json?streams=aoss.tower.ascii.l00.*&begin=-2&end={}".format(begin))
         for dt in self._datetimes[1:]:
-            fn = bytes(dt.strftime('rig_tower.%Y-%m-%d.ascii'), encoding='utf-8')
+            fn = bytes(dt.strftime("rig_tower.%Y-%m-%d.ascii"), encoding="utf-8")
             assert fn in res.data
-        fn = bytes(self._datetimes[0].strftime('rig_tower.%Y-%m-%d.ascii'), encoding='utf-8')
+        fn = bytes(self._datetimes[0].strftime("rig_tower.%Y-%m-%d.ascii"), encoding="utf-8")
         assert fn not in res.data
 
     def test_tower_daily_ascii_both_relative_json(self):
@@ -100,43 +103,43 @@ class TestFilesAPI(unittest.TestCase):
         Begin and end are relative to today so begin=-2 and end=-2 should mean
         we only get 1 file for the day before yesterday.
         """
-        res = self.app.get('/api/files.json?streams=aoss.tower.ascii.l00.*&begin=-2&end=-2')
+        res = self.app.get("/api/files.json?streams=aoss.tower.ascii.l00.*&begin=-2&end=-2")
         for dt in self._datetimes[2:3]:
-            fn = bytes(dt.strftime('rig_tower.%Y-%m-%d.ascii'), encoding='utf-8')
+            fn = bytes(dt.strftime("rig_tower.%Y-%m-%d.ascii"), encoding="utf-8")
             assert fn in res.data
-        fn = bytes(self._datetimes[0].strftime('rig_tower.%Y-%m-%d.ascii'), encoding='utf-8')
+        fn = bytes(self._datetimes[0].strftime("rig_tower.%Y-%m-%d.ascii"), encoding="utf-8")
         assert fn not in res.data
 
     def test_tower_daily_ascii_relative_noend_json(self):
-        res = self.app.get('/api/files.json?streams=aoss.tower.ascii.l00.*&begin=-2')
+        res = self.app.get("/api/files.json?streams=aoss.tower.ascii.l00.*&begin=-2")
         for dt in self._datetimes:
-            fn = bytes(dt.strftime('rig_tower.%Y-%m-%d.ascii'), encoding='utf-8')
+            fn = bytes(dt.strftime("rig_tower.%Y-%m-%d.ascii"), encoding="utf-8")
             assert fn in res.data
         # fn = bytes(self._datetimes[0].strftime('rig_tower.%Y-%m-%d.ascii'), encoding='utf-8')
         # assert fn not in res.data
 
     def test_tower_all_patterns(self):
-        res = self.app.get('/api/files.json?streams=aoss.tower.*.l00.*')
-        res = json.loads(str(res.data, encoding='utf-8'))
-        fn = self._datetimes[0].strftime('rig_tower.%Y-%m-%d.ascii')
-        assert res['data'][0]['filename'] == fn
+        res = self.app.get("/api/files.json?streams=aoss.tower.*.l00.*")
+        res = json.loads(str(res.data, encoding="utf-8"))
+        fn = self._datetimes[0].strftime("rig_tower.%Y-%m-%d.ascii")
+        assert res["data"][0]["filename"] == fn
 
     def test_tower_multi_all_patterns(self):
-        res = self.app.get('/api/files.json?streams=aoss.tower.*.l00.*:aoss.tower.nc-daily.lb1.v00')
-        res = json.loads(str(res.data, encoding='utf-8'))
-        fn = self._datetimes[0].strftime('rig_tower.%Y-%m-%d.ascii')
-        assert res['data'][0]['filename'] == fn
-        fn = self._datetimes[0].strftime('aoss_tower.%Y-%m-%d.nc')
-        assert res['data'][1]['filename'] == fn
+        res = self.app.get("/api/files.json?streams=aoss.tower.*.l00.*:aoss.tower.nc-daily.lb1.v00")
+        res = json.loads(str(res.data, encoding="utf-8"))
+        fn = self._datetimes[0].strftime("rig_tower.%Y-%m-%d.ascii")
+        assert res["data"][0]["filename"] == fn
+        fn = self._datetimes[0].strftime("aoss_tower.%Y-%m-%d.nc")
+        assert res["data"][1]["filename"] == fn
 
     def test_tower_dates(self):
-        dates = tuple(dt.strftime('%Y-%m-%d') for dt in self._datetimes[::2])
-        res = self.app.get('/api/files.json?streams=aoss.tower.nc-daily.lb1.v00&dates={}:{}'.format(*dates))
-        res = json.loads(str(res.data, encoding='utf-8'))
-        fn = self._datetimes[0].strftime('aoss_tower.%Y-%m-%d.nc')
-        assert res['data'][0]['filename'] == fn
-        fn = self._datetimes[2].strftime('aoss_tower.%Y-%m-%d.nc')
-        assert res['data'][1]['filename'] == fn
+        dates = tuple(dt.strftime("%Y-%m-%d") for dt in self._datetimes[::2])
+        res = self.app.get("/api/files.json?streams=aoss.tower.nc-daily.lb1.v00&dates={}:{}".format(*dates))
+        res = json.loads(str(res.data, encoding="utf-8"))
+        fn = self._datetimes[0].strftime("aoss_tower.%Y-%m-%d.nc")
+        assert res["data"][0]["filename"] == fn
+        fn = self._datetimes[2].strftime("aoss_tower.%Y-%m-%d.nc")
+        assert res["data"][1]["filename"] == fn
 
 
 if __name__ == "__main__":
diff --git a/metobsapi/tests/test_misc.py b/metobsapi/tests/test_misc.py
index 1ba66fa..5b0b275 100644
--- a/metobsapi/tests/test_misc.py
+++ b/metobsapi/tests/test_misc.py
@@ -1,8 +1,7 @@
-import unittest
-import tempfile
-import shutil
 import json
-
+import shutil
+import tempfile
+import unittest
 
 # class TestErrorHandlers(unittest.TestCase):
 #     def setUp(self):
@@ -21,24 +20,27 @@ import json
 class TestIndex(unittest.TestCase):
     def setUp(self):
         import metobsapi
-        metobsapi.app.config['TESTING'] = True
-        metobsapi.app.config['DEBUG'] = True
+
+        metobsapi.app.config["TESTING"] = True
+        metobsapi.app.config["DEBUG"] = True
         self.app = metobsapi.app.test_client()
 
     def tearDown(self):
         pass
 
     def test_index(self):
-        res = self.app.get('/api/')
-        self.assertIn(b'Metobs API', res.data)
+        res = self.app.get("/api/")
+        self.assertIn(b"Metobs API", res.data)
 
 
 class TestArchiveInfo(unittest.TestCase):
     @classmethod
     def setUpClass(cls):
-        from metobsapi.util import create_fake_archive, file_responses
         from datetime import datetime, timedelta
-        cls.archive_dir = tempfile.mkdtemp(suffix='_metobsapi_files_test')
+
+        from metobsapi.util import create_fake_archive, file_responses
+
+        cls.archive_dir = tempfile.mkdtemp(suffix="_metobsapi_files_test")
         # need now for 'recent' queries
         now = datetime.utcnow()
         cls._datetimes = [now, now - timedelta(days=1), now - timedelta(days=2)]
@@ -52,19 +54,20 @@ class TestArchiveInfo(unittest.TestCase):
 
     def setUp(self):
         import metobsapi
-        metobsapi.app.config['TESTING'] = True
-        metobsapi.app.config['DEBUG'] = True
-        metobsapi.app.config['ARCHIVE_ROOT'] = self.archive_dir
+
+        metobsapi.app.config["TESTING"] = True
+        metobsapi.app.config["DEBUG"] = True
+        metobsapi.app.config["ARCHIVE_ROOT"] = self.archive_dir
         self.app = metobsapi.app.test_client()
 
     def tearDown(self):
         pass
 
     def test_archive_info(self):
-        res = self.app.get('/api/archive/info')
+        res = self.app.get("/api/archive/info")
         res = json.loads(res.data.decode())
-        self.assertEqual(res['code'], 200)
-        self.assertIn('sites', res)
+        self.assertEqual(res["code"], 200)
+        self.assertIn("sites", res)
 
 
 if __name__ == "__main__":
diff --git a/metobsapi/util/__init__.py b/metobsapi/util/__init__.py
index d6821ec..c64b90d 100644
--- a/metobsapi/util/__init__.py
+++ b/metobsapi/util/__init__.py
@@ -7,26 +7,26 @@ from datetime import datetime
 from enum import Enum
 
 SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
-FAKE_ARCHIVE_PATH = os.path.join(SCRIPT_DIR, '..', '..', 'fake_archive')
+FAKE_ARCHIVE_PATH = os.path.join(SCRIPT_DIR, "..", "..", "fake_archive")
 
 
 class ProductFrequency(Enum):
     # Single file in a YYYY/MM/ directory
-    DAILY_FILE = 'daily_file'
+    DAILY_FILE = "daily_file"
     # One or more files in a YYYY/MM/DD/ directory
-    DAILY_DIR = 'daily_dir'
+    DAILY_DIR = "daily_dir"
     # Single file in a YYYY/ directory
-    MONTHLY_FILE = 'monthly_file'
+    MONTHLY_FILE = "monthly_file"
     # One or more files in a YYYY/MM/ directory
-    MONTHLY_DIR = 'monthly_dir'
+    MONTHLY_DIR = "monthly_dir"
 
 
 # Directory format for the type of data file frequency
 FREQUENCY_DIR_FMT = {
-    ProductFrequency.DAILY_DIR: os.path.join('%Y', '%m', '%d'),
-    ProductFrequency.DAILY_FILE: os.path.join('%Y', '%m', '%d'),
-    ProductFrequency.MONTHLY_DIR: os.path.join('%Y', '%m'),
-    ProductFrequency.MONTHLY_FILE: os.path.join('%Y', '%m'),
+    ProductFrequency.DAILY_DIR: os.path.join("%Y", "%m", "%d"),
+    ProductFrequency.DAILY_FILE: os.path.join("%Y", "%m", "%d"),
+    ProductFrequency.MONTHLY_DIR: os.path.join("%Y", "%m"),
+    ProductFrequency.MONTHLY_FILE: os.path.join("%Y", "%m"),
 }
 
 
@@ -40,29 +40,28 @@ def create_fake_archive(archive_info, root=FAKE_ARCHIVE_PATH, datetimes=None):
     for site, inst_info in archive_info.items():
         os.makedirs(site, exist_ok=True)
         os.chdir(site)
-        for inst, inst_info in inst_info['instruments'].items():
+        for inst, inst_info in inst_info["instruments"].items():
             os.makedirs(inst, exist_ok=True)
             os.chdir(inst)
-            for level_name, level_info in inst_info['levels'].items():
+            for level_name, level_info in inst_info["levels"].items():
                 os.makedirs(level_name, exist_ok=True)
                 os.chdir(level_name)
-                for version_name in level_info['versions']:
+                for version_name in level_info["versions"]:
                     os.makedirs(version_name, exist_ok=True)
                     os.chdir(version_name)
                     for dt in datetimes:
-                        for pattern_name, pattern_info in level_info['products'].items():
-                            if pattern_info['frequency'] not in FREQUENCY_DIR_FMT:
-                                raise RuntimeError("Unknown frequency '%s'", pattern_info['frequency'])
+                        for pattern_name, pattern_info in level_info["products"].items():
+                            if pattern_info["frequency"] not in FREQUENCY_DIR_FMT:
+                                raise RuntimeError("Unknown frequency '%s'", pattern_info["frequency"])
 
-                            fmt = FREQUENCY_DIR_FMT[pattern_info['frequency']]
+                            fmt = FREQUENCY_DIR_FMT[pattern_info["frequency"]]
                             dated_dir = dt.strftime(fmt)
                             os.makedirs(dated_dir, exist_ok=True)
                             os.chdir(dated_dir)
-                            open(dt.strftime(pattern_info['pattern']), 'a').close()
-                            os.chdir('../' * (fmt.count(os.sep) + 1))
-                    os.chdir('..')
-                os.chdir('..')
-            os.chdir('..')
-        os.chdir('..')
+                            open(dt.strftime(pattern_info["pattern"]), "a").close()
+                            os.chdir("../" * (fmt.count(os.sep) + 1))
+                    os.chdir("..")
+                os.chdir("..")
+            os.chdir("..")
+        os.chdir("..")
     os.chdir(curr_dir)
-
diff --git a/metobsapi/util/data_responses.py b/metobsapi/util/data_responses.py
index 311305a..0805a46 100644
--- a/metobsapi/util/data_responses.py
+++ b/metobsapi/util/data_responses.py
@@ -1,57 +1,57 @@
 SYMBOL_TRANSLATIONS = {
-    ('aoss', 'tower'): {
-        'air_temp': 'air_temp',
-        'rel_hum': 'rel_hum',
-        'dewpoint': 'dewpoint',
-        'accum_precip': 'accum_precip',
-        'pressure': 'pressure',
-        'altimeter': 'altimeter',
-        'solar_flux': 'solar_flux',
-        'wind_speed': 'wind_speed',
+    ("aoss", "tower"): {
+        "air_temp": "air_temp",
+        "rel_hum": "rel_hum",
+        "dewpoint": "dewpoint",
+        "accum_precip": "accum_precip",
+        "pressure": "pressure",
+        "altimeter": "altimeter",
+        "solar_flux": "solar_flux",
+        "wind_speed": "wind_speed",
     },
-    ('mendota', 'buoy'): {
-        'air_temp': 'air_temp',
-        'dewpoint': 'dewpoint',
-        'rel_hum': 'rel_hum',
-        'wind_speed': 'wind_speed',
-        'compass': 'compass',
-        'uc_wind_dir': 'uc_wind_dir',
-        'run_wind_speed': 'run_wind_speed',
-        'gust': 'gust',
-        'TargmV': 'TargmV',
-        'SBTempC': 'SBTempC',
-        'water_skin_temp': 'water_skin_temp',
-        'water_temp_1': 'water_temp_1',
-        'water_temp_2': 'water_temp_2',
-        'water_temp_3': 'water_temp_3',
-        'water_temp_4': 'water_temp_4',
-        'water_temp_5': 'water_temp_5',
-        'water_temp_6': 'water_temp_6',
-        'water_temp_7': 'water_temp_7',
-        'water_temp_8': 'water_temp_8',
-        'water_temp_9': 'water_temp_9',
-        'water_temp_10': 'water_temp_10',
-        'water_temp_11': 'water_temp_11',
-        'water_temp_12': 'water_temp_12',
-        'water_temp_13': 'water_temp_13',
-        'water_temp_14': 'water_temp_14',
-        'water_temp_15': 'water_temp_15',
-        'water_temp_16': 'water_temp_16',
-        'water_temp_17': 'water_temp_17',
-        'water_temp_18': 'water_temp_18',
-        'water_temp_19': 'water_temp_19',
-        'water_temp_20': 'water_temp_20',
-        'water_temp_21': 'water_temp_21',
-        'water_temp_22': 'water_temp_22',
-        'water_temp_23': 'water_temp_23',
-        'doptotemp': 'doptotemp',
-        'doptosat': 'doptosat',
-        'doptoppm': 'doptoppm',
-        'chlorophyll': 'chlorophyll',
-        'phycocyanin': 'phycocyanin',
-        'pco2ppm_avg': 'pco2ppm_avg',
-        'par_above_avg': 'par_above_avg',
-        'par_below_avg': 'par_below_avg',
+    ("mendota", "buoy"): {
+        "air_temp": "air_temp",
+        "dewpoint": "dewpoint",
+        "rel_hum": "rel_hum",
+        "wind_speed": "wind_speed",
+        "compass": "compass",
+        "uc_wind_dir": "uc_wind_dir",
+        "run_wind_speed": "run_wind_speed",
+        "gust": "gust",
+        "TargmV": "TargmV",
+        "SBTempC": "SBTempC",
+        "water_skin_temp": "water_skin_temp",
+        "water_temp_1": "water_temp_1",
+        "water_temp_2": "water_temp_2",
+        "water_temp_3": "water_temp_3",
+        "water_temp_4": "water_temp_4",
+        "water_temp_5": "water_temp_5",
+        "water_temp_6": "water_temp_6",
+        "water_temp_7": "water_temp_7",
+        "water_temp_8": "water_temp_8",
+        "water_temp_9": "water_temp_9",
+        "water_temp_10": "water_temp_10",
+        "water_temp_11": "water_temp_11",
+        "water_temp_12": "water_temp_12",
+        "water_temp_13": "water_temp_13",
+        "water_temp_14": "water_temp_14",
+        "water_temp_15": "water_temp_15",
+        "water_temp_16": "water_temp_16",
+        "water_temp_17": "water_temp_17",
+        "water_temp_18": "water_temp_18",
+        "water_temp_19": "water_temp_19",
+        "water_temp_20": "water_temp_20",
+        "water_temp_21": "water_temp_21",
+        "water_temp_22": "water_temp_22",
+        "water_temp_23": "water_temp_23",
+        "doptotemp": "doptotemp",
+        "doptosat": "doptosat",
+        "doptoppm": "doptoppm",
+        "chlorophyll": "chlorophyll",
+        "phycocyanin": "phycocyanin",
+        "pco2ppm_avg": "pco2ppm_avg",
+        "par_above_avg": "par_above_avg",
+        "par_below_avg": "par_below_avg",
         # new starting in 2019
         "pco2volt_avg": "pco2volt_avg",
         "water_temp_2_5": "water_temp_2_5",  # 0.75m below surface (aka waterT)
@@ -68,19 +68,19 @@ SYMBOL_TRANSLATIONS = {
 }
 
 epoch_translation = {
-    'h': 'hours',
-    'm': 'minutes',
-    's': 'seconds',
-    'ms': 'milliseconds',
-    'u': 'microseconds',
-    'ns': 'nanoseconds'
+    "h": "hours",
+    "m": "minutes",
+    "s": "seconds",
+    "ms": "milliseconds",
+    "u": "microseconds",
+    "ns": "nanoseconds",
 }
 epoch_keys = epoch_translation.keys()
 
 INTERVALS = {
-    '1m': 60,
-    '5m': 60 * 5,
-    '1h': 60 * 60,
+    "1m": 60,
+    "5m": 60 * 5,
+    "1h": 60 * 60,
 }
 
 api_version_header = "X-Metobs-API-VERSION"
@@ -88,11 +88,16 @@ api_version = 1.0
 RESPONSES_LIMIT = 1000000
 
 ERROR_MESSAGES = {
-    'bad_order': (400, '\'order\' can only be \'column\' or \'row\' (default)'),
-    'bad_epoch': (400, '\'epoch\' can only be unspecified or {}'.format(", ".join(["\'{}\'".format(x) for x in epoch_keys]))),
-    'bad_interval': (400, '\'interval\' can only be unspecified or {}'.format(", ".join(["\'{}\'".format(x) for x in INTERVALS.keys()]))),
-    'malformed_timestamp': (400, 'could not parse timestamp parameters \'begin\' or \'end\', check format'),
-    'missing_symbols': (400, '\'symbols\' must be specified'),
-    'missing_site_inst': (400, '\'site\' and \'inst\' must both be specified or not at all'),
+    "bad_order": (400, "'order' can only be 'column' or 'row' (default)"),
+    "bad_epoch": (
+        400,
+        "'epoch' can only be unspecified or {}".format(", ".join(["'{}'".format(x) for x in epoch_keys])),
+    ),
+    "bad_interval": (
+        400,
+        "'interval' can only be unspecified or {}".format(", ".join(["'{}'".format(x) for x in INTERVALS.keys()])),
+    ),
+    "malformed_timestamp": (400, "could not parse timestamp parameters 'begin' or 'end', check format"),
+    "missing_symbols": (400, "'symbols' must be specified"),
+    "missing_site_inst": (400, "'site' and 'inst' must both be specified or not at all"),
 }
-
diff --git a/metobsapi/util/file_responses.py b/metobsapi/util/file_responses.py
index 56bf3df..5b2865c 100644
--- a/metobsapi/util/file_responses.py
+++ b/metobsapi/util/file_responses.py
@@ -1,6 +1,7 @@
 import os
 from collections import defaultdict
-from metobsapi.util import ProductFrequency, FREQUENCY_DIR_FMT
+
+from metobsapi.util import FREQUENCY_DIR_FMT, ProductFrequency
 
 # ARM Data Levels: https://www.arm.gov/policies/datapolicies/formatting-and-file-naming-protocols
 L00_DESCRIPTION = "raw data – primary raw data stream collected directly from instrument"
@@ -10,114 +11,114 @@ LB1_DESCRIPTION = "QC checks applied to measurements"
 
 # TODO: Load from config file
 ARCHIVE_INFO = {
-    'aoss': {
-        'display_name': 'AOSS',
-        'instruments': {
-            'tower': {
-                'display_name': 'Tower',
-                'levels': {
-                    'level_00': {
-                        'description': L00_DESCRIPTION,
-                        'versions': ('version_00',),
-                        'products': {
-                            'ascii': {
-                                'frequency': ProductFrequency.DAILY_FILE,
-                                'pattern': 'aoss_tower.%Y-%m-%d.ascii',
-                                'display_name': 'Daily CSV (rig_tower.YYYY-MM-DD.ascii)',
+    "aoss": {
+        "display_name": "AOSS",
+        "instruments": {
+            "tower": {
+                "display_name": "Tower",
+                "levels": {
+                    "level_00": {
+                        "description": L00_DESCRIPTION,
+                        "versions": ("version_00",),
+                        "products": {
+                            "ascii": {
+                                "frequency": ProductFrequency.DAILY_FILE,
+                                "pattern": "aoss_tower.%Y-%m-%d.ascii",
+                                "display_name": "Daily CSV (rig_tower.YYYY-MM-DD.ascii)",
                             },
                         },
                     },
-                    'level_b1': {
-                        'description': LB1_DESCRIPTION,
-                        'versions': ('version_00',),
-                        'products': {
+                    "level_b1": {
+                        "description": LB1_DESCRIPTION,
+                        "versions": ("version_00",),
+                        "products": {
                             # 'nc-monthly': {
                             #     'frequency': ProductFrequency.MONTHLY_DIR,
                             #     'pattern': 'aoss_tower.%Y-%m.nc',
                             #     'display_name': 'Monthly NetCDF file (aoss_tower.YYYY-MM.nc)',
                             # },
-                            'nc-daily': {
-                                'frequency': ProductFrequency.DAILY_FILE,
-                                'pattern': 'aoss_tower.%Y-%m-%d.nc',
-                                'display_name': 'Daily NetCDF file (aoss_tower.YYYY-MM-DD.nc)',
-                                'preview_product': 'meteorogram-daily',
+                            "nc-daily": {
+                                "frequency": ProductFrequency.DAILY_FILE,
+                                "pattern": "aoss_tower.%Y-%m-%d.nc",
+                                "display_name": "Daily NetCDF file (aoss_tower.YYYY-MM-DD.nc)",
+                                "preview_product": "meteorogram-daily",
                             },
-                            'meteorogram-daily': {
-                                'frequency': ProductFrequency.DAILY_FILE,
-                                'pattern': 'aoss_tower.meteorogram.%Y-%m-%d.png',
-                                'thumbnail_pattern': 'aoss_tower.meteorogram.%Y-%m-%d_thumbnail.png',
-                                'display_name': 'Daily Meteorogram (aoss_tower.meteorogram.YYYY-MM-DD.png)',
+                            "meteorogram-daily": {
+                                "frequency": ProductFrequency.DAILY_FILE,
+                                "pattern": "aoss_tower.meteorogram.%Y-%m-%d.png",
+                                "thumbnail_pattern": "aoss_tower.meteorogram.%Y-%m-%d_thumbnail.png",
+                                "display_name": "Daily Meteorogram (aoss_tower.meteorogram.YYYY-MM-DD.png)",
                             },
-                            'td-daily': {
-                                'frequency': ProductFrequency.DAILY_FILE,
-                                'pattern': 'aoss_tower.td.%Y-%m-%d.png',
-                                'thumbnail_pattern': 'aoss_tower.td.%Y-%m-%d_thumbnail.png',
-                                'display_name': 'Daily Air and Dewpoint Temperature (aoss_tower.td.YYYY-MM-DD.png)',
+                            "td-daily": {
+                                "frequency": ProductFrequency.DAILY_FILE,
+                                "pattern": "aoss_tower.td.%Y-%m-%d.png",
+                                "thumbnail_pattern": "aoss_tower.td.%Y-%m-%d_thumbnail.png",
+                                "display_name": "Daily Air and Dewpoint Temperature (aoss_tower.td.YYYY-MM-DD.png)",
                             },
-                            'pressure-daily': {
-                                'frequency': ProductFrequency.DAILY_FILE,
-                                'pattern': 'aoss_tower.pressure.%Y-%m-%d.png',
-                                'thumbnail_pattern': 'aoss_tower.pressure.%Y-%m-%d_thumbnail.png',
-                                'display_name': 'Daily Pressure (aoss_tower.pressure.YYYY-MM-DD.png)',
+                            "pressure-daily": {
+                                "frequency": ProductFrequency.DAILY_FILE,
+                                "pattern": "aoss_tower.pressure.%Y-%m-%d.png",
+                                "thumbnail_pattern": "aoss_tower.pressure.%Y-%m-%d_thumbnail.png",
+                                "display_name": "Daily Pressure (aoss_tower.pressure.YYYY-MM-DD.png)",
                             },
-                            'wind-speed-daily': {
-                                'frequency': ProductFrequency.DAILY_FILE,
-                                'pattern': 'aoss_tower.wind_speed.%Y-%m-%d.png',
-                                'thumbnail_pattern': 'aoss_tower.wind_speed.%Y-%m-%d_thumbnail.png',
-                                'display_name': 'Daily Wind Speed (aoss_tower.wind_speed.YYYY-MM-DD.png)',
+                            "wind-speed-daily": {
+                                "frequency": ProductFrequency.DAILY_FILE,
+                                "pattern": "aoss_tower.wind_speed.%Y-%m-%d.png",
+                                "thumbnail_pattern": "aoss_tower.wind_speed.%Y-%m-%d_thumbnail.png",
+                                "display_name": "Daily Wind Speed (aoss_tower.wind_speed.YYYY-MM-DD.png)",
                             },
-                            'wind-dir-daily': {
-                                'frequency': ProductFrequency.DAILY_FILE,
-                                'pattern': 'aoss_tower.wind_dir.%Y-%m-%d.png',
-                                'thumbnail_pattern': 'aoss_tower.wind_dir.%Y-%m-%d_thumbnail.png',
-                                'display_name': 'Daily Wind Direction (aoss_tower.wind_dir.YYYY-MM-DD.png)',
+                            "wind-dir-daily": {
+                                "frequency": ProductFrequency.DAILY_FILE,
+                                "pattern": "aoss_tower.wind_dir.%Y-%m-%d.png",
+                                "thumbnail_pattern": "aoss_tower.wind_dir.%Y-%m-%d_thumbnail.png",
+                                "display_name": "Daily Wind Direction (aoss_tower.wind_dir.YYYY-MM-DD.png)",
                             },
-                            'accum-precip-daily': {
-                                'frequency': ProductFrequency.DAILY_FILE,
-                                'pattern': 'aoss_tower.accum_precip.%Y-%m-%d.png',
-                                'thumbnail_pattern': 'aoss_tower.accum_precip.%Y-%m-%d_thumbnail.png',
-                                'display_name': 'Daily Accumulated Precipitation (aoss_tower.accum_precip.YYYY-MM-DD.png)',
+                            "accum-precip-daily": {
+                                "frequency": ProductFrequency.DAILY_FILE,
+                                "pattern": "aoss_tower.accum_precip.%Y-%m-%d.png",
+                                "thumbnail_pattern": "aoss_tower.accum_precip.%Y-%m-%d_thumbnail.png",
+                                "display_name": "Daily Accumulated Precipitation (aoss_tower.accum_precip.YYYY-MM-DD.png)",
                             },
-                            'solar-flux-daily': {
-                                'frequency': ProductFrequency.DAILY_FILE,
-                                'pattern': 'aoss_tower.solar_flux.%Y-%m-%d.png',
-                                'thumbnail_pattern': 'aoss_tower.solar_flux.%Y-%m-%d_thumbnail.png',
-                                'display_name': 'Daily Solar Flux (aoss_tower.solar_flux.YYYY-MM-DD.png)',
+                            "solar-flux-daily": {
+                                "frequency": ProductFrequency.DAILY_FILE,
+                                "pattern": "aoss_tower.solar_flux.%Y-%m-%d.png",
+                                "thumbnail_pattern": "aoss_tower.solar_flux.%Y-%m-%d_thumbnail.png",
+                                "display_name": "Daily Solar Flux (aoss_tower.solar_flux.YYYY-MM-DD.png)",
                             },
                         },
                     },
                 },
             },
-            'aeri': {
-                'display_name': 'AERI',
-                'levels': {
-                    'level_00': {
-                        'description': L00_DESCRIPTION,
-                        'versions': ('version_00',),
-                        'products': {
-                            'par': {
-                                'frequency': ProductFrequency.DAILY_DIR,
-                                'pattern': '%y%m%d.PAR',
+            "aeri": {
+                "display_name": "AERI",
+                "levels": {
+                    "level_00": {
+                        "description": L00_DESCRIPTION,
+                        "versions": ("version_00",),
+                        "products": {
+                            "par": {
+                                "frequency": ProductFrequency.DAILY_DIR,
+                                "pattern": "%y%m%d.PAR",
                             },
-                            'qc': {
-                                'frequency': ProductFrequency.DAILY_DIR,
-                                'pattern': '%y%m%d.QC',
+                            "qc": {
+                                "frequency": ProductFrequency.DAILY_DIR,
+                                "pattern": "%y%m%d.QC",
                             },
-                            'sum': {
-                                'frequency': ProductFrequency.DAILY_DIR,
-                                'pattern': '%y%m%d.SUM',
+                            "sum": {
+                                "frequency": ProductFrequency.DAILY_DIR,
+                                "pattern": "%y%m%d.SUM",
                             },
-                            'scr-aesitter': {
-                                'frequency': ProductFrequency.DAILY_DIR,
-                                'pattern': 'AESITTER.SCR',
+                            "scr-aesitter": {
+                                "frequency": ProductFrequency.DAILY_DIR,
+                                "pattern": "AESITTER.SCR",
                             },
-                            'scr-radiance': {
-                                'frequency': ProductFrequency.DAILY_DIR,
-                                'pattern': 'RADIANCE.SCR',
+                            "scr-radiance": {
+                                "frequency": ProductFrequency.DAILY_DIR,
+                                "pattern": "RADIANCE.SCR",
                             },
-                            'scr-summary': {
-                                'frequency': ProductFrequency.DAILY_DIR,
-                                'pattern': 'SUMMARY.SCR',
+                            "scr-summary": {
+                                "frequency": ProductFrequency.DAILY_DIR,
+                                "pattern": "SUMMARY.SCR",
                             },
                         },
                     },
@@ -125,33 +126,33 @@ ARCHIVE_INFO = {
             },
         },
     },
-    'mendota': {
-        'display_name': 'Mendota',
-        'instruments': {
-
-        },
+    "mendota": {
+        "display_name": "Mendota",
+        "instruments": {},
     },
 }
 # Add the other AERI file types
-for file_suffix in ('B1.CXS',
-                    'B1.UVS',
-                    'B2.CXS',
-                    'B2.UVS',
-                    'C1.RNC',
-                    'C2.RNC',
-                    'F1.CSV',
-                    'F1.CXS',
-                    'F1.UVS',
-                    'F2.CSV',
-                    'F2.CXS',
-                    'F2.UVS'):
-    parts = file_suffix.split('.')
-    product_id = parts[1].lower() + '-' + parts[0].lower()
+for file_suffix in (
+    "B1.CXS",
+    "B1.UVS",
+    "B2.CXS",
+    "B2.UVS",
+    "C1.RNC",
+    "C2.RNC",
+    "F1.CSV",
+    "F1.CXS",
+    "F1.UVS",
+    "F2.CSV",
+    "F2.CXS",
+    "F2.UVS",
+):
+    parts = file_suffix.split(".")
+    product_id = parts[1].lower() + "-" + parts[0].lower()
     nfo = {
-        'frequency': ProductFrequency.DAILY_DIR,
-        'pattern': '%y%m%d{}'.format(file_suffix),
+        "frequency": ProductFrequency.DAILY_DIR,
+        "pattern": "%y%m%d{}".format(file_suffix),
     }
-    ARCHIVE_INFO['aoss']['instruments']['aeri']['levels']['level_00']['products'][product_id] = nfo
+    ARCHIVE_INFO["aoss"]["instruments"]["aeri"]["levels"]["level_00"]["products"][product_id] = nfo
 
 # Create stream_id -> stream_info mapping
 ARCHIVE_STREAMS = {}
@@ -159,52 +160,56 @@ ARCHIVE_STREAMS = {}
 INSTRUMENT_STREAMS = defaultdict(list)
 stream_id_fmt = "{site}.{inst}.{product}.{level}.{version}"
 for site, site_info in ARCHIVE_INFO.items():
-    for inst, inst_info in site_info['instruments'].items():
-        inst_name = site + '.' + inst
-        for level, level_info in inst_info['levels'].items():
+    for inst, inst_info in site_info["instruments"].items():
+        inst_name = site + "." + inst
+        for level, level_info in inst_info["levels"].items():
             all_products_recent = []
-            for version in level_info['versions']:
+            for version in level_info["versions"]:
                 all_products = []
-                for product_id, pattern_info in level_info['products'].items():
+                for product_id, pattern_info in level_info["products"].items():
                     stream_id = stream_id_fmt.format(
                         site=site,
                         inst=inst,
-                        level=level.replace('level_', 'l'),
+                        level=level.replace("level_", "l"),
                         product=product_id,
-                        version=version.replace('version_', 'v'),
+                        version=version.replace("version_", "v"),
                     )
                     all_products.append(stream_id)
 
-                    path = os.path.join(site,
-                                        inst,
-                                        level,
-                                        version,
-                                        FREQUENCY_DIR_FMT[pattern_info['frequency']],
-                                        pattern_info['pattern'])
+                    path = os.path.join(
+                        site,
+                        inst,
+                        level,
+                        version,
+                        FREQUENCY_DIR_FMT[pattern_info["frequency"]],
+                        pattern_info["pattern"],
+                    )
 
                     stream_info = {
-                        'relpath': path,
-                        'site': site,
-                        'inst': inst,
-                        'level': level,
-                        'version': version,
-                        'file_pattern': pattern_info['pattern']
+                        "relpath": path,
+                        "site": site,
+                        "inst": inst,
+                        "level": level,
+                        "version": version,
+                        "file_pattern": pattern_info["pattern"],
                     }
-                    if 'thumbnail_pattern' in pattern_info:
-                        stream_info['thumbnail'] = path.replace(pattern_info['pattern'], pattern_info['thumbnail_pattern'])
+                    if "thumbnail_pattern" in pattern_info:
+                        stream_info["thumbnail"] = path.replace(
+                            pattern_info["pattern"], pattern_info["thumbnail_pattern"]
+                        )
                     else:
-                        stream_info['thumbnail'] = None
+                        stream_info["thumbnail"] = None
                     ARCHIVE_STREAMS[stream_id] = stream_info
                     INSTRUMENT_STREAMS[inst_name].append(stream_id)
 
                     # Special 'most recent' version stream_id
-                    if version == level_info['versions'][0]:
+                    if version == level_info["versions"][0]:
                         recent_stream_id = stream_id_fmt.format(
                             site=site,
                             inst=inst,
-                            level=level.replace('level_', 'l'),
+                            level=level.replace("level_", "l"),
                             product=product_id,
-                            version='*',
+                            version="*",
                         )
                         ARCHIVE_STREAMS[recent_stream_id] = ARCHIVE_STREAMS[stream_id]
                         INSTRUMENT_STREAMS[inst_name].append(recent_stream_id)
@@ -212,25 +217,24 @@ for site, site_info in ARCHIVE_INFO.items():
                 all_patterns_id = stream_id_fmt.format(
                     site=site,
                     inst=inst,
-                    level=level.replace('level_', 'l'),
-                    product='*',
-                    version=version.replace('version_', 'v'),
+                    level=level.replace("level_", "l"),
+                    product="*",
+                    version=version.replace("version_", "v"),
                 )
                 ARCHIVE_STREAMS[all_patterns_id] = all_products
                 INSTRUMENT_STREAMS[inst_name].append(all_patterns_id)
-                if version == level_info['versions'][0]:
-                    all_patterns_recent_id = all_patterns_id.replace(
-                        version.replace('version_', 'v'), '*')
+                if version == level_info["versions"][0]:
+                    all_patterns_recent_id = all_patterns_id.replace(version.replace("version_", "v"), "*")
                     ARCHIVE_STREAMS[all_patterns_recent_id] = all_products
                     INSTRUMENT_STREAMS[inst_name].append(all_patterns_recent_id)
 
 ERROR_MESSAGES = {
-    'datetime_error': (400, 'missing begin or end time parameters'),
-    'malformed_timestamp': (400, 'could not parse timestamp parameters \'begin\' or \'end\', check format'),
-    'missing_inst': (400, 'missing or unknown inst parameter'),
-    'missing_site': (400, 'missing or unknown site parameter'),
-    'missing_level': (400, 'missing or unknown level parameter'),
-    'missing_streams': (400, 'missing or unknown stream pattern parameter'),
-    'missing_version': (400, 'missing or unknown version parameter'),
-    'unknown_stream': (400, "unknown stream ID, expected 'site'.'inst'.'level=lXX'.'pattern'.'versionXX or \"*\"'"),
+    "datetime_error": (400, "missing begin or end time parameters"),
+    "malformed_timestamp": (400, "could not parse timestamp parameters 'begin' or 'end', check format"),
+    "missing_inst": (400, "missing or unknown inst parameter"),
+    "missing_site": (400, "missing or unknown site parameter"),
+    "missing_level": (400, "missing or unknown level parameter"),
+    "missing_streams": (400, "missing or unknown stream pattern parameter"),
+    "missing_version": (400, "missing or unknown version parameter"),
+    "unknown_stream": (400, "unknown stream ID, expected 'site'.'inst'.'level=lXX'.'pattern'.'versionXX or \"*\"'"),
 }
diff --git a/metobsapi/util/query_influx.py b/metobsapi/util/query_influx.py
index 25153cc..35c03ef 100644
--- a/metobsapi/util/query_influx.py
+++ b/metobsapi/util/query_influx.py
@@ -1,17 +1,18 @@
-from influxdb import InfluxDBClient
 from datetime import timedelta
+
 from flask import current_app
+from influxdb import InfluxDBClient
 
 QUERY_FORMAT = "SELECT {symbol_list} FROM metobs.forever.metobs_{interval} WHERE {where_clause} GROUP BY site,inst"
 
 
 def parse_dt(d):
     if d is None:
-        return 'now()'
+        return "now()"
     elif isinstance(d, timedelta):
-        return 'now() - {:d}s'.format(int(d.total_seconds()))
+        return "now() - {:d}s".format(int(d.total_seconds()))
     else:
-        return d.strftime('\'%Y-%m-%dT%H:%M:%SZ\'')
+        return d.strftime("'%Y-%m-%dT%H:%M:%SZ'")
 
 
 def build_queries(symbols, begin, end, value):
@@ -31,7 +32,7 @@ def build_queries(symbols, begin, end, value):
         wc.append("site='{}'".format(si[0]))
         wc.append("inst='{}'".format(si[1]))
         query = QUERY_FORMAT.format(
-            symbol_list=', '.join(s_list[1]),
+            symbol_list=", ".join(s_list[1]),
             interval=value,
             where_clause=" AND ".join(wc),
         )
@@ -42,9 +43,10 @@ def build_queries(symbols, begin, end, value):
 
 def query(query_str, epoch):
     client = InfluxDBClient(
-        current_app.config['INFLUXDB_HOST'],
-        current_app.config['INFLUXDB_PORT'],
-        current_app.config['INFLUXDB_USER'],
-        current_app.config['INFLUXDB_PASS'],
-        current_app.config['INFLUXDB_DB'])
+        current_app.config["INFLUXDB_HOST"],
+        current_app.config["INFLUXDB_PORT"],
+        current_app.config["INFLUXDB_USER"],
+        current_app.config["INFLUXDB_PASS"],
+        current_app.config["INFLUXDB_DB"],
+    )
     return client.query(query_str, epoch=epoch)
diff --git a/metobsapi_development.py b/metobsapi_development.py
index 985da47..f10d2d9 100644
--- a/metobsapi_development.py
+++ b/metobsapi_development.py
@@ -1,4 +1,5 @@
 import os
+
 from metobsapi.common_config import *
 from metobsapi.util import FAKE_ARCHIVE_PATH, create_fake_archive
 from metobsapi.util.file_responses import ARCHIVE_INFO
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..6e4a838
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,68 @@
+[build-system]
+requires = ["setuptools>=45", "wheel", "setuptools_scm[toml]>=6.2", 'setuptools_scm_git_archive']
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools_scm]
+write_to = "metobsapi/version.py"
+
+[tool.isort]
+sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"]
+profile = "black"
+skip_gitignore = true
+default_section = "THIRDPARTY"
+known_first_party = "metobsapi"
+line_length = 120
+skip = [".gitignore", "metobsapi/version.py"]
+
+[tool.black]
+line-length = 120
+exclude = '''
+(
+  \.git
+  | build
+  | dist
+  | metobsapi/version\.py
+)
+
+'''
+
+[tool.mypy]
+python_version = "3.10"
+
+[project]
+name = "metobsapi"
+authors = [
+    {name = "David Hoese", email = "david.hoese@ssec.wisc.edu"},
+]
+description = "Flask application for serving RAIN Meteorology Observations"
+readme = "README.md"
+keywords = ["geosphere", "satellite", "cspp", "ssec", "cimss"]
+license = {text = "MIT"}
+classifiers = [
+    "Framework :: Flask",
+    "Programming Language :: Python :: 3",
+    "Development Status :: 3 - Alpha",
+    "Intended Audience :: Science/Research",
+    "License :: OSI Approved :: MIT License",
+    "Operating System :: OS Independent",
+    "Programming Language :: Python",
+    "Topic :: Scientific/Engineering",
+]
+requires-python = ">=3.10"
+dependencies = [
+    "flask",
+    "influxdb-client",
+    "pandas",
+    "flask_json",
+    "flask_cors",
+]
+dynamic = ["version"]
+
+[project.optional-dependencies]
+tests = [
+    "pytest",
+]
+
+[tool.setuptools]
+packages = ["metobsapi"]
+
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..03adec3
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,9 @@
+[flake8]
+max-line-length = 120
+ignore = D101,D102,D103,D104,D106,D107,W503,E203,B008
+
+[coverage:run]
+relative_files = True
+omit =
+    metobsapi/version.py
+
diff --git a/setup.py b/setup.py
deleted file mode 100644
index dc9da17..0000000
--- a/setup.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from setuptools import setup, find_packages
-
-requires = [
-    'flask',
-    'influxdb',
-    'pandas',
-    'flask_json',
-    'flask_cors',
-    ]
-
-setup(name='metobsapi',
-      version='0.1',
-      description='MetObsApi',
-      author='David Hoese',
-      author_email='david.hoese@ssec.wisc.edu',
-      url='http://metobs.ssec.wisc.edu/',
-      packages=find_packages(),
-      include_package_data=True,
-      zip_safe=False,
-      install_requires=requires,
-      tests_require=requires,
-      )
\ No newline at end of file
-- 
GitLab