diff --git a/.bandit b/.bandit
new file mode 100644
index 0000000000000000000000000000000000000000..9ccd9289dbe7ad46fbb3d859512332cc1359ae02
--- /dev/null
+++ b/.bandit
@@ -0,0 +1,3 @@
+[bandit]
+skips: B506
+exclude: metobsapi/tests
diff --git a/.git_archival.txt b/.git_archival.txt
new file mode 100644
index 0000000000000000000000000000000000000000..95cb3eea4e336c70fb50a38b6e59771f4e76649c
--- /dev/null
+++ b/.git_archival.txt
@@ -0,0 +1 @@
+ref-names: $Format:%D$
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000000000000000000000000000000000000..00a7b00c94e08b86c765d47689b6523148c46eec
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1 @@
+.git_archival.txt  export-subst
diff --git a/.gitignore b/.gitignore
index 3b9cbf5865ffe95846d7396358ed7fded7ac02d3..20f654f0a3933451a6aef02d532a4c3667764016 100644
--- a/.gitignore
+++ b/.gitignore
@@ -144,3 +144,5 @@ com_crashlytics_export_strings.xml
 crashlytics.properties
 crashlytics-build.properties
 fabric.properties
+
+aosstower/version.py
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..847932ae7b467d3c25e73fce2f1433f80e538d75
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,54 @@
+exclude: '^$'
+fail_fast: false
+repos:
+  - repo: https://github.com/psf/black
+    rev: 23.1.0
+    hooks:
+      - id: black
+        language_version: python3
+        args:
+          - --target-version=py310
+  - repo: https://github.com/pycqa/isort
+    rev: 5.12.0
+    hooks:
+      - id: isort
+        language_version: python3
+  - repo: https://github.com/charliermarsh/ruff-pre-commit
+    # Ruff version.
+    rev: 'v0.0.254'
+    hooks:
+      - id: ruff
+#  - repo: https://github.com/PyCQA/flake8
+#    rev: 6.0.0
+#    hooks:
+#      - id: flake8
+#        additional_dependencies: [flake8-docstrings, flake8-debugger, flake8-bugbear, mccabe]
+#        args: [--max-complexity, "10"]
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.4.0
+    hooks:
+      - id: trailing-whitespace
+      - id: end-of-file-fixer
+      - id: check-yaml
+  - repo: https://github.com/scop/pre-commit-shfmt
+    rev: v3.6.0-1
+    hooks:
+      # Choose one of:
+      - id: shfmt         # native (requires Go to build)
+        args: ["-i", "4"]
+      #- id: shfmt-docker  # Docker image (requires Docker to run)
+      #
+#  - repo: https://github.com/PyCQA/bandit
+#    rev: '1.7.4'
+#    hooks:
+#      - id: bandit
+#        args: [--ini, .bandit]
+  - repo: https://github.com/pre-commit/mirrors-mypy
+    rev: 'v1.0.1'  # Use the sha / tag you want to point at
+    hooks:
+      - id: mypy
+        additional_dependencies:
+          - types-docutils
+          - types-pkg-resources  # NOTE(review): stub removed from typeshed; types-setuptools is the successor — confirm mypy still resolves it
+          - types-PyYAML
+          - types-requests
diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644
index 0000000000000000000000000000000000000000..bf4d7327646e3d01307f6b8d45d9c7a1f399d486
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,19 @@
+Copyright (c) 2023 SSEC Developers
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..396f1ada91ac93ead740a6f4548e49fd55f9afce
--- /dev/null
+++ b/README.md
@@ -0,0 +1 @@
+# AOSS Tower
diff --git a/aosstower/frame.py b/aosstower/frame.py
index b2b837988a40c4c0822b28802b03ae186dcda6ed..d93865eae923f0f56cd903bd55063ce3a7c0a742 100644
--- a/aosstower/frame.py
+++ b/aosstower/frame.py
@@ -4,7 +4,6 @@ from aosstower import station
 
 
 class Frame(MutableMapping):
-
     def __init__(self, width=station.DATA_INTERVAL):
         self._data = {}
         self.width = width
diff --git a/aosstower/level_00/influxdb.py b/aosstower/level_00/influxdb.py
index 9771149b0cbf593f1d6d7772837f85e08f996b10..8b5177c54fa53b92cbbf34cde73cbca4c81b576b 100644
--- a/aosstower/level_00/influxdb.py
+++ b/aosstower/level_00/influxdb.py
@@ -1,22 +1,20 @@
 #!/usr/bin/env python
-# encoding: utf8
-"""Insert Tower data in to an InfluxDB for real time use.
-
-"""
+"""Insert Tower data in to an InfluxDB for real time use."""
 import logging
 import logging.handlers
-import time
 import sys
-import requests
+import time
 from datetime import timedelta
 from urllib.parse import urlencode
+
+import numpy as np
+import pandas as pd
+import requests
 from metobscommon import influxdb
-from aosstower.level_00.parser import read_frames
 from metobscommon.util import calc
 from metobscommon.util.nc import calculate_wind_gust
-import numpy as np
-import pandas as pd
-import warnings
+
+from aosstower.level_00.parser import read_frames
 
 LOG = logging.getLogger(__name__)
 # map station name to InfluxDB tags
@@ -25,37 +23,37 @@ STATIONS = {
 }
 # parser symbols to influxdb symbols
 SYMBOL_CONVERSIONS = {
-    'stamp': 'timestamp',
-    'box_temp': 'box_temp',
-    'box_pressure': 'box_pressure',
-    'paro_air_temp_period': 'paro_air_temp_period',
-    'paro_pressure_period': 'paro_pressure_period',
-    'paro_air_temp': 'paro_air_temp',
-    'pressure': 'pressure',
-    'paro_cal_sig': 'paro_cal_sig',
-    'box_rh': 'box_rh',
-    'box_air_temp': 'box_air_temp',
-    'air_temp_2': 'air_temp_2',
-    'air_temp_3': 'air_temp_3',
-    'air_temp_4': 'air_temp_4',
-    'air_temp_5': 'air_temp_5',
-    'wind_speed': 'wind_speed',
-    'wind_dir': 'wind_dir',
-    'rh_shield_freq': 'rh_shield_freq',
-    'rh': 'rel_hum',
-    'air_temp_6_3m': 'air_temp_6_3m',
-    'dewpoint': 'dewpoint',
-    'rtd_shield_freq': 'rtd_shied_freq',
-    'air_temp': 'air_temp',
-    'solar_flux': 'solar_flux',
-    'precip': 'precip',
-    'accum_precip': 'accum_precip',
-    'altimeter': 'altimeter',
+    "stamp": "timestamp",
+    "box_temp": "box_temp",
+    "box_pressure": "box_pressure",
+    "paro_air_temp_period": "paro_air_temp_period",
+    "paro_pressure_period": "paro_pressure_period",
+    "paro_air_temp": "paro_air_temp",
+    "pressure": "pressure",
+    "paro_cal_sig": "paro_cal_sig",
+    "box_rh": "box_rh",
+    "box_air_temp": "box_air_temp",
+    "air_temp_2": "air_temp_2",
+    "air_temp_3": "air_temp_3",
+    "air_temp_4": "air_temp_4",
+    "air_temp_5": "air_temp_5",
+    "wind_speed": "wind_speed",
+    "wind_dir": "wind_dir",
+    "rh_shield_freq": "rh_shield_freq",
+    "rh": "rel_hum",
+    "air_temp_6_3m": "air_temp_6_3m",
+    "dewpoint": "dewpoint",
+    "rtd_shield_freq": "rtd_shied_freq",
+    "air_temp": "air_temp",
+    "solar_flux": "solar_flux",
+    "precip": "precip",
+    "accum_precip": "accum_precip",
+    "altimeter": "altimeter",
 }
 SYMBOLS = list(SYMBOL_CONVERSIONS.values())
 
 
-class Updater(object):
+class Updater:
     """Append weather record (taken as a dict) and do averages when enough data is ready.
 
     At least 12 minutes of data is required to do averaging for gust_10m: 10 minutes of wind gusts,
@@ -64,15 +62,16 @@ class Updater(object):
     This class is created once at startup and calls rolling_average every time new data is available which tries to do
     averaging every submit_interval of data added.
     """
+
     def __init__(self, data_interval=timedelta(seconds=5), submit_interval=timedelta(minutes=5)):
         """intervals are timedelta objects."""
-        self.data = {'timestamp': np.array([])}
+        self.data = {"timestamp": np.array([])}
         self.data_interval = data_interval.total_seconds()
         self.submit_interval = submit_interval.total_seconds()
 
     def rolling_average(self, record):
         # Keeps data within 12 minutes.
-        time_mask = self.data['timestamp'] > record['timestamp'] - timedelta(minutes=12)
+        time_mask = self.data["timestamp"] > record["timestamp"] - timedelta(minutes=12)
         # Appending to a DataFrame is slow. Instead, this adds to a np array in chunks and passes it to the DataFrame.
         for key in record:
             if self.data.get(key) is None:
@@ -85,43 +84,45 @@ class Updater(object):
         # current minute was 13 and the submit interval was 5 minutes, then data would be submitted at 18, 23, 28,
         # etc. If data collection went down and the next recorded current minute was 14, then data would be submitted
         # at 19, 24, 29, etc (inconsistent).
-        reference = pd.datetime(record['timestamp'].year, 1, 1)
-        progress = (record['timestamp'] - reference).total_seconds() % self.submit_interval
+        reference = pd.datetime(record["timestamp"].year, 1, 1)  # FIXME: pd.datetime was deprecated in pandas 0.25 and removed in 2.0; use datetime.datetime
+        progress = (record["timestamp"] - reference).total_seconds() % self.submit_interval
         # If data hits or will pass over a submit_interval interval, return data.
         if progress == 0 or progress > self.submit_interval - self.data_interval:
             return self._calculate_averages()
+        return None
 
     def _calculate_averages(self):
-        frame = pd.DataFrame(self.data).set_index('timestamp')
-        frame = frame.mask(frame == -99999.)
+        frame = pd.DataFrame(self.data).set_index("timestamp")
+        frame = frame.mask(frame == -99999.0)
         # Add wind direction components so we can average wind direction properly.
-        frame['wind_east'], frame['wind_north'], _ = calc.wind_vector_components(frame['wind_speed'],
-                                                                                 frame['wind_dir'])
-        frame['wind_dir'] = calc.wind_vector_degrees(frame['wind_east'], frame['wind_north'])
+        frame["wind_east"], frame["wind_north"], _ = calc.wind_vector_components(frame["wind_speed"], frame["wind_dir"])
+        frame["wind_dir"] = calc.wind_vector_degrees(frame["wind_east"], frame["wind_north"])
 
-        if 'air_temp' in frame and 'rh' in frame and ('dewpoint' in frame or 'dewpoint_mean' in frame):
-            LOG.info("'dewpoint' is missing from the input file, will calculate "
-                     "it from air temp and relative humidity")
-            frame['dewpoint'] = calc.dewpoint(frame['air_temp'], frame['rh'])
+        if "air_temp" in frame and "rh" in frame and ("dewpoint" in frame or "dewpoint_mean" in frame):  # NOTE(review): message below says dewpoint is *missing*, but this fires when it is PRESENT — should this be `not in`?
+            LOG.info(
+                "'dewpoint' is missing from the input file, will calculate " "it from air temp and relative humidity",
+            )
+            frame["dewpoint"] = calc.dewpoint(frame["air_temp"], frame["rh"])
 
         # https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases
         # 2 minute rolling average.
-        winds_frame_2m = frame[['wind_speed', 'wind_east', 'wind_north']].rolling('2T', closed='right').mean()
+        winds_frame_2m = frame[["wind_speed", "wind_east", "wind_north"]].rolling("2T", closed="right").mean()
         # Makes 2 minute averages nans if given less than 2 minutes of data.
         if len(frame[frame.index > frame.index[-1] - timedelta(minutes=2)]) < 120 / self.data_interval:
-            winds_frame_2m = winds_frame_2m.mask(winds_frame_2m['wind_speed'] > -1)
-        frame['wind_speed_2m'] = winds_frame_2m['wind_speed']
-        frame['wind_dir_2m'] = calc.wind_vector_degrees(winds_frame_2m['wind_east'], winds_frame_2m['wind_north'])
+            winds_frame_2m = winds_frame_2m.mask(winds_frame_2m["wind_speed"] > -1)
+        frame["wind_speed_2m"] = winds_frame_2m["wind_speed"]
+        frame["wind_dir_2m"] = calc.wind_vector_degrees(winds_frame_2m["wind_east"], winds_frame_2m["wind_north"])
         # 1 minute rolling peaks
-        wind_peak_1m = frame['wind_speed'].rolling(window='1T', closed='right').max()
+        wind_peak_1m = frame["wind_speed"].rolling(window="1T", closed="right").max()
         # criteria for a fast wind to be considered a wind gust. Note that it needs winds_frame_2m['wind_speed'].
-        gust_mask = (winds_frame_2m['wind_speed'] >= calc.knots_to_mps(9.)) &\
-                    (wind_peak_1m >= winds_frame_2m['wind_speed'] + calc.knots_to_mps(5.))
-        frame['gust_1m'] = wind_peak_1m.mask(~gust_mask)
-        frame['gust_10m'] = calculate_wind_gust(frame['wind_speed'], winds_frame_2m['wind_speed'])
+        gust_mask = (winds_frame_2m["wind_speed"] >= calc.knots_to_mps(9.0)) & (
+            wind_peak_1m >= winds_frame_2m["wind_speed"] + calc.knots_to_mps(5.0)
+        )
+        frame["gust_1m"] = wind_peak_1m.mask(~gust_mask)
+        frame["gust_10m"] = calculate_wind_gust(frame["wind_speed"], winds_frame_2m["wind_speed"])
         # Makes 10 minute gusts before 12 minutes nans because data is insufficient.
         if len(frame) < 720 / self.data_interval:
-            frame['gust_10m'] = frame['gust_10m'].mask(frame['gust_10m'] > -1.)
+            frame["gust_10m"] = frame["gust_10m"].mask(frame["gust_10m"] > -1.0)
         return frame.fillna(value=np.nan)
 
 
@@ -138,9 +139,9 @@ def construct_url(data):
     # Sends null data as empty string to show that recording worked, but that the data is nonexistent.
     for key, val in data.items():
         if val is None or isinstance(val, float) and np.isnan(val):
-            data[key] = ''
+            data[key] = ""
     # Makes url be "url escaped".
-    return 'http://weatherstation.wunderground.com/weatherstation/updateweatherstation.php?' + urlencode(data)
+    return "http://weatherstation.wunderground.com/weatherstation/updateweatherstation.php?" + urlencode(data)
 
 
 def get_url_data(avg, wu_id, wu_pw):
@@ -149,67 +150,105 @@ def get_url_data(avg, wu_id, wu_pw):
     # Previously at:
     # https://feedback.weather.com/customer/en/portal/articles/2924682-pws-upload-protocol?b_id=17298
     timestamp = avg.index[-1]
-    wind_dir = avg['wind_dir'][-1]
-    wind_dir_2m = avg['wind_dir_2m'][-1]
-    rel_hum = avg['rel_hum'][-1]
-    solar_flux = avg['solar_flux'][-1]
-    precip = avg['precip'][-1]
-    accum_precip = avg['accum_precip'][-1]
+    wind_dir = avg["wind_dir"][-1]
+    wind_dir_2m = avg["wind_dir_2m"][-1]
+    rel_hum = avg["rel_hum"][-1]
+    solar_flux = avg["solar_flux"][-1]
+    precip = avg["precip"][-1]
+    accum_precip = avg["accum_precip"][-1]
     # Converts from m/s to mph.
-    wind_speed = avg['wind_speed'][-1] * 2.23694
-    wind_speed_2m = avg['wind_speed_2m'][-1] * 2.23694
-    gust_1m = avg['gust_1m'][-1] * 2.23694
-    gust_10m = avg['gust_10m'][-1] * 2.23694
+    wind_speed = avg["wind_speed"][-1] * 2.23694
+    wind_speed_2m = avg["wind_speed_2m"][-1] * 2.23694
+    gust_1m = avg["gust_1m"][-1] * 2.23694
+    gust_10m = avg["gust_10m"][-1] * 2.23694
     # Converts degrees Celsius to degrees Fahrenheit
-    air_temp = avg['air_temp'][-1] * 9. / 5. + 32.
-    dewpoint = avg['dewpoint'][-1] * 9. / 5. + 32.
+    air_temp = avg["air_temp"][-1] * 9.0 / 5.0 + 32.0
+    dewpoint = avg["dewpoint"][-1] * 9.0 / 5.0 + 32.0
     # hpa to barometric pressure inches
-    pressure = avg['pressure'][-1] * 0.02952998016471232
-    return {'ID': wu_id, 'PASSWORD': wu_pw, 'dateutc': timestamp, 'winddir': wind_dir, 'winddir_avg2m': wind_dir_2m,
-            'windspeedmph': wind_speed, 'windspdmph_avg2m': wind_speed_2m, 'windgustmph': gust_1m,
-            'windgustmph_10m': gust_10m, 'humidity': rel_hum, 'tempf': air_temp, 'baromin': pressure,
-            'dewptf': dewpoint, 'solarradiation': solar_flux, 'rainin': precip, 'dailyrainin': accum_precip,
-            'softwaretype': 'SSEC-RIG', 'action': 'updateraw'}
+    pressure = avg["pressure"][-1] * 0.02952998016471232
+    return {
+        "ID": wu_id,
+        "PASSWORD": wu_pw,
+        "dateutc": timestamp,
+        "winddir": wind_dir,
+        "winddir_avg2m": wind_dir_2m,
+        "windspeedmph": wind_speed,
+        "windspdmph_avg2m": wind_speed_2m,
+        "windgustmph": gust_1m,
+        "windgustmph_10m": gust_10m,
+        "humidity": rel_hum,
+        "tempf": air_temp,
+        "baromin": pressure,
+        "dewptf": dewpoint,
+        "solarradiation": solar_flux,
+        "rainin": precip,
+        "dailyrainin": accum_precip,
+        "softwaretype": "SSEC-RIG",
+        "action": "updateraw",
+    }
 
 
 def main():
     import argparse
+
     parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
-    parser.add_argument('--logfn', help='Log to rotating file (Not Implemented)')
-    parser.add_argument('--debug', action='store_true',
-                        help='Don\'t submit records to the database, print them to stdout')
-    parser.add_argument('-t', '--tail', action='store_true',
-                        help=('Tail file forever, not returning. This will start at the end '
-                              'of the file and insert any new data added after starting'))
-    parser.add_argument('--ldmp', action='store_true',
-                        help='Treat `src` file as a station name and read records from'
-                             'LoggerNet LDMP server (port: 1024)')
-    parser.add_argument('--tables', nargs='*', default=['1'],
-                        help="LoggerNet LDMP tables to read in")
-    parser.add_argument("--host", default=influxdb.DB_HOST,
-                        help="Hostname of database connection")
-    parser.add_argument("--port", default=influxdb.DB_PORT,
-                        help="Port of database connection")
-    parser.add_argument("--dbname", default=influxdb.DB_NAME,
-                        help="Name of database to modify")
-    parser.add_argument('-s', '--station', dest='station', default='AOSS Tower', choices=STATIONS.keys(),
-                        help='Name of station to use to determine symbols')
-    parser.add_argument('-v', '--verbose', dest='verbosity', action="count", default=0,
-                        help='each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG')
-    parser.add_argument('--sleep-interval', type=float,
-                        help="Seconds to wait between submitting each record")
-    parser.add_argument('--weather-underground', action='store_true',
-                        help="Send new records to wunderground.com")
-    parser.add_argument('--wu-id', default='KWIMADIS52',
-                        help='Weather underground station ID')
-    parser.add_argument('--wu-password-file', default='/home/metobs/wunderground_password.txt',
-                        help='File containing the password for the weather underground upload')
-    parser.add_argument('--bulk', type=int, default=1,
-                        help="Number of records to buffer before sending to "
-                             "the database. For large inserts this should be "
-                             "between 5000 to 10000. Default: 1")
-    parser.add_argument('src', help='Level 0 raw data file or station name '
-                                    'for LDMP reading')
+    parser.add_argument("--logfn", help="Log to rotating file (Not Implemented)")
+    parser.add_argument(
+        "--debug",
+        action="store_true",
+        help="Don't submit records to the database, print them to stdout",
+    )
+    parser.add_argument(
+        "-t",
+        "--tail",
+        action="store_true",
+        help=(
+            "Tail file forever, not returning. This will start at the end "
+            "of the file and insert any new data added after starting"
+        ),
+    )
+    parser.add_argument(
+        "--ldmp",
+        action="store_true",
+        help="Treat `src` file as a station name and read records from LoggerNet LDMP server (port: 1024)",
+    )
+    parser.add_argument("--tables", nargs="*", default=["1"], help="LoggerNet LDMP tables to read in")
+    parser.add_argument("--host", default=influxdb.DB_HOST, help="Hostname of database connection")
+    parser.add_argument("--port", default=influxdb.DB_PORT, help="Port of database connection")
+    parser.add_argument("--dbname", default=influxdb.DB_NAME, help="Name of database to modify")
+    parser.add_argument(
+        "-s",
+        "--station",
+        dest="station",
+        default="AOSS Tower",
+        choices=STATIONS.keys(),
+        help="Name of station to use to determine symbols",
+    )
+    parser.add_argument(
+        "-v",
+        "--verbose",
+        dest="verbosity",
+        action="count",
+        default=0,
+        help="each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG",
+    )
+    parser.add_argument("--sleep-interval", type=float, help="Seconds to wait between submitting each record")
+    parser.add_argument("--weather-underground", action="store_true", help="Send new records to wunderground.com")
+    parser.add_argument("--wu-id", default="KWIMADIS52", help="Weather underground station ID")
+    parser.add_argument(
+        "--wu-password-file",
+        default="/home/metobs/wunderground_password.txt",
+        help="File containing the password for the weather underground upload",
+    )
+    parser.add_argument(
+        "--bulk",
+        type=int,
+        default=1,
+        help="Number of records to buffer before sending to "
+        "the database. For large inserts this should be "
+        "between 5000 to 10000. Default: 1",
+    )
+    parser.add_argument("src", help="Level 0 raw data file or station name " "for LDMP reading")
 
     args = parser.parse_args()
     levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
@@ -222,13 +261,14 @@ def main():
 
     wu_pw = None
     if args.weather_underground:
-        wu_pw = open(args.wu_password_file, 'r').read().strip()
+        wu_pw = open(args.wu_password_file).read().strip()
 
     if args.ldmp:
         from aosstower.level_00.parser import LDMPGenerator
+
         record_gen = LDMPGenerator(args.src, args.tables)
     else:
-        src = open(args.src, "r")
+        src = open(args.src)
         record_gen = read_frames(src, tail=args.tail)
 
     try:
@@ -251,8 +291,13 @@ def main():
                     try:
                         resp = requests.post(url, timeout=15)
                         if resp.status_code != 200:
-                            LOG.warning('Data failed to upload to {0} with status code {1}: {2}'.format(
-                                url, resp.status_code, resp.text))
+                            LOG.warning(
+                                "Data failed to upload to {} with status code {}: {}".format(
+                                    url,
+                                    resp.status_code,
+                                    resp.text,
+                                ),
+                            )
                         else:
                             LOG.info("Upload successful")
                     except requests.Timeout:
@@ -263,7 +308,7 @@ def main():
             if args.sleep_interval:
                 time.sleep(args.sleep_interval)
     except (RuntimeError, ValueError, KeyError, requests.RequestException):
-        if hasattr(record_gen, 'close'):
+        if hasattr(record_gen, "close"):
             record_gen.close()
 
 
diff --git a/aosstower/level_00/legacy_db_insert.py b/aosstower/level_00/legacy_db_insert.py
index d6b62d20718d07d8155cddfa2c05cf0b33eb9eec..aca2246efe4f6b21ea4e7d0b2c55a40aa5b6c224 100644
--- a/aosstower/level_00/legacy_db_insert.py
+++ b/aosstower/level_00/legacy_db_insert.py
@@ -10,43 +10,45 @@ to make sure errors are logged:
 import logging
 import logging.handlers
 import sys
+
 from metobscommon import legacy_db
+
 from aosstower.level_00.parser import read_frames
 
-logging.addLevelName(9, 'TRACE')
+logging.addLevelName(9, "TRACE")
 LOG = logging.getLogger(__name__)
-ERR = logging.getLogger(__name__ + '.error')
+ERR = logging.getLogger(__name__ + ".error")
 
 
 _symbol_record_map = (
-    (1, 'station'),
-    (2, 'year'),
-    (3, 'day'),
-    (4, 'hour_minute'),
-    (5, 'seconds'),
-    (6, 'box_pressure'),
-    (7, 'parosci_air_temp_period'),
-    (8, 'parosci_pressure_period'),
-    (9, 'parosci_air_temp'),
-    (10, 'pressure'),
-    (11, 'parosci_calc_sig'),
-    (12, 'box_rh'),
-    (13, 'box_air_temp'),
-    (14, 'temp2'),
-    (15, 'temp3'),
-    (16, 'temp4'),
-    (17, 'wind_speed'),
-    (18, 'wind_direction'),
-    (19, 'rh_shield_freq'),
-    (20, 'relative_humidity'),
-    (21, 'air_temp_6_3'),
-    (22, 'dewpoint'),
-    (23, 'rtd_shield_freq'),
-    (24, 'air_temp'),
-    (25, 'solar_flux'),
-    (26, 'precipitation'),
-    (27, 'accumulated_precipitation'),
-    (28, 'altimeter'),
+    (1, "station"),
+    (2, "year"),
+    (3, "day"),
+    (4, "hour_minute"),
+    (5, "seconds"),
+    (6, "box_pressure"),
+    (7, "parosci_air_temp_period"),
+    (8, "parosci_pressure_period"),
+    (9, "parosci_air_temp"),
+    (10, "pressure"),
+    (11, "parosci_calc_sig"),
+    (12, "box_rh"),
+    (13, "box_air_temp"),
+    (14, "temp2"),
+    (15, "temp3"),
+    (16, "temp4"),
+    (17, "wind_speed"),
+    (18, "wind_direction"),
+    (19, "rh_shield_freq"),
+    (20, "relative_humidity"),
+    (21, "air_temp_6_3"),
+    (22, "dewpoint"),
+    (23, "rtd_shield_freq"),
+    (24, "air_temp"),
+    (25, "solar_flux"),
+    (26, "precipitation"),
+    (27, "accumulated_precipitation"),
+    (28, "altimeter"),
 )
 
 
@@ -62,32 +64,44 @@ def configure_logging(level, logfn=None):
     ERR.setLevel(logging.INFO)
 
     if logfn:
-        maxsize = (20 * 1024 ** 3)  # 20 MB
-        rotating_file = logging.handlers.RotatingFileHandler(
-            logfn, mode='a', maxBytes=maxsize,
-            backupCount=5)
-        rotating_file.setFormatter(logging.Formatter('%(asctime)s: %(message)s'))
+        maxsize = 20 * 1024**3  # 20 GiB (1024**3 = 1 GiB; earlier comment claimed "20 MB" — use 1024**2 if MB was intended)
+        rotating_file = logging.handlers.RotatingFileHandler(logfn, mode="a", maxBytes=maxsize, backupCount=5)
+        rotating_file.setFormatter(logging.Formatter("%(asctime)s: %(message)s"))
         LOG.addHandler(rotating_file)
 
 
 def main():
     import argparse
-    parser = argparse.ArgumentParser(description=__doc__,
-                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
-    parser.add_argument('--loglvl', dest='loglvl', default='warn',
-                        help='logging level',
-                        choices=(['trace', 'debug', 'info', 'warn', 'error']))
-    parser.add_argument('--logfn', help='Log to rotating file')
-    parser.add_argument('--debug', action='store_true',
-                        help='Don\'t submit records to the database, print them to stdout')
-    parser.add_argument('-s', '--station', dest='station', default='RIG Tower',
-                        help='Name of station to use to determine symbols')
-    parser.add_argument('--ldmp', action='store_true',
-                        help='Treat `src` file as a station name and read records from LoggerNet LDMP server (port: 1024)')
-    parser.add_argument('--tables', nargs='*', default=['1'],
-                        help="LoggerNet LDMP tables to read in")
-    parser.add_argument('dburl', help='Database to insert into')
-    parser.add_argument('src', help='Level 0 raw data file')
+
+    parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    parser.add_argument(
+        "--loglvl",
+        dest="loglvl",
+        default="warn",
+        help="logging level",
+        choices=(["trace", "debug", "info", "warn", "error"]),
+    )
+    parser.add_argument("--logfn", help="Log to rotating file")
+    parser.add_argument(
+        "--debug",
+        action="store_true",
+        help="Don't submit records to the database, print them to stdout",
+    )
+    parser.add_argument(
+        "-s",
+        "--station",
+        dest="station",
+        default="RIG Tower",
+        help="Name of station to use to determine symbols",
+    )
+    parser.add_argument(
+        "--ldmp",
+        action="store_true",
+        help="Treat `src` file as a station name and read records from LoggerNet LDMP server (port: 1024)",
+    )
+    parser.add_argument("--tables", nargs="*", default=["1"], help="LoggerNet LDMP tables to read in")
+    parser.add_argument("dburl", help="Database to insert into")
+    parser.add_argument("src", help="Level 0 raw data file")
 
     args = parser.parse_args()
     lvl = logging.getLevelName(args.loglvl.upper())
@@ -106,14 +120,15 @@ def main():
 
     if args.ldmp:
         from aosstower.level_00.parser import LDMPGenerator
+
         record_gen = LDMPGenerator(args.src, args.tables, symbol_names=[x[1] for x in _symbol_record_map])
     else:
-        src = open(args.src, "r")
+        src = open(args.src)
         record_gen = read_frames(src, tail=args.tail)
 
-    for idx, record in enumerate(record_gen):
-        data = (record['stamp'], record)
-        LOG.info("Sending record: %s", record['stamp'].isoformat(' '))
+    for _idx, record in enumerate(record_gen):
+        data = (record["stamp"], record)
+        LOG.info("Sending record: %s", record["stamp"].isoformat(" "))
         if args.debug:
             # we just want the above message
             continue
diff --git a/aosstower/level_00/parser.py b/aosstower/level_00/parser.py
index 5406b13add464d31876baa5e441ed3af7a5aee3b..58eb26a59c154d0823287121b5286b0c477a0cbc 100644
--- a/aosstower/level_00/parser.py
+++ b/aosstower/level_00/parser.py
@@ -39,24 +39,26 @@ we have 2 altimeter values but as far as I know altimeter2 is not used.
 XXX: Fill value in version 2 seems to be -99999.
 """
 
-import re
 import io
-import time
 import logging
+import re
+import time
 from datetime import datetime, timedelta
 
 from metobscommon.util.mytime import hhmm_to_offset
+
 from aosstower.schema import database
 
 LOG = logging.getLogger(__name__)
 
 
 class LineParseError(Exception):
-    """Error parsing line of frame data.
-    """
+    """Error parsing line of frame data."""
+
     @classmethod
     def raise_wrapped(cls, exception, msg=None):
         import sys
+
         traceback = sys.exc_info()[2]
         msg = msg or str(exception)
         exc = cls(msg)
@@ -65,56 +67,56 @@ class LineParseError(Exception):
 
 
 def _make_frame(data, new_symbols=None, rename_timestamp=False):
-    """Construct a frame from a list of tuples.
-    """
+    """Construct a frame from a list of tuples."""
     frame = {}
     for idx, (key, value) in enumerate(data):
-        if key in ['stamp', 'timestamp']:
-            frame['stamp' if rename_timestamp else key] = value
+        if key in ["stamp", "timestamp"]:
+            frame["stamp" if rename_timestamp else key] = value
             continue
         if key in database:
             try:
                 new_key = new_symbols[idx] if new_symbols and len(new_symbols) > idx else key
                 frame[new_key] = database[key].type(value)
             except (ValueError, TypeError):
-                raise LineParseError("error converting '%s' using %s",
-                                     value, database[key].type)
-    return frame 
+                raise LineParseError("error converting '%s' using %s", value, database[key].type)
+    return frame
 
 
-class ParserV0(object):
-    """Parses Version 0 data lines.
-    """
-    fill_value = -99999.
+class ParserV0:
+    """Parses Version 0 data lines."""
+
+    fill_value = -99999.0
 
     # maps v0 names to names in schema db
-    names = {'ACCURAIN': 'accum_precip',
-             'TEMP107_1': 'box_air_temp',
-             'TEMP107_2': 'air_temp_2',
-             'TEMP107_3': 'air_temp_3',
-             'TEMP107_4': 'air_temp_4',
-             'TEMP107_5': 'air_temp_5',
-             'LI200X': 'solar_flux',
-             'RH41372': 'rh',
-             'TEMP41372': 'air_temp',
-             'CS105': 'box_pressure',
-             'PAROSCI': 'pressure',
-             'WSPD05305': 'wind_speed',
-             'WDIR05305': 'wind_dir',
-             'CS10162': 'box_rh',
-             'RAIN380M': 'precip'}
+    names = {
+        "ACCURAIN": "accum_precip",
+        "TEMP107_1": "box_air_temp",
+        "TEMP107_2": "air_temp_2",
+        "TEMP107_3": "air_temp_3",
+        "TEMP107_4": "air_temp_4",
+        "TEMP107_5": "air_temp_5",
+        "LI200X": "solar_flux",
+        "RH41372": "rh",
+        "TEMP41372": "air_temp",
+        "CS105": "box_pressure",
+        "PAROSCI": "pressure",
+        "WSPD05305": "wind_speed",
+        "WDIR05305": "wind_dir",
+        "CS10162": "box_rh",
+        "RAIN380M": "precip",
+    }
 
     @staticmethod
     def maybe_mine(line):
-        return line.startswith('TIME')
+        return line.startswith("TIME")
 
     def make_frame(self, line):
         parts = line.split()
         if len(parts) != 32:
             raise LineParseError("Expected 32 components", line)
-        raw_data = [('version', 0)]
+        raw_data = [("version", 0)]
         for k1, v1 in zip(parts[0::2], parts[1::2]):
-            if k1 == 'TIME':
+            if k1 == "TIME":
                 continue
             if k1 in self.names:
                 raw_data.append((self.names[k1], v1))
@@ -123,31 +125,53 @@ class ParserV0(object):
         try:
             time_str = parts[1]
             unix_time = int(time_str)
-            raw_data.append(('stamp', datetime.utcfromtimestamp(unix_time)))
+            raw_data.append(("stamp", datetime.utcfromtimestamp(unix_time)))
         except (ValueError, TypeError):
             raise LineParseError("Could not parse stamp", line)
         return _make_frame(raw_data)
 
 
-class ParserV1V2(object):
-    """Parses Version 1 & 2 data lines.
-    """
-    fill_value = -99999.
-
-    names = ['station_id', 'year', 'doy', 'hhmm', 'sec', 'box_pressure',
-             'paro_air_temp_period', 'paro_pressure_period', 'paro_air_temp',
-             'pressure', 'paro_cal_sig', 'box_rh', 'box_air_temp',
-             'air_temp_2', 'air_temp_3', 'air_temp_4', 'wind_speed', 'wind_dir',
-             'rh_shield_freq', 'rh', 'air_temp_6_3m', 'dewpoint',
-             'rtd_shield_freq', 'air_temp', 'solar_flux', 'precip',
-             'accum_precip', 'altimeter']  # , 'altimeter2']
+class ParserV1V2:
+    """Parses Version 1 & 2 data lines."""
+
+    fill_value = -99999.0
+
+    names = [
+        "station_id",
+        "year",
+        "doy",
+        "hhmm",
+        "sec",
+        "box_pressure",
+        "paro_air_temp_period",
+        "paro_pressure_period",
+        "paro_air_temp",
+        "pressure",
+        "paro_cal_sig",
+        "box_rh",
+        "box_air_temp",
+        "air_temp_2",
+        "air_temp_3",
+        "air_temp_4",
+        "wind_speed",
+        "wind_dir",
+        "rh_shield_freq",
+        "rh",
+        "air_temp_6_3m",
+        "dewpoint",
+        "rtd_shield_freq",
+        "air_temp",
+        "solar_flux",
+        "precip",
+        "accum_precip",
+        "altimeter",
+    ]  # , 'altimeter2']
     # These are the new fields in the input files but unused by the rest of
     # the tower code. At the risk of breaking other pieces of software, these
     # are not included in the above list, but are documented here for future
     # reference.
     #
     # Altimeter2 (slightly different calculation, same units as Altimeter)
-    # LW_in (W/m^2)
     # tempPyrg (Kelvin, temperature of pyrgeometer)
     # pyrgTP (W/m^2, raw reading from the pyrgeometer thermopile)
     # pyrgTC (W/m^2, temperature correction for the pyrgeometer)
@@ -155,27 +179,27 @@ class ParserV1V2(object):
 
     @staticmethod
     def maybe_mine(line):
-        return re.search('^\d,\d{4},\d{1,3}', line) is not None
+        return re.search("^\\d,\\d{4},\\d{1,3}", line) is not None
 
     @staticmethod
     def _get_stamp(parts):
         year = int(parts[1])
         doy = int(parts[2])
-        dt = datetime.strptime('{:d}.{:03d}'.format(int(year), int(doy)), '%Y.%j')
+        dt = datetime.strptime(f"{int(year):d}.{int(doy):03d}", "%Y.%j")
         secs = hhmm_to_offset(parts[3])
         secs += float(parts[4])
-        secs -= (secs % 5)
+        secs -= secs % 5
         dt += timedelta(seconds=secs)
         return dt
 
     def make_frame(self, line):
-        parts = line.split(',')
+        parts = line.split(",")
         if len(parts) not in [28, 29, 33, 34]:
             raise LineParseError("Expected 28, 29, 33, or 34 parts", line)
         version = {28: 1, 29: 2, 33: 3, 34: 4}[len(parts)]
-        raw_data = [('version', version)] + list(zip(self.names, parts))
+        raw_data = [("version", version), *list(zip(self.names, parts))]
         try:
-            raw_data.append(('stamp', self._get_stamp(parts)))
+            raw_data.append(("stamp", self._get_stamp(parts)))
         except (TypeError, ValueError):
             raise LineParseError("Could not parse timesamp", line)
         return _make_frame(raw_data)
@@ -187,13 +211,11 @@ def read_frames(source, error_handler=lambda *a: None, tail=False):
 
     :param tail: starting from the end of the source (if 'seek' method) read lines forever
     """
-    if hasattr(source, 'readlines'):
-        fptr = source
-    else:
-        fptr = open(source)
+    fptr = source if hasattr(source, "readlines") else open(source)
     if tail and hasattr(fptr, "seek"):
         LOG.debug("Seeking to end of frame source")
         fptr.seek(0, io.SEEK_END)
+
         def gen():
             idx = 0
             while True:
@@ -203,7 +225,9 @@ def read_frames(source, error_handler=lambda *a: None, tail=False):
                     continue
                 yield idx, line
                 idx += 1
+
     else:
+
         def gen():
             for idx, line in enumerate(fptr):
                 if not line.strip():
@@ -211,7 +235,7 @@ def read_frames(source, error_handler=lambda *a: None, tail=False):
                 yield idx, line
 
     for idx, line in gen():
-        if line.startswith('#'):
+        if line.startswith("#"):
             continue
         for parser in [ParserV1V2(), ParserV0()]:
             if parser.maybe_mine(line):
@@ -228,19 +252,19 @@ def read_frames(source, error_handler=lambda *a: None, tail=False):
 
 
 def loggernet_to_tower(rec_dict, symbol_names):
-    """Convert loggernet record dictionary to our standard naming"""
+    """Convert loggernet record dictionary to our standard naming."""
     # assume that the next record after the traditional frame is the timestamp
-    old_symbols = ['timestamp'] + ParserV1V2.names
-    new_symbols = ['timestamp'] + symbol_names
+    old_symbols = ["timestamp", *ParserV1V2.names]
+    new_symbols = ["timestamp", *symbol_names]
     return _make_frame(zip(old_symbols, rec_dict.values()), new_symbols, rename_timestamp=True)
 
 
-class LDMPGenerator(object):
+class LDMPGenerator:
     """Class to manage receiving records from Loggernet LDMP server."""
 
-    def __init__(self, station_name, tables, symbol_names=ParserV1V2.names,
-                 host='localhost', port=1024):
+    def __init__(self, station_name, tables, symbol_names=ParserV1V2.names, host="localhost", port=1024):
         from metobscommon.archive.loggernet_receiver import LDMPReceiver
+
         self.station_name = station_name
         self.tables = tables
         self.symbol_names = symbol_names
@@ -248,10 +272,10 @@ class LDMPGenerator(object):
 
     def __iter__(self):
         from metobscommon.archive.loggernet_receiver import dict_records
+
         self.receiver.start()
         # This should be generated OrderedDicts
-        dict_rec_gen = dict_records(self.receiver, self.station_name,
-                                    self.tables)
+        dict_rec_gen = dict_records(self.receiver, self.station_name, self.tables)
         return (loggernet_to_tower(x, self.symbol_names) for x in dict_rec_gen)
 
     def close(self):
@@ -261,5 +285,5 @@ class LDMPGenerator(object):
         """Last effort to kill the background thread if not done already."""
         try:
             self.close()
-        except (ValueError, RuntimeError, IOError, OSError):
+        except (ValueError, RuntimeError, OSError):
             pass
diff --git a/aosstower/level_00/rrd.py b/aosstower/level_00/rrd.py
deleted file mode 100644
index 48ccaa3c9c101cd5e35423ac31901605758c28a4..0000000000000000000000000000000000000000
--- a/aosstower/level_00/rrd.py
+++ /dev/null
@@ -1,69 +0,0 @@
-"""The data model used for the MetObs widgets using tower data.
-"""
-
-import os
-from datetime import datetime, timedelta
-
-import rrdtool
-
-from metobscommon.util.calc import altimeter, dewpoint, wind_vector_components
-from metobscommon.util.mytime import to_epoch
-from aosstower import station
-
-
-# minimum set of records for the tower
-VARS = {'air_temp', 'rh', 'dewpoint',
-        'wind_speed', 'winddir_east', 'winddir_north',
-        'pressure', 'precip', 'accum_precip',
-        'solar_flux', 'altimeter'}
-
-
-def add_vector_winds(record):
-    east, north, spd = wind_vector_components(float(record['wind_speed']),
-                                              float(record['wind_dir']))
-
-    record['winddir_east'] = '%.3d' % east
-    record['winddir_north'] = '%.3d' % north
-    record['wind_speed'] = '%.3d' % spd
-
-
-def add_altimeter(record, elev=station.ELEVATION):
-    record['altimeter'] = '%.3d' % altimeter(float(record['pressure']), elev)
-
-
-def add_dewpoint(record):
-    record['dewpoint'] = '%.3d' % dewpoint(float(record['air_temp']),
-                                           float(record['rh']))
-
-
-def initialize_rrd(filepath, start=None, days=365, data_interval=5):
-    """Create a new empty RRD database.
-    """
-    assert not os.path.exists(filepath), "DB already exists"
-    start = start or (datetime.utcnow() - timedelta(days=days))
-    # normalize start to data interval
-    secs = to_epoch(start)
-    secs -= secs % data_interval
-
-    rrdtool.create(filepath,
-                   '--start={}'.format(secs),
-                   '--step={:d}'.format(data_interval),
-                   'DS:air_temp:GAUGE:10:-40:50',
-                   'DS:rh:GAUGE:10:0:100',
-                   'DS:dewpoint:GAUGE:10:0:100',
-                   'DS:wind_speed:GAUGE:10:0:100',
-                   'DS:winddir_north:GAUGE:10:-100:100',
-                   'DS:winddir_east:GAUGE:10:-100:100',
-                   'DS:pressure:GAUGE:10:0:1100',
-                   'DS:precip:GAUGE:10:0:100',
-                   'DS:accum_precip:GAUGE:10:0:100',
-                   'DS:solar_flux:GAUGE:10:0:1000',
-                   'DS:altimeter:GAUGE:10:0:100',
-                   # native resolution
-                   'RRA:AVERAGE:0.5:1:6307200',
-                   # 1 minute
-                   'RRA:AVERAGE:0.5:{:d}:525600'.format(60/data_interval),
-                   # 5 minute
-                   'RRA:AVERAGE:0.5:{:d}:105120'.format(300/data_interval),
-                   # 30 minute
-                   'RRA:AVERAGE:0.5:{:d}:17520'.format(1800/data_interval))
diff --git a/aosstower/level_b1/calc.py b/aosstower/level_b1/calc.py
index 4db8699e69db64927dac576539baf02b47a985b6..28bcda711c144603dc831988d4f0ae6eff3d4f21 100644
--- a/aosstower/level_b1/calc.py
+++ b/aosstower/level_b1/calc.py
@@ -10,8 +10,11 @@ except ImportError:
     pd = None
     Series = np.ndarray
 
-NaN = float('nan')
-is_nan = lambda a: a != a
+NaN = float("nan")
+
+
+def is_nan(a):
+    return a != a
 
 
 def knots_to_mps(knots):
@@ -19,8 +22,7 @@ def knots_to_mps(knots):
 
 
 def dewpoint(tempC, relhum):
-    """
-    Algorithm from Tom Whittaker tempC is the temperature in degrees Celsius,
+    """Algorithm from Tom Whittaker tempC is the temperature in degrees Celsius,
     relhum is the relative humidity as a percentage.
 
     :param tempC: temperature in celsius
@@ -32,8 +34,7 @@ def dewpoint(tempC, relhum):
     gasconst = 461.5
     latheat = 2500800.0
 
-    dp = 1.0 / (1.0 / (273.15 + tempC) - gasconst * np.log((0.0 + relhum) / 100) /
-                (latheat - tempC * 2397.5))
+    dp = 1.0 / (1.0 / (273.15 + tempC) - gasconst * np.log((0.0 + relhum) / 100) / (latheat - tempC * 2397.5))
 
     if pd is not None and isinstance(dp, pd.Series):
         return pd.concat([dp - 273.15, tempC], axis=1).min(axis=1)
@@ -41,37 +42,35 @@ def dewpoint(tempC, relhum):
 
 
 def relhum(airTempK, dewpointTempK):
-    """
-    Algorithm derived by David Hoese from the above
+    """Algorithm derived by David Hoese from the above
     dewpoint(tempC, relhum) function, both parameters are in Kelvin units.
 
     :param airTempK: air temperature in Kelvin
     :param dewpointTempK: dewpoint temp in Kelvin
     """
-    if airTempK == None or dewpointTempK == None:
+    if airTempK is None or dewpointTempK is None:
         return NaN
 
     gas_constant = 461.5
     latheat = 2500800.0
 
     # Only one section of the equation
-    latpart = (latheat - (airTempK - 273.15) * 2397.5)
+    latpart = latheat - (airTempK - 273.15) * 2397.5
     relativehum = 100 * math.e ** ((latpart / airTempK - latpart / dewpointTempK) / gas_constant)
 
     return relativehum
 
 
 def potentialtemp(airTempK, pressureMB):
-    """
-    Algorithm from David Hoese to calculate potential temperature.
+    """Algorithm from David Hoese to calculate potential temperature.
 
     :param airTempK: air temperature in Kelvin
     :param pressureMB: air pressure in millibars
     """
-    if airTempK == None or pressureMB == None:
+    if airTempK is None or pressureMB is None:
         return NaN
 
-    pT = airTempK * (pressureMB.max() / pressureMB) ** .286
+    pT = airTempK * (pressureMB.max() / pressureMB) ** 0.286
 
     return pT
 
@@ -86,11 +85,11 @@ def altimeter(p, alt):
 
     :returns: altimeter in inHg
     """
-    n = .190284
-    c1 = .0065 * pow(1013.25, n) / 288.
-    c2 = alt / pow((p - .3), n)
-    ff = pow(1. + c1 * c2, 1. / n)
-    return ((p - .3) * ff * 29.92 / 1013.25)
+    n = 0.190284
+    c1 = 0.0065 * pow(1013.25, n) / 288.0
+    c2 = alt / pow((p - 0.3), n)
+    ff = pow(1.0 + c1 * c2, 1.0 / n)
+    return (p - 0.3) * ff * 29.92 / 1013.25
 
 
 def dir2txt(val):
@@ -112,14 +111,16 @@ def dir2txt(val):
     assert val >= 0 and val < 360, "'%s' out of range" % val
     dirs = ("NNE", "NE", "ENE", "E", "ESE", "SE", "SSE", "S", "SSW", "SW", "WSW", "W", "WNW", "NW", "NNW")
 
-    if ((val >= 348.75 and val <= 360) or val >= 0 and val < 11.25): return "N"
+    if (val >= 348.75 and val <= 360) or val >= 0 and val < 11.25:
+        return "N"
 
     # 1/2 degree increment between the directions
-    i = 11.25;
+    i = 11.25
     for dir in dirs:
         if val >= i and val < (i + 22.5):
             return dir
         i += 22.5
+    return None
 
 
 def wind_vector_components(windspd, winddir):
diff --git a/aosstower/level_b1/nc.py b/aosstower/level_b1/nc.py
index 949b25822f196a50ec5e5c57f2fed07cbfa5e647..14fee4aa22c9a2f91d29f325d064c6d1cc2d4596 100644
--- a/aosstower/level_b1/nc.py
+++ b/aosstower/level_b1/nc.py
@@ -1,25 +1,30 @@
 #!/usr/bin/env python
-# encoding: utf8
-"""Generate AOSS Tower NetCDF4 files from Level 00 ASCII files.
-"""
+"""Generate AOSS Tower NetCDF4 files from Level 00 ASCII files."""
+import logging
 import os
+import platform
 import sys
-import logging
-import pandas as pd
 from datetime import datetime
-from netCDF4 import Dataset
+
 import numpy as np
-import platform
+import pandas as pd
+from metobscommon.util import calc
+from metobscommon.util.nc import (
+    calculate_wind_gust,
+    create_variables,
+    make_summary_dict,
+    summary_over_interval,
+    write_qc_for_unwritten,
+    write_vars,
+)
+from netCDF4 import Dataset
+
 from aosstower import schema
-from aosstower.station import station_info
 from aosstower.level_00 import parser
-from metobscommon.util import calc
-from metobscommon.util.nc import (make_summary_dict, write_vars,
-                                  calculate_wind_gust, summary_over_interval,
-                                  create_variables, write_qc_for_unwritten)
+from aosstower.station import station_info
 
 LOG = logging.getLogger(__name__)
-SOFTWARE_VERSION = '00'
+SOFTWARE_VERSION = "00"
 
 
 def _get_data(input_files):
@@ -28,7 +33,7 @@ def _get_data(input_files):
         try:
             yield from parser.read_frames(filename)
         except FileNotFoundError:
-            LOG.warning("Input data file was not found: {}".format(filename))
+            LOG.warning(f"Input data file was not found: {filename}")
             bad_files += 1
             continue
     if bad_files == len(input_files):
@@ -37,8 +42,8 @@ def _get_data(input_files):
 
 def get_data(input_files):
     frame = pd.DataFrame(_get_data(input_files))
-    frame = frame.set_index('stamp')
-    frame = frame.mask(frame == -99999.).fillna(value=np.nan)
+    frame = frame.set_index("stamp")
+    frame = frame.mask(frame == -99999.0).fillna(value=np.nan)
 
     for col_name in frame.columns:
         if col_name in schema.unit_conversions:
@@ -49,153 +54,201 @@ def get_data(input_files):
 
 def write_global_attributes(nc_file, input_sources, interval=None, datastream=None):
     # create global attributes
-    nc_file.source = 'surface observation'
-    nc_file.Conventions = 'ARM-1.2 CF-1.6'
-    nc_file.institution = 'University of Wisconsin - Madison (UW) Space Science and Engineering Center (SSEC)'
-    nc_file.featureType = 'timeSeries'
-    nc_file.data_level = 'b1'
+    nc_file.source = "surface observation"
+    nc_file.Conventions = "ARM-1.2 CF-1.6"
+    nc_file.institution = "University of Wisconsin - Madison (UW) Space Science and Engineering Center (SSEC)"
+    nc_file.featureType = "timeSeries"
+    nc_file.data_level = "b1"
 
     # monthly files end with .month.nc
     # these end with .day.nc
 
     if datastream:
         nc_file.datastream = datastream
-    elif interval in ['1D']:
+    elif interval in ["1D"]:
         # assume this is a monthly file, averaged daily
-        nc_file.datastream = 'aoss.tower.nc-1mo-1d.b1.v{software_version}'.format(software_version=SOFTWARE_VERSION)
-    elif interval in ['1T', '1min']:
+        nc_file.datastream = f"aoss.tower.nc-1mo-1d.b1.v{SOFTWARE_VERSION}"
+    elif interval in ["1T", "1min"]:
         # assume this is a daily file, averaged
-        nc_file.datastream = 'aoss.tower.nc-1d-1m.b1.v{software_version}'.format(software_version=SOFTWARE_VERSION)
+        nc_file.datastream = f"aoss.tower.nc-1d-1m.b1.v{SOFTWARE_VERSION}"
     nc_file.software_version = SOFTWARE_VERSION
     nc_file.command_line = " ".join(sys.argv)
 
     # generate history
-    nc_file.history = ' '.join(platform.uname()) + " " + os.path.basename(__file__)
+    nc_file.history = " ".join(platform.uname()) + " " + os.path.basename(__file__)
     nc_file.input_source = input_sources[0]
-    nc_file.input_sources = ', '.join(input_sources)
-
-
-def create_giant_netcdf(input_files, output_fn, zlib, chunk_size,
-                        start=None, end=None, interval_width=None,
-                        summary=False,
-                        database=schema.database_dict, datastream=None):
+    nc_file.input_sources = ", ".join(input_sources)
+
+
+def create_giant_netcdf(
+    input_files,
+    output_fn,
+    zlib,
+    chunk_size,
+    start=None,
+    end=None,
+    interval_width=None,
+    summary=False,
+    database=schema.database_dict,
+    datastream=None,
+):
     frame = get_data(input_files)
     if frame.empty:
         raise ValueError("No data found from input files: {}".format(", ".join(input_files)))
 
     # Add wind direction components so we can average wind direction properly
-    frame['wind_east'], frame['wind_north'], _ = calc.wind_vector_components(frame['wind_speed'], frame['wind_dir'])
+    frame["wind_east"], frame["wind_north"], _ = calc.wind_vector_components(frame["wind_speed"], frame["wind_dir"])
 
-    if 'air_temp' in frame and 'rh' in frame and \
-            ('dewpoint' in database or 'dewpoint_mean' in database):
-        LOG.info("'dewpoint' is missing from the input file, will calculate "
-                 "it from air temp and relative humidity")
-        frame['dewpoint'] = calc.dewpoint(frame['air_temp'], frame['rh'])
+    if "air_temp" in frame and "rh" in frame and ("dewpoint" in database or "dewpoint_mean" in database):
+        LOG.info("'dewpoint' is missing from the input file, will calculate " "it from air temp and relative humidity")
+        frame["dewpoint"] = calc.dewpoint(frame["air_temp"], frame["rh"])
 
     # round up each 1 minute group so data at time T is the average of data
     # from T - 1 (exclusive) to T (inclusive).
-    new_frame = frame.resample('5S', closed='right', loffset='5S').mean()
+    new_frame = frame.resample("5S", closed="right", loffset="5S").mean()
 
     # 2 minute rolling average of 5 second data (5 seconds * 24 = 120 seconds = 2 minutes)
-    winds_frame_5s = new_frame[['wind_speed', 'wind_east', 'wind_north']]
-    winds_frame_2m = winds_frame_5s.rolling('2T').mean()
-    winds_frame_2m['gust'] = calculate_wind_gust(winds_frame_5s['wind_speed'], winds_frame_2m['wind_speed'])
+    winds_frame_5s = new_frame[["wind_speed", "wind_east", "wind_north"]]
+    winds_frame_2m = winds_frame_5s.rolling("2T").mean()
+    winds_frame_2m["gust"] = calculate_wind_gust(winds_frame_5s["wind_speed"], winds_frame_2m["wind_speed"])
 
     # rolling average is used for mean output
     new_frame.update(winds_frame_2m)  # adds wind_speed, wind_east/north
-    new_frame['gust'] = winds_frame_2m['gust']
+    new_frame["gust"] = winds_frame_2m["gust"]
 
     # average the values
     if summary:
         frame = summary_over_interval(new_frame, interval_width)
     else:
-        frame = new_frame.resample(interval_width, closed='right', loffset=interval_width).mean()
-        frame['wind_dir'] = calc.wind_vector_degrees(frame['wind_east'], frame['wind_north'])
-        frame['gust'] = new_frame['gust'].resample(interval_width, closed='right', loffset=interval_width).max()
+        frame = new_frame.resample(interval_width, closed="right", loffset=interval_width).mean()
+        frame["wind_dir"] = calc.wind_vector_degrees(frame["wind_east"], frame["wind_north"])
+        frame["gust"] = new_frame["gust"].resample(interval_width, closed="right", loffset=interval_width).max()
     frame = frame.fillna(np.nan)
 
     if start and end:
-        frame = frame[start.strftime('%Y-%m-%d %H:%M:%S'): end.strftime('%Y-%m-%d %H:%M:%S')]
+        frame = frame[start.strftime("%Y-%m-%d %H:%M:%S") : end.strftime("%Y-%m-%d %H:%M:%S")]
 
-    if chunk_size and not isinstance(chunk_size, (list, tuple)):
-        chunk_sizes = [chunk_size]
-    else:
-        chunk_sizes = [frame.shape[0]]
+    chunk_sizes = [chunk_size] if chunk_size and not isinstance(chunk_size, (list, tuple)) else [frame.shape[0]]
 
-    first_stamp = datetime.strptime(str(frame.index[0]), '%Y-%m-%d %H:%M:%S')
+    first_stamp = datetime.strptime(str(frame.index[0]), "%Y-%m-%d %H:%M:%S")
     # NETCDF4_CLASSIC was chosen so that MFDataset reading would work. See:
     # http://unidata.github.io/netcdf4-python/#netCDF4.MFDataset
-    nc_file = Dataset(output_fn, 'w', format='NETCDF4_CLASSIC')
-    nc_file.createDimension('time', None)
-    nc_file.createDimension('max_len_station_name', 32)
+    nc_file = Dataset(output_fn, "w", format="NETCDF4_CLASSIC")
+    nc_file.createDimension("time", None)
+    nc_file.createDimension("max_len_station_name", 32)
     create_variables(nc_file, first_stamp, database, chunk_sizes, zlib)
     written_vars = write_vars(nc_file, frame, database, station_info)
     unwritten_vars = set(nc_file.variables.keys()) - set(written_vars)
-    written_vars.extend(write_qc_for_unwritten(nc_file.variables,
-                                               unwritten_vars))
+    written_vars.extend(write_qc_for_unwritten(nc_file.variables, unwritten_vars))
     # remove any of the qc_ variables we just added
     unwritten_vars = set(nc_file.variables.keys()) - set(written_vars)
     for unwritten in unwritten_vars:
-        LOG.warning("Variable created but no data written: {}".format(unwritten))
-
-    write_global_attributes(nc_file,
-                            [os.path.basename(x) for x in input_files],
-                            interval=interval_width,
-                            datastream=datastream)
+        LOG.warning(f"Variable created but no data written: {unwritten}")
+
+    write_global_attributes(
+        nc_file,
+        [os.path.basename(x) for x in input_files],
+        interval=interval_width,
+        datastream=datastream,
+    )
     nc_file.close()
     return nc_file
 
 
 def _dt_convert(datetime_str):
-    """Parse datetime string, return datetime object"""
+    """Parse datetime string, return datetime object."""
     try:
-        return datetime.strptime(datetime_str, '%Y%m%d')
+        return datetime.strptime(datetime_str, "%Y%m%d")
     except ValueError:
         try:
-            return datetime.strptime(datetime_str, '%Y-%m-%d')
+            return datetime.strptime(datetime_str, "%Y-%m-%d")
         except ValueError:
-            return datetime.strptime(datetime_str, '%Y-%m-%dT%H:%M:%S')
+            return datetime.strptime(datetime_str, "%Y-%m-%dT%H:%M:%S")
 
 
 def main():
     import argparse
+
     from metobscommon.archive import setup_logging
-    parser = argparse.ArgumentParser(description="Convert level_00 aoss tower data to level_b1",
-                                     fromfile_prefix_chars='@')
-
-    parser.add_argument('-v', '--verbose', action="count", default=int(os.environ.get("VERBOSITY", 2)),
-                        dest='verbosity',
-                        help='each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG (default INFO)')
-    parser.add_argument('-l', '--log-file', dest="log_filepath",
-                        help="Alternate name for log file, default is to not create a file")
-    parser.add_argument('-s', '--start-time', type=_dt_convert,
-                        help="Start time of massive netcdf file, if only -s is given, a netcdf file for only that day is given" +
-                             ". Formats allowed: \'YYYY-MM-DDTHH:MM:SS\', \'YYYY-MM-DD\'")
-    parser.add_argument('-e', '--end-time', type=_dt_convert,
-                        help='End time of massive netcdf file. Formats allowed:' +
-                                "\'YYYY-MM-DDTHH:MM:SS\', \'YYYY-MM-DD\'")
-    parser.add_argument('-n', '--interval', default='1T',
-                        help="""Width of the interval to average input data
+
+    parser = argparse.ArgumentParser(
+        description="Convert level_00 aoss tower data to level_b1",
+        fromfile_prefix_chars="@",
+    )
+
+    parser.add_argument(
+        "-v",
+        "--verbose",
+        action="count",
+        default=int(os.environ.get("VERBOSITY", 2)),
+        dest="verbosity",
+        help="each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG (default INFO)",
+    )
+    parser.add_argument(
+        "-l",
+        "--log-file",
+        dest="log_filepath",
+        help="Alternate name for log file, default is to not create a file",
+    )
+    parser.add_argument(
+        "-s",
+        "--start-time",
+        type=_dt_convert,
+        help="Start time of massive netcdf file, if only -s is given, a netcdf file for only that day is given"
+        + ". Formats allowed: 'YYYY-MM-DDTHH:MM:SS', 'YYYY-MM-DD'",
+    )
+    parser.add_argument(
+        "-e",
+        "--end-time",
+        type=_dt_convert,
+        help="End time of massive netcdf file. Formats allowed:" + "'YYYY-MM-DDTHH:MM:SS', 'YYYY-MM-DD'",
+    )
+    parser.add_argument(
+        "-n",
+        "--interval",
+        default="1T",
+        help="""Width of the interval to average input data
 over in Pandas offset format. If not specified, 1 minute averages are used.
 Use '1D' for daily or '5T' for 5 minute averages.
 See this page for more details:
-http://pandas.pydata.org/pandas-docs/stable/timeseries.html#offset-aliases""")
-    parser.add_argument('--summary', action='store_true',
-                        help="Create a file with _low, _mean, _high versions of every variable name")
-    parser.add_argument('-f', '--fields', nargs='+', default=schema.met_vars,
-                        help="Variable names to include in the NetCDF file (base name, no suffixes)")
-    parser.add_argument('--chunk-size', type=int, help='chunk size for the netCDF file')
-    parser.add_argument('-z', '--zlib', action='store_true', help='compress netCDF file with zlib')
-    parser.add_argument('--data-stream', help="'datastream' global attribute to put in output file")
-
-    parser.add_argument('-i', '--input', dest='input_files', required=True, nargs="+",
-                        help="aoss_tower level_00 paths. Use @filename to read a list of paths from that filename.")
-
-    parser.add_argument('-o', '--output', dest='output_files', required=True, nargs="+",
-                        help="""NetCDF filename(s) to create from input. If one
+http://pandas.pydata.org/pandas-docs/stable/timeseries.html#offset-aliases""",
+    )
+    parser.add_argument(
+        "--summary",
+        action="store_true",
+        help="Create a file with _low, _mean, _high versions of every variable name",
+    )
+    parser.add_argument(
+        "-f",
+        "--fields",
+        nargs="+",
+        default=schema.met_vars,
+        help="Variable names to include in the NetCDF file (base name, no suffixes)",
+    )
+    parser.add_argument("--chunk-size", type=int, help="chunk size for the netCDF file")
+    parser.add_argument("-z", "--zlib", action="store_true", help="compress netCDF file with zlib")
+    parser.add_argument("--data-stream", help="'datastream' global attribute to put in output file")
+
+    parser.add_argument(
+        "-i",
+        "--input",
+        dest="input_files",
+        required=True,
+        nargs="+",
+        help="aoss_tower level_00 paths. Use @filename to read a list of paths from that filename.",
+    )
+
+    parser.add_argument(
+        "-o",
+        "--output",
+        dest="output_files",
+        required=True,
+        nargs="+",
+        help="""NetCDF filename(s) to create from input. If one
 filename is specified then all input files are combined in to it. Otherwise
 each input file is mapped to the corresponding output file.
-""")
+""",
+    )
     args = parser.parse_args()
 
     levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
@@ -204,7 +257,7 @@ each input file is mapped to the corresponding output file.
     if args.start_time and not args.end_time:
         args.end_time = args.start_time.replace(hour=23, minute=59, second=59)
     elif not args.start_time and args.end_time:
-        raise ValueError('start time must be specified when end time is specified')
+        raise ValueError("start time must be specified when end time is specified")
 
     mini_database = {k: schema.database_dict[k] for k in args.fields}
     if args.summary:
@@ -213,7 +266,7 @@ each input file is mapped to the corresponding output file.
     # Case 1: All inputs to 1 output file
     # Case 2: Each input in to a separate output file
     if args.output_files and len(args.output_files) not in [1, len(args.input_files)]:
-        raise ValueError('Output filenames must be 1 or the same length as input files')
+        raise ValueError("Output filenames must be 1 or the same length as input files")
     elif args.output_files and len(args.output_files) == len(args.input_files):
         args.input_files = [[i] for i in args.input_files]
     else:
@@ -222,15 +275,23 @@ each input file is mapped to the corresponding output file.
     success = False
     for in_files, out_fn in zip(args.input_files, args.output_files):
         try:
-            create_giant_netcdf(in_files, out_fn, args.zlib,
-                                args.chunk_size, args.start_time,
-                                args.end_time, args.interval, args.summary,
-                                mini_database, args.data_stream)
+            create_giant_netcdf(
+                in_files,
+                out_fn,
+                args.zlib,
+                args.chunk_size,
+                args.start_time,
+                args.end_time,
+                args.interval,
+                args.summary,
+                mini_database,
+                args.data_stream,
+            )
             success = True
         except (ValueError, TypeError):
-            LOG.error("Could not generate NetCDF file for {}".format(in_files), exc_info=True)
+            LOG.error(f"Could not generate NetCDF file for {in_files}", exc_info=True)
     if not success:
-        raise IOError('All ASCII files were empty or could not be read')
+        raise OSError("All ASCII files were empty or could not be read")
 
 
 if __name__ == "__main__":
diff --git a/aosstower/level_b1/quicklook.py b/aosstower/level_b1/quicklook.py
index 6c8e7bab18557736ed6f9af29607b13479d8b762..b765d7deae422d4c97f95aaf644251625f835f4c 100644
--- a/aosstower/level_b1/quicklook.py
+++ b/aosstower/level_b1/quicklook.py
@@ -1,16 +1,18 @@
 import matplotlib
-matplotlib.use('agg')
 
+matplotlib.use("agg")
+
+import logging
+import math
 import os
 import sys
 from datetime import datetime, timedelta
-import logging
+
+import matplotlib.dates as md
+import matplotlib.pyplot as plt
+import numpy as np
 import pandas as pd
 from netCDF4 import MFDataset, MFTime
-import numpy as np
-import matplotlib.pyplot as plt
-import matplotlib.dates as md
-import math
 
 LOG = logging.getLogger(__name__)
 FIGURE_TITLE_SIZE = 13
@@ -18,11 +20,11 @@ TN_SIZE = (1, 1)
 
 # names of the plots used in title (default is `.title()` of plot name)
 TITLES = {
-    'air_temp': 'Air Temperature',
-    'td': 'Air and Dewpoint Temperature',
-    'rh': 'Relative Humidity',
-    'wind_dir': 'Wind Direction',
-    'accum_precip': 'Accumulated Precipitation Since 0Z',
+    "air_temp": "Air Temperature",
+    "td": "Air and Dewpoint Temperature",
+    "rh": "Relative Humidity",
+    "wind_dir": "Wind Direction",
+    "accum_precip": "Accumulated Precipitation Since 0Z",
 }
 
 
@@ -35,8 +37,9 @@ def get_subtitle_location(num_subplots):
     return 1 - 0.055 * num_subplots
 
 
-class PlotMaker(object):
-    """Object for making plots and storing/validating plot metadata"""
+class PlotMaker:
+    """Object for making plots and storing/validating plot metadata."""
+
     def __init__(self, name, dependencies, title=None, units=None):
         self.name = name
         self.deps = dependencies
@@ -47,7 +50,7 @@ class PlotMaker(object):
         self.units = units
 
     def missing_deps(self, frame):
-        """Get dependency variables missing from the provided frame"""
+        """Get dependency variables missing from the provided frame."""
         for var_name in self.deps:
             if var_name not in frame:
                 yield var_name
@@ -57,20 +60,22 @@ class PlotMaker(object):
         if delta < timedelta(hours=24).total_seconds():
             return start_time.strftime("%Y-%m-%d")
         else:
-            return "{:%Y-%m-%d %H:%M} to {:%Y-%m-%d %H:%M}".format(start_time, end_time)
+            return f"{start_time:%Y-%m-%d %H:%M} to {end_time:%Y-%m-%d %H:%M}"
 
     def get_title(self, frame, is_subplot, start_time, end_time):
         if self._title:
-            title_prefix = "AO&SS Building Tower " if not is_subplot else ''
-            title_name = TITLES.get(self.name, self.name.replace('_', ' ').title())
-            unit_str = '({})'.format(self.units) if self.units and is_subplot else ''
+            title_prefix = "AO&SS Building Tower " if not is_subplot else ""
+            title_name = TITLES.get(self.name, self.name.replace("_", " ").title())
+            unit_str = f"({self.units})" if self.units and is_subplot else ""
             date_string = self.get_date_string(start_time, end_time)
-            title = self._title.format(title_prefix=title_prefix,
-                                       title_name=title_name,
-                                       units=unit_str,
-                                       date_string=date_string)
+            title = self._title.format(
+                title_prefix=title_prefix,
+                title_name=title_name,
+                units=unit_str,
+                date_string=date_string,
+            )
         else:
-            title = ''
+            title = ""
         return title
 
     def get_yticks(self, ymin, ymax, num_plots):
@@ -81,11 +86,11 @@ class PlotMaker(object):
         return new_ticks
 
     def _get_ylabel(self, is_subplot=False):
-        y_label = TITLES.get(self.name, self.name.replace('_', ' ').title())
+        y_label = TITLES.get(self.name, self.name.replace("_", " ").title())
         if is_subplot:
             return None
         if self.units:
-            return "{} ({})".format(y_label, self.units)
+            return f"{y_label} ({self.units})"
         return y_label
 
     def _set_ylabel(self, ax, is_subplot=False):
@@ -95,12 +100,10 @@ class PlotMaker(object):
 
         if is_subplot:
             # put units on the top left of the plot axes
-            ax.text(.008, .9, self.units,
-                    horizontalalignment='left', va='top',
-                    transform=ax.transAxes, size=8)
+            ax.text(0.008, 0.9, self.units, horizontalalignment="left", va="top", transform=ax.transAxes, size=8)
 
     def _call_plot(self, frame, ax):
-        lines = ax.plot(frame.index, frame, 'k')
+        lines = ax.plot(frame.index, frame, "k")
         return lines
 
     def _set_ylim(self, frame, ax):
@@ -123,23 +126,19 @@ class PlotMaker(object):
             fig.suptitle(title, fontsize=FIGURE_TITLE_SIZE)
 
     def _get_axes(self, fig, is_subplot, shared_x=None):
-        if is_subplot:
-            ax = fig.add_subplot(*is_subplot, sharex=shared_x)
-        else:
-            ax = fig.add_subplot(111, sharex=shared_x)
+        ax = fig.add_subplot(*is_subplot, sharex=shared_x) if is_subplot else fig.add_subplot(111, sharex=shared_x)
         plt.sca(ax)
         return ax
 
     def _set_xlabel(self, ax, is_subplot):
         if not is_subplot:
-            ax.set_xlabel('Time (UTC)')
+            ax.set_xlabel("Time (UTC)")
 
     def _set_yticks(self, ax, ymin, ymax, is_subplot):
         if is_subplot:
             new_ticks = self.get_yticks(ymin, ymax, is_subplot[0])
-            # ax.yaxis.get_major_ticks()[-1].set_visible(False)
             ax.set_yticks(new_ticks)
-            ax.locator_params(axis='y', nbins=3)
+            ax.locator_params(axis="y", nbins=3)
         ax.yaxis.get_major_formatter().set_useOffset(False)
 
     def _set_xaxis_formatter(self, ax, start_time, end_time, is_subplot):
@@ -152,18 +151,20 @@ class PlotMaker(object):
         def _fmt(interval, x, pos=None):
             x_num = md.num2date(x).replace(tzinfo=None)
             delta_seconds = (x_num - start_time.replace(hour=0, minute=0, second=0, microsecond=0)).total_seconds()
-            num_hours = delta_seconds / 3600.
+            num_hours = delta_seconds / 3600.0
             if interval == md.HOURLY:
-                return "{:.0f}".format(num_hours)
+                return f"{num_hours:.0f}"
             elif interval == md.MINUTELY:
-                num_minutes = delta_seconds / 60.
-                num_minutes -= int(num_hours) * 60.
-                return "{:02.0f}:{:02.0f}".format(int(num_hours), num_minutes)
+                num_minutes = delta_seconds / 60.0
+                num_minutes -= int(num_hours) * 60.0
+                return f"{int(num_hours):02.0f}:{num_minutes:02.0f}"
             else:
                 return x.strftime("{%Y-%m-%d}")
+
         from functools import partial
-        xfmt.scaled[1. / md.MINUTES_PER_DAY] = plt.FuncFormatter(partial(_fmt, md.MINUTELY))
-        xfmt.scaled[1. / md.HOURS_PER_DAY] = plt.FuncFormatter(partial(_fmt, md.HOURLY))
+
+        xfmt.scaled[1.0 / md.MINUTES_PER_DAY] = plt.FuncFormatter(partial(_fmt, md.MINUTELY))
+        xfmt.scaled[1.0 / md.HOURS_PER_DAY] = plt.FuncFormatter(partial(_fmt, md.HOURLY))
         ax.xaxis.set_major_locator(xloc)
         ax.xaxis.set_major_formatter(xfmt)
 
@@ -197,10 +198,10 @@ class PlotMaker(object):
             # Remove any titles
             ax.set_title("")
             # remove y-axis lines
-            ax.spines['left'].set_visible(False)
-            ax.spines['right'].set_visible(False)
-            ax.spines['top'].set_visible(False)
-            ax.spines['bottom'].set_visible(False)
+            ax.spines["left"].set_visible(False)
+            ax.spines["right"].set_visible(False)
+            ax.spines["top"].set_visible(False)
+            ax.spines["bottom"].set_visible(False)
 
             for t in ax.texts:
                 t.set_visible(False)
@@ -210,11 +211,8 @@ class PlotMaker(object):
         self._convert_ax_to_thumbnail(fig)
         return fig
 
-    def create_plot(self, frame, fig, start_time=None, end_time=None,
-                    is_subplot=None, shared_x=None, title=None):
-        """
-
-        :param frame:
+    def create_plot(self, frame, fig, start_time=None, end_time=None, is_subplot=None, shared_x=None, title=None):
+        """:param frame:
         :param fig:
         :param is_subplot: None or (num plots, num columns, num_rows)
         :param shared_x:
@@ -222,20 +220,18 @@ class PlotMaker(object):
         """
         specific_frame = frame[[x for x in frame.columns if x in self.deps]]
         if frame.empty or specific_frame.empty or specific_frame.isnull().all().any():
-            raise ValueError("No valid data found or missing necessary data to make {}".format(self.name))
+            raise ValueError(f"No valid data found or missing necessary data to make {self.name}")
         if start_time is None:
             start_time = frame.index[0].to_pydatetime()
         if end_time is None:
             end_time = frame.index[-1].to_pydatetime()
 
         ax = self._get_axes(fig, is_subplot, shared_x)
-        self._set_title(frame, fig, ax,
-                        start_time=start_time, end_time=end_time,
-                        title=title, is_subplot=is_subplot)
+        self._set_title(frame, fig, ax, start_time=start_time, end_time=end_time, title=title, is_subplot=is_subplot)
 
         # make ticks show up on top and bottom inside and out of the axis line
-        ax.xaxis.set_tick_params(left=True, right=True, direction='inout')
-        lines = self._call_plot(specific_frame, ax)
+        ax.xaxis.set_tick_params(left=True, right=True, direction="inout")
+        self._call_plot(specific_frame, ax)
         ymin, ymax = self._set_ylim(specific_frame, ax)
 
         self._set_yticks(ax, ymin, ymax, is_subplot)
@@ -263,7 +259,7 @@ class TDPlotMaker(PlotMaker):
     def _call_plot(self, frame, ax):
         air_temp = self.deps[0]
         dewpoint = self.deps[1]
-        return ax.plot(frame.index, frame[air_temp], 'r', frame.index, frame[dewpoint], 'g')
+        return ax.plot(frame.index, frame[air_temp], "r", frame.index, frame[dewpoint], "g")
 
 
 class WindDirPlotMaker(PlotMaker):
@@ -274,7 +270,7 @@ class WindDirPlotMaker(PlotMaker):
         ax.yaxis.set_ticks([0, 90, 180, 270])
 
     def _call_plot(self, frame, ax):
-        lines = ax.plot(frame.index, frame, 'k.', markersize=3, linewidth=0)
+        lines = ax.plot(frame.index, frame, "k.", markersize=3, linewidth=0)
         return lines
 
 
@@ -283,32 +279,31 @@ class MeteorogramPlotMaker(PlotMaker):
         self.plot_deps = plot_deps
         self.thumbnail_deps = thumbnail_deps
         self.axes = {}
-        super(MeteorogramPlotMaker, self).__init__(name, dependencies, title=title)
+        super().__init__(name, dependencies, title=title)
 
     def _convert_ax_to_thumbnail(self, fig):
-        if hasattr(fig, '_my_axes'):
+        if hasattr(fig, "_my_axes"):
             for k, ax in fig._my_axes.items():
                 if k not in self.thumbnail_deps:
                     fig.delaxes(ax)
                     continue
 
-        super(MeteorogramPlotMaker, self)._convert_ax_to_thumbnail(fig)
+        super()._convert_ax_to_thumbnail(fig)
 
         for idx, ax in enumerate(fig.axes):
-            ax.spines['left'].set_visible(False)
-            ax.spines['right'].set_visible(False)
+            ax.spines["left"].set_visible(False)
+            ax.spines["right"].set_visible(False)
             if idx == 0:
-                ax.spines['top'].set_visible(False)
+                ax.spines["top"].set_visible(False)
             else:
-                ax.spines['top'].set_visible(True)
+                ax.spines["top"].set_visible(True)
 
             if idx == len(fig.axes) - 1:
-                ax.spines['bottom'].set_visible(False)
+                ax.spines["bottom"].set_visible(False)
             else:
-                ax.spines['bottom'].set_visible(True)
+                ax.spines["bottom"].set_visible(True)
 
-    def create_plot(self, frame, fig, start_time=None, end_time=None,
-                    is_subplot=False, shared_x=None, title=None):
+    def create_plot(self, frame, fig, start_time=None, end_time=None, is_subplot=False, shared_x=None, title=None):
         if is_subplot or shared_x:
             raise ValueError("Meteorogram Plot can not be a subplot or share X-axis")
 
@@ -326,45 +321,49 @@ class MeteorogramPlotMaker(PlotMaker):
         fig._my_axes = {}
         for idx, plot_name in enumerate(self.plot_deps):
             plot_maker = PLOT_TYPES.get(plot_name, PlotMaker(plot_name, (plot_name,)))
-            title_name = TITLES.get(plot_name, plot_name.replace('_', ' ').title())
-            ax = plot_maker.create_plot(frame, fig,
-                                        is_subplot=(num_plots, 1, idx + 1),
-                                        shared_x=shared_x,
-                                        title=title_name)
+            title_name = TITLES.get(plot_name, plot_name.replace("_", " ").title())
+            ax = plot_maker.create_plot(
+                frame,
+                fig,
+                is_subplot=(num_plots, 1, idx + 1),
+                shared_x=shared_x,
+                title=title_name,
+            )
             fig._my_axes[plot_name] = ax
             if idx == 0:
                 shared_x = ax
             if idx != num_plots - 1:
                 # Disable the x-axis ticks so we don't interfere with other subplots
-                kwargs = {'visible': False}
+                kwargs = {"visible": False}
                 for l in ax.get_xticklabels():
                     l.update(kwargs)
             # make the top y-tick label invisible
-            # ax.yaxis.get_major_ticks()[-1].label1.update({'visible': False})
 
-        ax.set_xlabel('Time (UTC)')
-        # fig.subplots_adjust(hspace=0, bottom=0.125)
+        ax.set_xlabel("Time (UTC)")
         fig.subplots_adjust(hspace=0)
         self._set_xaxis_formatter(ax, start_time, end_time, is_subplot)
 
         return ax
 
+
 # map plot name -> variable dependencies
 # if not listed then plot name is assumed to be the same as the variable needed
 PLOT_TYPES = {
-    'meteorogram': MeteorogramPlotMaker('meteorogram',
-                                        ('air_temp', 'dewpoint', 'pressure', 'wind_speed', 'wind_dir', 'accum_precip', 'solar_flux'),
-                                        ('td', 'pressure', 'wind_speed', 'wind_dir', 'accum_precip', 'solar_flux'),
-                                        ('td', 'wind_speed', 'wind_dir', 'accum_precip')),
-    'td': TDPlotMaker('td', ('air_temp', 'dewpoint'), units="°C"),  # air_temp and dewpoint in one plot
-    'wind_dir': WindDirPlotMaker('wind_dir', ('wind_dir',), units='°'),  # special tick labels
-    'rh': PlotMaker('rh', ('rh',), units='%'),
-    'air_temp': PlotMaker('air_temp', ('air_temp',), units='°C'),
-    'pressure': PlotMaker('pressure', ('pressure',), units='hPa'),
-    'dewpoint': PlotMaker('dewpoint', ('air_temp',), units='°C'),
-    'wind_speed': PlotMaker('wind_speed', ('wind_speed',), units='m/s'),
-    'accum_precip': PrecipPlotMaker('accum_precip', ('accum_precip',), units='mm'),
-    'solar_flux': PlotMaker('solar_flux', ('solar_flux',), units='W/m^2'),
+    "meteorogram": MeteorogramPlotMaker(
+        "meteorogram",
+        ("air_temp", "dewpoint", "pressure", "wind_speed", "wind_dir", "accum_precip", "solar_flux"),
+        ("td", "pressure", "wind_speed", "wind_dir", "accum_precip", "solar_flux"),
+        ("td", "wind_speed", "wind_dir", "accum_precip"),
+    ),
+    "td": TDPlotMaker("td", ("air_temp", "dewpoint"), units="°C"),  # air_temp and dewpoint in one plot
+    "wind_dir": WindDirPlotMaker("wind_dir", ("wind_dir",), units="°"),  # special tick labels
+    "rh": PlotMaker("rh", ("rh",), units="%"),
+    "air_temp": PlotMaker("air_temp", ("air_temp",), units="°C"),
+    "pressure": PlotMaker("pressure", ("pressure",), units="hPa"),
+    "dewpoint": PlotMaker("dewpoint", ("air_temp",), units="°C"),
+    "wind_speed": PlotMaker("wind_speed", ("wind_speed",), units="m/s"),
+    "accum_precip": PrecipPlotMaker("accum_precip", ("accum_precip",), units="mm"),
+    "solar_flux": PlotMaker("solar_flux", ("solar_flux",), units="W/m^2"),
 }
 
 
@@ -375,36 +374,34 @@ def get_data(input_files, columns):
     # get the data from the files
     for name in columns:
         if name not in files.variables:
-            LOG.warning("Unknown file variable: {}".format(name))
+            LOG.warning(f"Unknown file variable: {name}")
             continue
         data_dict[name] = files.variables[name][:]
-        data_dict['qc_' + name] = files.variables['qc_' + name][:]
+        data_dict["qc_" + name] = files.variables["qc_" + name][:]
 
     # convert base_time epoch format into date_time object
-    base_time = files.variables['base_time'][:]
+    base_time = files.variables["base_time"][:]
     base_time_obj = datetime(1970, 1, 1) + timedelta(seconds=int(base_time))
-    
+
     # convert per-file offsets to offsets based on the first file's base_time
-    offsets = MFTime(files.variables['time_offset'])[:]
+    offsets = MFTime(files.variables["time_offset"])[:]
     # for each offset, convert that into a datetime object
-    data_dict['stamps'] = [base_time_obj + timedelta(seconds=int(s)) for s in offsets]
+    data_dict["stamps"] = [base_time_obj + timedelta(seconds=int(s)) for s in offsets]
 
-    return pd.DataFrame(data_dict).set_index(['stamps'])
+    return pd.DataFrame(data_dict).set_index(["stamps"])
 
 
-def create_plot(plot_names, frame, output,
-                start_time=None, end_time=None, thumbnail=False):
-    """
-    
-    Args:
-        plot_names: 
-        frame: 
-        output: 
-        start_time: 
-        end_time: 
-        daily: Whether or not this plot should represent one day of data
+def create_plot(plot_names, frame, output, start_time=None, end_time=None, thumbnail=False):
+    """Args:
+        plot_names: plot types or variable names to create plots for
+        frame: time-indexed DataFrame containing the variable data
+        output: output filename pattern for the saved image(s)
+        start_time: start of the plotted time range (defaults to frame start)
+        end_time: end of the plotted time range (defaults to frame end)
+        thumbnail: whether to also save a thumbnail version of each plot.
 
-    Returns:
+    Returns:
+        None. Plot images are written to disk.
 
     """
     if start_time is None:
@@ -417,10 +414,10 @@ def create_plot(plot_names, frame, output,
         var_names = []
         for var_name in plot_maker.deps:
             if var_name not in frame:
-                raise ValueError("Missing required variable '{}' for plot '{}'".format(var_name, name))
+                raise ValueError(f"Missing required variable '{var_name}' for plot '{name}'")
             var_names.append(var_name)
             # write NaNs where QC values are not 0
-            qc_name = 'qc_' + var_name
+            qc_name = "qc_" + var_name
             if qc_name in frame:
                 frame[var_name].mask(frame[qc_name] != 0)
                 var_names.append(qc_name)
@@ -431,62 +428,86 @@ def create_plot(plot_names, frame, output,
 
         fig = plt.figure()
         try:
-            ax = plot_maker.create_plot(plot_frame, fig, start_time=start_time, end_time=end_time)
+            plot_maker.create_plot(plot_frame, fig, start_time=start_time, end_time=end_time)
         except ValueError:
-            LOG.error("Could not make '{}'".format(name), exc_info=True)
+            LOG.error(f"Could not make '{name}'", exc_info=True)
             continue
 
         out_fn = output.format(plot_name=name, start_time=start_time, end_time=end_time)
-        LOG.info("Saving plot '{}' to filename '{}'".format(name, out_fn))
+        LOG.info(f"Saving plot '{name}' to filename '{out_fn}'")
         fig.savefig(out_fn)
 
         if thumbnail:
             stem, ext = os.path.splitext(out_fn)
-            out_fn = "{}_thumbnail{}".format(stem, ext)
+            out_fn = f"{stem}_thumbnail{ext}"
             plot_maker.convert_to_thumbnail(fig)
-            LOG.info("Saving thumbnail '{}' to filename '{}'".format(name, out_fn))
+            LOG.info(f"Saving thumbnail '{name}' to filename '{out_fn}'")
             fig.savefig(out_fn)
 
 
 def _dt_convert(datetime_str):
-    """Parse datetime string, return datetime object"""
+    """Parse datetime string, return datetime object."""
     try:
-        return datetime.strptime(datetime_str, '%Y%m%d')
+        return datetime.strptime(datetime_str, "%Y%m%d")
     except ValueError:
         try:
-            return datetime.strptime(datetime_str, '%Y-%m-%d')
+            return datetime.strptime(datetime_str, "%Y-%m-%d")
         except ValueError:
-            return datetime.strptime(datetime_str, '%Y-%m-%dT%H:%M:%S')
+            return datetime.strptime(datetime_str, "%Y-%m-%dT%H:%M:%S")
 
 
 def main():
     import argparse
+
     from metobscommon.archive import setup_logging
+
     parser = argparse.ArgumentParser(description="Use data from level_b1 netCDF files to create netCDF files")
-    parser.add_argument('-v', '--verbose', action='count',
-                       default=int(os.environ.get("VERBOSITY", 2)), 
-                       dest='verbosity',
-                       help=('each occurence increases verbosity 1 level through'
-                             + ' ERROR-WARNING-INFO-DEBUG (default INFO)'))
-    parser.add_argument('-l', '--log-file', dest="log_filepath",
-                        help="Alternate name for log file, default is to not create a file")
-    parser.add_argument('-s', '--start-time', type=_dt_convert,
-    help="Start time of plot. If only -s is given, a plot of " +
-        "only that day is created. Formats allowed: \'YYYY-MM-DDTHH:MM:SS\', \'YYYY-MM-DD\'")
-    parser.add_argument('-e', '--end-time', type=_dt_convert,
-    help="End time of plot. If only -e is given, a plot of only that day is " +
-          "created. Formats allowed: \'YYYY-MM-DDTHH:MM:SS\', \'YYYY-MM-DD\', \'YYYYMMDD\'")
-    parser.add_argument('--met-plots', nargs='+',
-                        help="Override plots to use in the combined meteorogram plot")
+    parser.add_argument(
+        "-v",
+        "--verbose",
+        action="count",
+        default=int(os.environ.get("VERBOSITY", 2)),
+        dest="verbosity",
+        help=("each occurence increases verbosity 1 level through" + " ERROR-WARNING-INFO-DEBUG (default INFO)"),
+    )
+    parser.add_argument(
+        "-l",
+        "--log-file",
+        dest="log_filepath",
+        help="Alternate name for log file, default is to not create a file",
+    )
+    parser.add_argument(
+        "-s",
+        "--start-time",
+        type=_dt_convert,
+        help="Start time of plot. If only -s is given, a plot of "
+        + "only that day is created. Formats allowed: 'YYYY-MM-DDTHH:MM:SS', 'YYYY-MM-DD'",
+    )
+    parser.add_argument(
+        "-e",
+        "--end-time",
+        type=_dt_convert,
+        help="End time of plot. If only -e is given, a plot of only that day is "
+        + "created. Formats allowed: 'YYYY-MM-DDTHH:MM:SS', 'YYYY-MM-DD', 'YYYYMMDD'",
+    )
+    parser.add_argument("--met-plots", nargs="+", help="Override plots to use in the combined meteorogram plot")
     parser.add_argument("input_files", nargs="+", help="aoss_tower_level_b1 files")
-    parser.add_argument('-o', '--output', default="{plot_name}_{start_time:%Y%m%d_%H%M%S}.png", help="filename pattern")
-    parser.add_argument('-t', '--thumbnail', action='store_true', help="if specified, script creates a thumbnail")
-    parser.add_argument('-p', '--plot-names', nargs="+",
-                        required=True,
-                        help="the variable names or plot types to create")
-    parser.add_argument('-d', '--daily', action='store_true',
-        help="creates a plot for every day. Usually used to create plots " +
-        "that will line up for aoss tower quicklooks page")
+    parser.add_argument("-o", "--output", default="{plot_name}_{start_time:%Y%m%d_%H%M%S}.png", help="filename pattern")
+    parser.add_argument("-t", "--thumbnail", action="store_true", help="if specified, script creates a thumbnail")
+    parser.add_argument(
+        "-p",
+        "--plot-names",
+        nargs="+",
+        required=True,
+        help="the variable names or plot types to create",
+    )
+    parser.add_argument(
+        "-d",
+        "--daily",
+        action="store_true",
+        help="creates a plot for every day. Usually used to create plots "
+        + "that will line up for aoss tower quicklooks page",
+    )
     args = parser.parse_args()
 
     levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
@@ -497,23 +518,23 @@ def main():
 
     # check the dependencies for the meteorogram
     if args.met_plots:
-        assert 'meteorogram' not in args.met_plots
-        PLOT_TYPES['meteorogram'].deps = args.met_plots
+        assert "meteorogram" not in args.met_plots
+        PLOT_TYPES["meteorogram"].deps = args.met_plots
 
     plot_deps = [PLOT_TYPES[k].deps if k in PLOT_TYPES else (k,) for k in args.plot_names]
-    plot_deps = list(set(d for deps in plot_deps for d in deps))
+    plot_deps = list({d for deps in plot_deps for d in deps})
     frame = get_data(args.input_files, plot_deps)
     bad_plot_names = set(args.plot_names) - (set(frame.columns) | set(PLOT_TYPES.keys()))
     if bad_plot_names:
         raise ValueError("Unknown plot name(s): {}".format(", ".join(bad_plot_names)))
 
-    #frame only contains data from start-end times
+    # frame only contains data from start-end times
     if args.start_time and args.end_time:
-        frame = frame[args.start_time: args.end_time]
+        frame = frame[args.start_time : args.end_time]
     elif args.start_time:
-        #frame only contains data from start-end of that day
+        # frame only contains data from start-end of that day
         end_time = args.start_time.replace(hour=23, minute=59, second=59, microsecond=999999)
-        frame = frame[args.start_time: end_time]
+        frame = frame[args.start_time : end_time]
 
     if not args.daily:
         # allow plotting methods to write inplace on a copy
@@ -532,5 +553,6 @@ def main():
 
         create_plot(args.plot_names, frame, args.output, start_time, end_time, args.thumbnail)
 
+
 if __name__ == "__main__":
     sys.exit(main())
diff --git a/aosstower/schema.py b/aosstower/schema.py
index afb060f57548729d0503cfe2a24accb78e9f58d2..154fe751fb82c571cf781b4ee03ce14911ae828e 100644
--- a/aosstower/schema.py
+++ b/aosstower/schema.py
@@ -1,278 +1,291 @@
-from numpy import float32
 from collections import namedtuple
 
+from numpy import float32
 
-Var = namedtuple('Var', ['type', 'standard_name', 'name', 'description', 'units', 'valid_min', 'valid_max', 'valid_delta'])
+Var = namedtuple(
+    "Var",
+    ["type", "standard_name", "name", "description", "units", "valid_min", "valid_max", "valid_delta"],
+)
 
 
-database = dict(
-    box_temp=Var(
+database = {
+    "box_temp": Var(
         float32,
-        'air_temperature',
-        'box_temp',
-        'Auxillary Temperature',
-        'degC',
+        "air_temperature",
+        "box_temp",
+        "Auxillary Temperature",
+        "degC",
         None,
         None,
         None,
-        ),
-    box_pressure=Var(
+    ),
+    "box_pressure": Var(
         float32,
-        'air_pressure',
-        'box_pressure',
-        'Pressure inside the data logger enclosure',
-        'hPa',
-        '850',
-        '1100',
+        "air_pressure",
+        "box_pressure",
+        "Pressure inside the data logger enclosure",
+        "hPa",
+        "850",
+        "1100",
         None,
     ),
-    paro_air_temp_period=Var(
+    "paro_air_temp_period": Var(
         float32,
         None,
-        'paro_air_temp_period',
+        "paro_air_temp_period",
         None,
-        '1',
+        "1",
         None,
         None,
         None,
-        ),
-    paro_pressure_period=Var(
+    ),
+    "paro_pressure_period": Var(
         float32,
         None,
-        'paro_pressure_period',
+        "paro_pressure_period",
         None,
-        '1',
+        "1",
         None,
         None,
         None,
     ),
-    paro_air_temp=Var(
+    "paro_air_temp": Var(
         float32,
-        'air_temperature',
-        'paro_air_temp',
+        "air_temperature",
+        "paro_air_temp",
         None,
-        'degC',
-        '-50', 
-        '50',
+        "degC",
+        "-50",
+        "50",
         None,
-        ),
-    pressure=Var(
+    ),
+    "pressure": Var(
         float32,
-        'air_pressure',
-        'pressure',
-        'Air pressure as measured from the PAROSCI pressure sensor',
-        'hPa',
-        '850',
-        '1100',
-        None,
-        ),
-    paro_cal_sig=Var(
+        "air_pressure",
+        "pressure",
+        "Air pressure as measured from the PAROSCI pressure sensor",
+        "hPa",
+        "850",
+        "1100",
+        None,
+    ),
+    "paro_cal_sig": Var(
         float32,
         None,
-        'paro_cal_sig',
+        "paro_cal_sig",
         None,
         None,
         None,
         None,
         None,
-        ),
-    box_rh=Var(
+    ),
+    "box_rh": Var(
         float32,
-        'relative_humidity',
-        'box_rh',
-        'Relative humidity inside the data logger enclosure',
-        '%',
-        '0',
-        '100',
-        None,
-        ),
-    box_air_temp=Var(
+        "relative_humidity",
+        "box_rh",
+        "Relative humidity inside the data logger enclosure",
+        "%",
+        "0",
+        "100",
+        None,
+    ),
+    "box_air_temp": Var(
         float32,
-        'air_temperature',
-        'box_air_temp',
-        'Air temperature inside the data logger enclosure',
-        'degC',
-        '-50',
-        '50',
-        None,
-        ),
-    air_temp_2=Var(
+        "air_temperature",
+        "box_air_temp",
+        "Air temperature inside the data logger enclosure",
+        "degC",
+        "-50",
+        "50",
+        None,
+    ),
+    "air_temp_2": Var(
         float32,
-        'air_temperature',
-        'air_temp_2',
-        'Auxillary air temperature',
-        'degC',
-        '-50',
-        '50',
-        None,
-        ),
-    air_temp_3=Var(
+        "air_temperature",
+        "air_temp_2",
+        "Auxillary air temperature",
+        "degC",
+        "-50",
+        "50",
+        None,
+    ),
+    "air_temp_3": Var(
         float32,
-        'air_temperature',
-        'air_temp_3',
-        'Auxillary air temperature',
-        'degC',
-        '-50',
-        '50',
-        None,
-        ),
-    air_temp_4=Var(
+        "air_temperature",
+        "air_temp_3",
+        "Auxillary air temperature",
+        "degC",
+        "-50",
+        "50",
+        None,
+    ),
+    "air_temp_4": Var(
         float32,
-        'air_temperature',
-        'air_temp_4',
-        'Auxillary air temperature',
-        'degC',
-        '-50',
-        '50',
-        None,
-        ),
-    air_temp_5=Var(
+        "air_temperature",
+        "air_temp_4",
+        "Auxillary air temperature",
+        "degC",
+        "-50",
+        "50",
+        None,
+    ),
+    "air_temp_5": Var(
         float32,
-        'air_temperature',
-        'air_temp_5',
-        'Auxillary air temperature',
-        'degC',
-        '-50',
-        '50',
-        None,
-        ),
-    wind_speed=Var(
+        "air_temperature",
+        "air_temp_5",
+        "Auxillary air temperature",
+        "degC",
+        "-50",
+        "50",
+        None,
+    ),
+    "wind_speed": Var(
         float32,
-        'wind_speed',
-        'wind_speed',
-        'Wind speed',
-        'm*s^-1',
-        '0',
-        '50',
-        None,
-        ),
-    wind_dir=Var(
+        "wind_speed",
+        "wind_speed",
+        "Wind speed",
+        "m*s^-1",
+        "0",
+        "50",
+        None,
+    ),
+    "wind_dir": Var(
         float32,
-        'wind_from_direction',
-        'wind_dir',
-        'Wind direction',
-        'degrees',
-        '0',
-        '360',
-        None,
-        ),
-    rh_shield_freq=Var(
+        "wind_from_direction",
+        "wind_dir",
+        "Wind direction",
+        "degrees",
+        "0",
+        "360",
+        None,
+    ),
+    "rh_shield_freq": Var(
         float32,
         None,
-        'rh_shield_freq',
+        "rh_shield_freq",
         None,
-        'hz',
+        "hz",
         None,
         None,
         None,
-        ),
-    rh=Var(
+    ),
+    "rh": Var(
         float32,
-        'relative_humidity',
-        'rh',
-        'Relative humidity',
-        '%',
-        '0',
-        '100',
-        None,
-        ),
-    air_temp_6_3m=Var(
+        "relative_humidity",
+        "rh",
+        "Relative humidity",
+        "%",
+        "0",
+        "100",
+        None,
+    ),
+    "air_temp_6_3m": Var(
         float32,
-        'air_temperature',
-        'air_temp_6_3m',
-        'Air temperature 6.3m from tower base',
-        'degC',
-        '-50',
-        '50',
-        None,
-        ),
-    dewpoint=Var(
+        "air_temperature",
+        "air_temp_6_3m",
+        "Air temperature 6.3m from tower base",
+        "degC",
+        "-50",
+        "50",
+        None,
+    ),
+    "dewpoint": Var(
         float32,
-        'dew_point_temperature',
-        'dewpoint',
-        'Calculated dewpoint temperature',
-        'degC',
-        '-50',
-        '50',
+        "dew_point_temperature",
+        "dewpoint",
+        "Calculated dewpoint temperature",
+        "degC",
+        "-50",
+        "50",
         None,
     ),
-    rtd_shield_freq=Var(
+    "rtd_shield_freq": Var(
         float32,
         None,
-        'rtd_shied_freq',
+        "rtd_shied_freq",
         None,
         None,
         None,
         None,
         None,
-        ),
-    air_temp=Var(
+    ),
+    "air_temp": Var(
         float32,
-        'air_temperature',
-        'air_temp',
-        'Air temperature',
-        'degC',
-        '-50',
-        '50',
-        None,
-        ),
-    solar_flux=Var(
+        "air_temperature",
+        "air_temp",
+        "Air temperature",
+        "degC",
+        "-50",
+        "50",
+        None,
+    ),
+    "solar_flux": Var(
         float32,
-        'solar_flux',
-        'solar_flux',
-        'Solar flux',
-        'w*m^-2',
-        '0',
-        '3000',
+        "solar_flux",
+        "solar_flux",
+        "Solar flux",
+        "w*m^-2",
+        "0",
+        "3000",
         None,
     ),
-    precip=Var(
+    "precip": Var(
         float32,
         None,
-        'precip',
-        'Precipitation',
-        'mm',
-        '0',
-        '254',
+        "precip",
+        "Precipitation",
+        "mm",
+        "0",
+        "254",
         None,
-        ),
-    accum_precip=Var(
+    ),
+    "accum_precip": Var(
         float32,
-        'accumulated_precipitation',
-        'accum_precip',
-        'Precipitation accumulated since 0Z',
-        'mm',  # data comes from instrument as inches but we typically want millimeters
-        '0',
-        '254',
-        None,
-        ),
-    altimeter=Var(
+        "accumulated_precipitation",
+        "accum_precip",
+        "Precipitation accumulated since 0Z",
+        "mm",  # data comes from instrument as inches but we typically want millimeters
+        "0",
+        "254",
+        None,
+    ),
+    "altimeter": Var(
         float32,
         None,
-        'altimeter',
+        "altimeter",
         None,
-        'inHg',
+        "inHg",
         None,
         None,
         None,
-        ),
-    gust=Var(
+    ),
+    "gust": Var(
         float32,
-        'wind_speed_of_gust',
-        'gust',
-        'Wind gust over the previous 2 minutes',
-        'm/s',
-        '0',
-        '50',
-        None,
-    )
-)
+        "wind_speed_of_gust",
+        "gust",
+        "Wind gust over the previous 2 minutes",
+        "m/s",
+        "0",
+        "50",
+        None,
+    ),
+}
 
 database_dict = {k: v._asdict() for k, v in database.items()}
 
-met_vars = {'air_temp', 'dewpoint', 'rh', 'solar_flux', 'pressure', 'precip', 'accum_precip',
-            'wind_speed', 'wind_dir', 'gust'}
+met_vars = {
+    "air_temp",
+    "dewpoint",
+    "rh",
+    "solar_flux",
+    "pressure",
+    "precip",
+    "accum_precip",
+    "wind_speed",
+    "wind_dir",
+    "gust",
+}
 engr_vars = set(database.keys()) - met_vars
 
 unit_conversions = {}
-unit_conversions['accum_precip'] = lambda x: x * 25.4
+unit_conversions["accum_precip"] = lambda x: x * 25.4
diff --git a/aosstower/station.py b/aosstower/station.py
index 62630ff69638987ac958574cdd9a3ca588d44a15..8494d48def77b06ce7f4f7f37edf053f44e29451 100644
--- a/aosstower/station.py
+++ b/aosstower/station.py
@@ -1,5 +1,4 @@
-"""Station metadata.
-"""
+"""Station metadata."""
 from datetime import timedelta
 
 # Time between data samples in seconds
@@ -17,11 +16,11 @@ LATITUDE = 43.070786
 LONGITUDE = -89.406939
 
 station_info = {
-    'site': 'mendota',
-    'inst': 'buoy',
-    'long_name': 'AOSS Tower',
-    'short_name': 'aoss.tower',
-    'alt': ELEVATION,
-    'lon': LONGITUDE,
-    'lat': LATITUDE,
-}
\ No newline at end of file
+    "site": "mendota",
+    "inst": "buoy",
+    "long_name": "AOSS Tower",
+    "short_name": "aoss.tower",
+    "alt": ELEVATION,
+    "lon": LONGITUDE,
+    "lat": LATITUDE,
+}
diff --git a/aosstower/tests/level_00/test_influxdb.py b/aosstower/tests/level_00/test_influxdb.py
index 7292c6b82db8a31aa5eaa50c8219ef621fa48665..ed7fbde52b48cabeb3f8318ea81a974864e7f531 100755
--- a/aosstower/tests/level_00/test_influxdb.py
+++ b/aosstower/tests/level_00/test_influxdb.py
@@ -1,8 +1,10 @@
 #!/usr/bin/env python
+import datetime
 import unittest
+
 import numpy as np
 import pandas as pd
-import datetime
+
 from aosstower.level_00.influxdb import Updater, construct_url, get_url_data
 
 
@@ -14,12 +16,34 @@ class TestCase:
 
 
 def create_data(size, data_interval=datetime.timedelta(seconds=5)):
-    return [{'wind_speed': i % 119, 'wind_dir': 0, 'box_pressure': i, 'paro_air_temp_period': i,
-             'paro_pressure_period': i, 'paro_air_temp': i, 'pressure': i, 'paro_cal_sig': i,
-             'box_air_temp': i, 'air_temp_2': i, 'air_temp_3': i, 'air_temp_4': i, 'rh_shield_freq': i, 'rel_hum': i,
-             'air_temp_6_3m': i, 'dewpoint': i, 'rtd_shied_freq': i, 'air_temp': i, 'solar_flux': i, 'precip': i,
-             'accum_precip': -99999, 'altimeter': i,
-             'timestamp': datetime.datetime(2019, 1, 1, 0, 3, 33) + data_interval * i} for i in range(size)]
+    return [
+        {
+            "wind_speed": i % 119,
+            "wind_dir": 0,
+            "box_pressure": i,
+            "paro_air_temp_period": i,
+            "paro_pressure_period": i,
+            "paro_air_temp": i,
+            "pressure": i,
+            "paro_cal_sig": i,
+            "box_air_temp": i,
+            "air_temp_2": i,
+            "air_temp_3": i,
+            "air_temp_4": i,
+            "rh_shield_freq": i,
+            "rel_hum": i,
+            "air_temp_6_3m": i,
+            "dewpoint": i,
+            "rtd_shied_freq": i,
+            "air_temp": i,
+            "solar_flux": i,
+            "precip": i,
+            "accum_precip": -99999,
+            "altimeter": i,
+            "timestamp": datetime.datetime(2019, 1, 1, 0, 3, 33) + data_interval * i,
+        }
+        for i in range(size)
+    ]
 
 
 def _isnan(x):
@@ -29,59 +53,161 @@ def _isnan(x):
 class TestInfluxdb(unittest.TestCase):
     def setUp(self):
         self.updater = Updater()
-        self.test_data = TestCase(create_data(209), [
-            {'wind_speed': 17, 'wind_dir': 0, 'box_pressure': 17, 'paro_air_temp_period': 17,
-             'paro_pressure_period': 17, 'paro_air_temp': 17, 'pressure': 17, 'paro_cal_sig': 17,
-             'box_air_temp': 17, 'air_temp_2': 17, 'air_temp_3': 17, 'air_temp_4': 17, 'rh_shield_freq': 17,
-             'rel_hum': 17, 'air_temp_6_3m': 17, 'dewpoint': 17, 'rtd_shied_freq': 17, 'air_temp': 17,
-             'solar_flux': 17, 'precip': 17, 'accum_precip': np.nan, 'altimeter': 17, 'wind_east': 0,
-             'wind_north': 17, 'wind_speed_2m': np.nan, 'wind_dir_2m': np.nan, 'gust_1m': np.nan, 'gust_10m': np.nan,
-             'index': pd.Timestamp('2019-01-01 00:04:58')},
-            {'wind_speed': 77, 'wind_dir': 0, 'box_pressure': 77, 'paro_air_temp_period': 77,
-             'paro_pressure_period': 77, 'paro_air_temp': 77, 'pressure': 77, 'paro_cal_sig': 77,
-             'box_air_temp': 77, 'air_temp_2': 77, 'air_temp_3': 77, 'air_temp_4': 77, 'rh_shield_freq': 77,
-             'rel_hum': 77, 'air_temp_6_3m': 77, 'dewpoint': 77, 'rtd_shied_freq': 77, 'air_temp': 77,
-             'solar_flux': 77, 'precip': 77, 'accum_precip': np.nan, 'altimeter': 77, 'wind_east': 0,
-             'wind_north': 77, 'wind_speed_2m': 65.5, 'wind_dir_2m': 0, 'gust_1m': 77, 'gust_10m': np.nan,
-             'index': pd.Timestamp('2019-01-01 00:09:58')},
-            {'wind_speed': 18, 'wind_dir': 0, 'box_pressure': 137, 'paro_air_temp_period': 137,
-             'paro_pressure_period': 137, 'paro_air_temp': 137, 'pressure': 137, 'paro_cal_sig': 137,
-             'box_air_temp': 137, 'air_temp_2': 137, 'air_temp_3': 137, 'air_temp_4': 137,
-             'rh_shield_freq': 137, 'rel_hum': 137, 'air_temp_6_3m': 137, 'dewpoint': 137,
-             'rtd_shied_freq': 137, 'air_temp': 137, 'solar_flux': 137, 'precip': 137, 'accum_precip': np.nan,
-             'altimeter': 137, 'wind_east': 0, 'wind_north': 18, 'wind_speed_2m': 31.291666666666668,
-             'wind_dir_2m': 0, 'gust_1m': np.nan, 'gust_10m': np.nan, 'index': pd.Timestamp('2019-01-01 00:14:58')},
-            {'wind_speed': 78, 'wind_dir': 0, 'box_pressure': 197, 'paro_air_temp_period': 197,
-             'paro_pressure_period': 197, 'paro_air_temp': 197, 'pressure': 197, 'paro_cal_sig': 197,
-             'box_air_temp': 197, 'air_temp_2': 197, 'air_temp_3': 197, 'air_temp_4': 197,
-             'rh_shield_freq': 197, 'rel_hum': 197, 'air_temp_6_3m': 197, 'dewpoint': 197,
-             'rtd_shied_freq': 197, 'air_temp': 197, 'solar_flux': 197, 'precip': 197, 'accum_precip': np.nan,
-             'altimeter': 197, 'wind_east': 0, 'wind_north': 78, 'wind_speed_2m': 66.5, 'wind_dir_2m': 0,
-             'gust_1m': 78, 'gust_10m': 118, 'index': pd.Timestamp('2019-01-01 00:19:58')}],
-                                  ['http://weatherstation.wunderground.com/weatherstation/updateweatherstation.php?'
-                                   'ID=&PASSWORD=&dateutc=2019-01-01+00%3A04%3A58&winddir=0.0&winddir_avg2m='
-                                   '&windspeedmph=38.02798&windspdmph_avg2m=&windgustmph=&windgustmph_10m='
-                                   '&humidity=17.0&tempf=62.6&baromin=0.5020096628001095&dewptf=62.6'
-                                   '&solarradiation=17.0&rainin=17.0&dailyrainin=&softwaretype=SSEC-RIG'
-                                   '&action=updateraw',
-                                   'http://weatherstation.wunderground.com/weatherstation/updateweatherstation.php?'
-                                   'ID=&PASSWORD=&dateutc=2019-01-01+00%3A09%3A58&winddir=0.0&winddir_avg2m=0.0'
-                                   '&windspeedmph=172.24438&windspdmph_avg2m=146.51957000000002&windgustmph=172.24438'
-                                   '&windgustmph_10m=&humidity=77.0&tempf=170.6&baromin=2.2738084726828487'
-                                   '&dewptf=170.6&solarradiation=77.0&rainin=77.0&dailyrainin=&softwaretype=SSEC-RIG'
-                                   '&action=updateraw',
-                                   'http://weatherstation.wunderground.com/weatherstation/updateweatherstation.php?'
-                                   'ID=&PASSWORD=&dateutc=2019-01-01+00%3A14%3A58&winddir=0.0&winddir_avg2m=0.0'
-                                   '&windspeedmph=40.264920000000004&windspdmph_avg2m=69.99758083333334&windgustmph='
-                                   '&windgustmph_10m=&humidity=137.0&tempf=278.6&baromin=4.045607282565588'
-                                   '&dewptf=278.6&solarradiation=137.0&rainin=137.0&dailyrainin=&softwaretype=SSEC-RIG'
-                                   '&action=updateraw',
-                                   'http://weatherstation.wunderground.com/weatherstation/updateweatherstation.php?'
-                                   'ID=&PASSWORD=&dateutc=2019-01-01+00%3A19%3A58&winddir=0.0&winddir_avg2m=0.0'
-                                   '&windspeedmph=174.48132&windspdmph_avg2m=148.75651000000002&windgustmph=174.48132'
-                                   '&windgustmph_10m=263.95892000000003&humidity=197.0&tempf=386.6'
-                                   '&baromin=5.817406092448327&dewptf=386.6&solarradiation=197.0&rainin=197.0'
-                                   '&dailyrainin=&softwaretype=SSEC-RIG&action=updateraw'])
+        self.test_data = TestCase(
+            create_data(209),
+            [
+                {
+                    "wind_speed": 17,
+                    "wind_dir": 0,
+                    "box_pressure": 17,
+                    "paro_air_temp_period": 17,
+                    "paro_pressure_period": 17,
+                    "paro_air_temp": 17,
+                    "pressure": 17,
+                    "paro_cal_sig": 17,
+                    "box_air_temp": 17,
+                    "air_temp_2": 17,
+                    "air_temp_3": 17,
+                    "air_temp_4": 17,
+                    "rh_shield_freq": 17,
+                    "rel_hum": 17,
+                    "air_temp_6_3m": 17,
+                    "dewpoint": 17,
+                    "rtd_shied_freq": 17,
+                    "air_temp": 17,
+                    "solar_flux": 17,
+                    "precip": 17,
+                    "accum_precip": np.nan,
+                    "altimeter": 17,
+                    "wind_east": 0,
+                    "wind_north": 17,
+                    "wind_speed_2m": np.nan,
+                    "wind_dir_2m": np.nan,
+                    "gust_1m": np.nan,
+                    "gust_10m": np.nan,
+                    "index": pd.Timestamp("2019-01-01 00:04:58"),
+                },
+                {
+                    "wind_speed": 77,
+                    "wind_dir": 0,
+                    "box_pressure": 77,
+                    "paro_air_temp_period": 77,
+                    "paro_pressure_period": 77,
+                    "paro_air_temp": 77,
+                    "pressure": 77,
+                    "paro_cal_sig": 77,
+                    "box_air_temp": 77,
+                    "air_temp_2": 77,
+                    "air_temp_3": 77,
+                    "air_temp_4": 77,
+                    "rh_shield_freq": 77,
+                    "rel_hum": 77,
+                    "air_temp_6_3m": 77,
+                    "dewpoint": 77,
+                    "rtd_shied_freq": 77,
+                    "air_temp": 77,
+                    "solar_flux": 77,
+                    "precip": 77,
+                    "accum_precip": np.nan,
+                    "altimeter": 77,
+                    "wind_east": 0,
+                    "wind_north": 77,
+                    "wind_speed_2m": 65.5,
+                    "wind_dir_2m": 0,
+                    "gust_1m": 77,
+                    "gust_10m": np.nan,
+                    "index": pd.Timestamp("2019-01-01 00:09:58"),
+                },
+                {
+                    "wind_speed": 18,
+                    "wind_dir": 0,
+                    "box_pressure": 137,
+                    "paro_air_temp_period": 137,
+                    "paro_pressure_period": 137,
+                    "paro_air_temp": 137,
+                    "pressure": 137,
+                    "paro_cal_sig": 137,
+                    "box_air_temp": 137,
+                    "air_temp_2": 137,
+                    "air_temp_3": 137,
+                    "air_temp_4": 137,
+                    "rh_shield_freq": 137,
+                    "rel_hum": 137,
+                    "air_temp_6_3m": 137,
+                    "dewpoint": 137,
+                    "rtd_shied_freq": 137,
+                    "air_temp": 137,
+                    "solar_flux": 137,
+                    "precip": 137,
+                    "accum_precip": np.nan,
+                    "altimeter": 137,
+                    "wind_east": 0,
+                    "wind_north": 18,
+                    "wind_speed_2m": 31.291666666666668,
+                    "wind_dir_2m": 0,
+                    "gust_1m": np.nan,
+                    "gust_10m": np.nan,
+                    "index": pd.Timestamp("2019-01-01 00:14:58"),
+                },
+                {
+                    "wind_speed": 78,
+                    "wind_dir": 0,
+                    "box_pressure": 197,
+                    "paro_air_temp_period": 197,
+                    "paro_pressure_period": 197,
+                    "paro_air_temp": 197,
+                    "pressure": 197,
+                    "paro_cal_sig": 197,
+                    "box_air_temp": 197,
+                    "air_temp_2": 197,
+                    "air_temp_3": 197,
+                    "air_temp_4": 197,
+                    "rh_shield_freq": 197,
+                    "rel_hum": 197,
+                    "air_temp_6_3m": 197,
+                    "dewpoint": 197,
+                    "rtd_shied_freq": 197,
+                    "air_temp": 197,
+                    "solar_flux": 197,
+                    "precip": 197,
+                    "accum_precip": np.nan,
+                    "altimeter": 197,
+                    "wind_east": 0,
+                    "wind_north": 78,
+                    "wind_speed_2m": 66.5,
+                    "wind_dir_2m": 0,
+                    "gust_1m": 78,
+                    "gust_10m": 118,
+                    "index": pd.Timestamp("2019-01-01 00:19:58"),
+                },
+            ],
+            [
+                "http://weatherstation.wunderground.com/weatherstation/updateweatherstation.php?"
+                "ID=&PASSWORD=&dateutc=2019-01-01+00%3A04%3A58&winddir=0.0&winddir_avg2m="
+                "&windspeedmph=38.02798&windspdmph_avg2m=&windgustmph=&windgustmph_10m="
+                "&humidity=17.0&tempf=62.6&baromin=0.5020096628001095&dewptf=62.6"
+                "&solarradiation=17.0&rainin=17.0&dailyrainin=&softwaretype=SSEC-RIG"
+                "&action=updateraw",
+                "http://weatherstation.wunderground.com/weatherstation/updateweatherstation.php?"
+                "ID=&PASSWORD=&dateutc=2019-01-01+00%3A09%3A58&winddir=0.0&winddir_avg2m=0.0"
+                "&windspeedmph=172.24438&windspdmph_avg2m=146.51957000000002&windgustmph=172.24438"
+                "&windgustmph_10m=&humidity=77.0&tempf=170.6&baromin=2.2738084726828487"
+                "&dewptf=170.6&solarradiation=77.0&rainin=77.0&dailyrainin=&softwaretype=SSEC-RIG"
+                "&action=updateraw",
+                "http://weatherstation.wunderground.com/weatherstation/updateweatherstation.php?"
+                "ID=&PASSWORD=&dateutc=2019-01-01+00%3A14%3A58&winddir=0.0&winddir_avg2m=0.0"
+                "&windspeedmph=40.264920000000004&windspdmph_avg2m=69.99758083333334&windgustmph="
+                "&windgustmph_10m=&humidity=137.0&tempf=278.6&baromin=4.045607282565588"
+                "&dewptf=278.6&solarradiation=137.0&rainin=137.0&dailyrainin=&softwaretype=SSEC-RIG"
+                "&action=updateraw",
+                "http://weatherstation.wunderground.com/weatherstation/updateweatherstation.php?"
+                "ID=&PASSWORD=&dateutc=2019-01-01+00%3A19%3A58&winddir=0.0&winddir_avg2m=0.0"
+                "&windspeedmph=174.48132&windspdmph_avg2m=148.75651000000002&windgustmph=174.48132"
+                "&windgustmph_10m=263.95892000000003&humidity=197.0&tempf=386.6"
+                "&baromin=5.817406092448327&dewptf=386.6&solarradiation=197.0&rainin=197.0"
+                "&dailyrainin=&softwaretype=SSEC-RIG&action=updateraw",
+            ],
+        )
 
     def test_updater(self):
         output = []
@@ -89,23 +215,23 @@ class TestInfluxdb(unittest.TestCase):
             avg = self.updater.rolling_average(record)
             if avg is not None:
                 output.append({key: avg[key][-1] for key in avg})
-                output[-1]['index'] = avg.index[-1]
-                self.assertGreaterEqual(len(self.test_data.expected_avg), len(output))
-                self.assertEqual(len(self.test_data.expected_avg[len(output) - 1]), len(output[-1]))
+                output[-1]["index"] = avg.index[-1]
+                assert len(self.test_data.expected_avg) >= len(output)
+                assert len(self.test_data.expected_avg[len(output) - 1]) == len(output[-1])
                 for key in output[-1]:
                     if not (_isnan(output[-1][key]) and _isnan(self.test_data.expected_avg[len(output) - 1][key])):
-                        self.assertEqual(self.test_data.expected_avg[len(output) - 1][key], output[-1][key])
-        self.assertEqual(len(self.test_data.expected_avg), len(output))
+                        assert self.test_data.expected_avg[len(output) - 1][key] == output[-1][key]
+        assert len(self.test_data.expected_avg) == len(output)
 
     def test_construct_url(self):
         output = []
         for record in self.test_data.input:
             avg = self.updater.rolling_average(record)
             if avg is not None:
-                output.append(construct_url(get_url_data(avg, '', '')))
-                self.assertGreaterEqual(len(self.test_data.expected_url), len(output))
-                self.assertEqual(self.test_data.expected_url[len(output) - 1], output[-1])
-        self.assertEqual(len(self.test_data.expected_url), len(output))
+                output.append(construct_url(get_url_data(avg, "", "")))
+                assert len(self.test_data.expected_url) >= len(output)
+                assert self.test_data.expected_url[len(output) - 1] == output[-1]
+        assert len(self.test_data.expected_url) == len(output)
 
 
 def suite():
diff --git a/aosstower/tests/level_00/test_parser.py b/aosstower/tests/level_00/test_parser.py
index 90d4b086a7557732b088b5172c0d123392283d15..4a069e8267e8fb56c501acd746c50d8cabe8ee1d 100644
--- a/aosstower/tests/level_00/test_parser.py
+++ b/aosstower/tests/level_00/test_parser.py
@@ -3,61 +3,65 @@ from datetime import datetime
 
 
 class ParserV0Tests(unittest.TestCase):
-
-    line = ("TIME 0 ACCURAIN 0.000000 TEMP107_4 8.139600 "
-            "LI200X 0.066020 TEMP107_1 9.307800 RH41372 92.064000 "
-            "TEMP107_5 -99999.000000 CS105 970.100000 PAROSCI 971.428000 "
-            "WSPD05305 8.663000 TEMP107_3 8.368400 CS10162 65.653000 "
-            "RAIN380M 0.000000 TEMP107_2 8.287800 TEMP41372 8.202300 "
-            "WDIR05305 143.380000\n")
+    line = (
+        "TIME 0 ACCURAIN 0.000000 TEMP107_4 8.139600 "
+        "LI200X 0.066020 TEMP107_1 9.307800 RH41372 92.064000 "
+        "TEMP107_5 -99999.000000 CS105 970.100000 PAROSCI 971.428000 "
+        "WSPD05305 8.663000 TEMP107_3 8.368400 CS10162 65.653000 "
+        "RAIN380M 0.000000 TEMP107_2 8.287800 TEMP41372 8.202300 "
+        "WDIR05305 143.380000\n"
+    )
 
     def _cut(self):
         from aosstower.level_00.parser import ParserV0
+
         return ParserV0()
 
     def test_maybe_mine(self):
         parser = self._cut()
-        self.assertTrue(parser.maybe_mine(self.line))
+        assert parser.maybe_mine(self.line)
 
-        bad_line = 'xx' + self.line
-        self.assertFalse(parser.maybe_mine(bad_line))
+        bad_line = "xx" + self.line
+        assert not parser.maybe_mine(bad_line)
 
     def test_record_format(self):
         parser = self._cut()
 
         record = parser.make_frame(self.line)
 
-        self.assertIn('stamp', record)
-        self.assertEqual(record['stamp'], datetime(1970, 1, 1))
+        assert "stamp" in record
+        assert record["stamp"] == datetime(1970, 1, 1)
 
 
 class ParserV1V2Tests(unittest.TestCase):
-
-    line = ("1,1970,1,0000,0,976.59,5.8564,30.085,25.893,977.36,58732,"
-            "47.375,24.234,23.865,22.615,37.219,6.9222,67.398,145.2,45.581,"
-            "22.669,10.417,145.2,22.665,163.94,0,0,30.015\n")
+    line = (
+        "1,1970,1,0000,0,976.59,5.8564,30.085,25.893,977.36,58732,"
+        "47.375,24.234,23.865,22.615,37.219,6.9222,67.398,145.2,45.581,"
+        "22.669,10.417,145.2,22.665,163.94,0,0,30.015\n"
+    )
 
     def _cut(self):
         from aosstower.level_00.parser import ParserV1V2
+
         return ParserV1V2()
 
     def test_maybe_mine(self):
         parser = self._cut()
-        self.assertTrue(parser.maybe_mine(self.line))
+        assert parser.maybe_mine(self.line)
 
-        bad_line = 'xx,' + self.line.strip()
-        self.assertFalse(parser.maybe_mine(bad_line))
+        bad_line = "xx," + self.line.strip()
+        assert not parser.maybe_mine(bad_line)
 
     def test_record_format(self):
         parser = self._cut()
 
         record = parser.make_frame(self.line)
 
-        self.assertIn('stamp', record)
-        self.assertEqual(record['stamp'], datetime(1970, 1, 1))
+        assert "stamp" in record
+        assert record["stamp"] == datetime(1970, 1, 1)
 
     def test_record_supports_v1_and_v2(self):
         parser = self._cut()
 
         parser.make_frame(self.line)
-        parser.make_frame(self.line.strip() + ',999\n')
+        parser.make_frame(self.line.strip() + ",999\n")
diff --git a/aosstower/tests/level_b1/test_nc.py b/aosstower/tests/level_b1/test_nc.py
index 07149e93aa8fe737db0ed13e4b58f3a5d7178617..9a75a172926a1eefd14a2f5060a2e3ab72bdaa87 100644
--- a/aosstower/tests/level_b1/test_nc.py
+++ b/aosstower/tests/level_b1/test_nc.py
@@ -1,15 +1,16 @@
 #!/usr/bin/env python
-# encoding: utf8
 """Test basic NetCDF generation."""
 
 import os
 from datetime import datetime
+
 import numpy as np
 
 
 def get_nc_schema_database(fields=None):
     """Get a version of the NetCDF schema that mimics the nc script."""
     from aosstower import schema
+
     if fields is None:
         fields = schema.met_vars
     mini_database = {k: schema.database_dict[k] for k in fields}
@@ -18,19 +19,23 @@ def get_nc_schema_database(fields=None):
 
 def test_nc_basic1(tmpdir):
     """Test basic usage of the NetCDF generation."""
+    from netCDF4 import Dataset
+
     from aosstower.level_b1.nc import create_giant_netcdf
     from aosstower.tests.utils import get_cached_level_00
-    from netCDF4 import Dataset
 
     input_files = list(get_cached_level_00(num_files=2))
-    nc_out = tmpdir.join('test.nc')
+    nc_out = tmpdir.join("test.nc")
     create_giant_netcdf(
-        input_files, str(nc_out), True, None,
+        input_files,
+        str(nc_out),
+        True,
+        None,
         start=datetime(2020, 1, 2, 0, 0, 0),
-        interval_width='1T',
+        interval_width="1T",
         database=get_nc_schema_database(),
     )
     assert os.path.isfile(nc_out)
-    with Dataset(nc_out, 'r') as nc:
-        sflux = nc['solar_flux'][:]
+    with Dataset(nc_out, "r") as nc:
+        sflux = nc["solar_flux"][:]
         assert np.count_nonzero(sflux.mask) == 2
diff --git a/aosstower/tests/utils.py b/aosstower/tests/utils.py
index d6fedde4af41d88b10b85599a78f9b17bebab856..f43dbf8e8d98d8968ffdb9450d65d3037227c4ff 100644
--- a/aosstower/tests/utils.py
+++ b/aosstower/tests/utils.py
@@ -1,11 +1,10 @@
 #!/usr/bin/env python
-# encoding: utf8
 """Utilities for running tests."""
 
 import os
 import urllib.request
 
-CACHE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'test_data')
+CACHE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "test_data")
 
 
 def get_cached_level_00(num_files=2):
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000000000000000000000000000000000000..9a9d4ffde4b4a73f5cef9036994a5d8f1062e15d
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,81 @@
+[build-system]
+requires = ["setuptools>=45", "wheel", "setuptools_scm[toml]>=6.2", 'setuptools_scm_git_archive']
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools_scm]
+write_to = "aosstower/version.py"
+
+[tool.isort]
+sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"]
+profile = "black"
+skip_gitignore = true
+default_section = "THIRDPARTY"
+known_first_party = "aosstower"
+line_length = 120
+skip = [".gitignore", "aosstower/version.py"]
+
+[tool.black]
+line-length = 120
+exclude = '''
+(
+  \.git
+  | build
+  | dist
+  | aosstower/version\.py
+)
+
+'''
+
+[tool.ruff]
+line-length = 120
+select = [
+    "E", "F", "W", "C90", "I", "N", "D", "UP", "YTT", "S", "BLE", "B", "A",
+    "COM", "C4", "T10", "EXE", "ISC", "ICN", "INP", "PIE", "T20", "PYI", "PT",
+    "Q", "RSE", "RET", "SLF", "SIM", "TID", "TCH", "ARG", "PTH", "ERA", "PD",
+    "PGH", "PL", "TRY", "NPY", "RUF",
+]
+ignore = ["D100", "D101", "D102", "D103", "D104", "D105", "D106", "D107", "D203", "D213", "B008"]
+
+[tool.ruff.per-file-ignores]
+"aosstower/tests/*" = ["S", "PLR2004"]
+
+[tool.mypy]
+python_version = "3.10"
+
+[tool.coverage]
+relative_files = true
+omit = ["aosstower/version.py"]
+
+[project]
+name = "aosstower"
+authors = [
+    {name = "David Hoese", email = "david.hoese@ssec.wisc.edu"},
+]
+description = "UW AOSS Rooftop Instrument Group Met Tower"
+readme = "README.md"
+keywords = ["metobs"]
+license = {text = "MIT"}
+classifiers = [
+    "Framework :: Flask",
+    "Programming Language :: Python :: 3",
+    "Development Status :: 3 - Alpha",
+    "Intended Audience :: Science/Research",
+    "License :: OSI Approved :: MIT License",
+    "Operating System :: OS Independent",
+    "Programming Language :: Python",
+    "Topic :: Scientific/Engineering",
+]
+requires-python = ">=3.10"
+dependencies = [
+    "numpy",
+    "pandas",
+]
+dynamic = ["version"]
+
+[project.optional-dependencies]
+tests = [
+    "pytest",
+]
+
+[tool.setuptools]
+packages = ["aosstower"]
diff --git a/scripts/README.rst b/scripts/README.rst
index ec660e15c3c6a9513da10682b0ddde3e1f2213eb..4b3ee2221c918e8ead153c833fcd2415f0fb2efa 100644
--- a/scripts/README.rst
+++ b/scripts/README.rst
@@ -15,4 +15,4 @@ environment on the processing server. Normally this is something like:
 All scripts in this directory should be command line executable except
 for the "metobs_config.sh" script which is meant as a shared configuration
 script between all the processing scripts. See individual scripts for
-a better description of their purpose.
\ No newline at end of file
+a better description of their purpose.
diff --git a/scripts/archive_tower.sh b/scripts/archive_tower.sh
index e60fbbca5b708d3de9fe6d67e44363b5b64858b7..f47be5b879840e7dc374c4f261bab80dc866500f 100755
--- a/scripts/archive_tower.sh
+++ b/scripts/archive_tower.sh
@@ -44,4 +44,4 @@ fi
 
     log_info "Done"
 
-) 200>$LOCK
\ No newline at end of file
+) 200>$LOCK
diff --git a/scripts/insert_influx.py b/scripts/insert_influx.py
deleted file mode 100644
index 03bfa9e4695d0fd2d14b869dab9004d138011830..0000000000000000000000000000000000000000
--- a/scripts/insert_influx.py
+++ /dev/null
@@ -1,56 +0,0 @@
-import logging
-from datetime import datetime
-
-import requests
-
-from metobs.data import wind_vector_components
-from aosstower.level_00.parser import read_frames
-
-LOG = logging.getLogger(__name__)
-
-
-def _mktime(frame):
-    return int((frame['stamp'] - datetime(1970, 1, 1)).total_seconds() * 10**9)
-
-
-def frame_records(frame):
-    nanos = _mktime(frame)
-    if 'wind_speed' in frame and 'wind_dir' in frame:
-        spd = frame.pop('wind_speed')
-        dr = frame.pop('wind_dir')
-        e, n, _ = wind_vector_components(spd, dr)
-        frame['wind_east'] = e
-        frame['wind_north'] = n
-    for name, value in frame.items():
-        if name == 'stamp':
-            continue
-        valstr = '{}i'.format(value) if isinstance(value, int) else str(value)
-        yield '{},inst=tower,site=aoss value={} {}'.format(name, valstr, nanos)
-
-
-def file_records(filepath):
-    lines = []
-    for frame in read_frames(filepath):
-        lines += frame_records(frame)
-    return lines
-
-
-def insert(filepath):
-    LOG.info('reading %s', filepath)
-    lines = file_records(filepath)
-    LOG.info("posting %d record from %s...", len(lines), filepath)
-    resp = requests.post(
-        'http://bikini.ssec.wisc.edu:8086/write?db=metobs',
-        data='\n'.join(lines))
-    resp.raise_for_status()
-
-
-if __name__ == '__main__':
-    import argparse
-    parser = argparse.ArgumentParser()
-    parser.add_argument('asciifile')
-    args = parser.parse_args()
-
-    logging.basicConfig(level=logging.DEBUG, format='%(asctime)s -- %(message)s')
-    logging.getLogger('requests').setLevel(logging.WARN)
-    insert(args.asciifile)
diff --git a/scripts/make_database.py b/scripts/make_database.py
deleted file mode 100755
index eed9d02baf953355b97bf9371ca743c93bb32275..0000000000000000000000000000000000000000
--- a/scripts/make_database.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env python
-import os
-import sys
-import logging
-from datetime import datetime
-
-from metobscommon.model import RrdModel, ModelError
-from aosstower.level_00.parser import read_records
-from aosstower.level_00.rrd import initialize_rrd
-
-LOG = logging
-
-
-if __name__ == '__main__':
-
-    import argparse
-    parser = argparse.ArgumentParser()
-    parser.add_argument('-o', '--outdb', dest='outdb', default='out.db', 
-            help='Destination RRD database. Must not exist')
-    argdt = lambda v: datetime.strptime(v, '%Y-%m-%d')
-    parser.add_argument('-s', '--db-start', type=argdt, default=datetime.now(),
-            help='Reference start date for database (YYYY-MM-DD)')
-    parser.add_argument('-d', '--db-days', type=int, default=366,
-            help='Size of DB in days')
-    parser.add_argument('-i', dest='files', type=argparse.FileType('r'),
-            help="File containing list of time sorted input data files")
-
-    args = parser.parse_args()
-
-    logging.basicConfig(level=logging.INFO)
-
-    if os.path.exists(args.outdb):
-        os.remove(args.outdb)
-    LOG.info("initializing database at start=%s for %d days",
-             args.db_start, args.db_days)
-    initialize_rrd(args.outdb, args.db_start, days=args.db_days)
-    rrd = RrdModel(args.outdb)
-
-    if args.files is None:
-        LOG.info("files list not provided, reading from stdin")
-        LOG.info("Enter time ordered data files, one per line, ^D when done")
-        args.files = sys.stdin
-
-
-    for each in args.files.readlines():
-        fpath = each.strip()
-        if not os.path.exists(fpath):
-            LOG.warn("%s does not exist", fpath)
-            continue
-
-        LOG.info("adding %s", fpath)
-        for record in read_records(fpath):
-            try:
-                rrd.add_record(record['stamp'], record)
-            except ModelError as err:
-                LOG.error("Insert failed: %s", err)
-
diff --git a/scripts/metobs_config.sh b/scripts/metobs_config.sh
index 26f0a01ea1523ae2129d0ac5c2a3b4edeba8b6fa..0b7facc08a831f00daab5bd5b9ccde0b8ed92e5a 100644
--- a/scripts/metobs_config.sh
+++ b/scripts/metobs_config.sh
@@ -57,4 +57,4 @@ cache_level_b1_file() {
     echo "${TOWER_CACHE_DIR}/level_b1/version_00/${year}/${month}/${day}/aoss_tower.${year}-${month}-${day}.nc"
     # old:
 #    echo "${TOWER_CACHE_DIR}/level_b1/version_00/${year}/${month}/aoss_tower.${year}-${month}-${day}.nc"
-}
\ No newline at end of file
+}
diff --git a/scripts/start_tower_influxdb_ingest.sh b/scripts/start_tower_influxdb_ingest.sh
index cf41756841abbd390d88c184e4e31e0083073360..07d39eded42397fc8f39930892ba8c4dc65812f6 100755
--- a/scripts/start_tower_influxdb_ingest.sh
+++ b/scripts/start_tower_influxdb_ingest.sh
@@ -17,4 +17,4 @@ if [ -n "$SCRN" ]; then
 else
     screen -S $NAME -d -m $PYENV/bin/python -m aosstower.level_00.influxdb -vvv --weather-underground --ldmp RIGTower
     echo Started in screen $(screen -list | grep $NAME | awk '{print $1}')
-fi
\ No newline at end of file
+fi
diff --git a/scripts/start_tower_mcast.sh b/scripts/start_tower_mcast.sh
index 88f4935d53c80e37d4f69eb9d227d4b63b0892c3..ba6fb7bc2af658a14c0ff8c041658f8f5196714a 100755
--- a/scripts/start_tower_mcast.sh
+++ b/scripts/start_tower_mcast.sh
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 #
-# Runs the tower ingest in a screen session named 'ceilo_ingest'. 
+# Runs the tower ingest in a screen session named 'ceilo_ingest'.
 #
 # If a screen session is already running with that name, exit, otherwise start
 # a new detached screen session named 'ceilo_ingest'.
@@ -14,7 +14,7 @@ NAME=tower_mcast
 CONFIG=$HOME/tower/tower_mcast.cfg
 PYENV=$HOME/env
 SCRN=$(screen -list | grep $NAME | awk '{print $1}')
-if [ -n "$SCRN" ]; then 
+if [ -n "$SCRN" ]; then
 echo Screen already running: $SCRN
 else
 screen -S $NAME -d -m $PYENV/bin/python -m metobs.tower.util.json_gen -vvv \
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..207d834481ffc0ddfb3e546969225eddf3047efa
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,8 @@
+[flake8]
+max-line-length = 120
+ignore = D100,D101,D102,D103,D104,D106,D107,W503,E203,B008
+
+[coverage:run]
+relative_files = True
+omit =
+    metobsapi/version.py
diff --git a/setup.py b/setup.py
deleted file mode 100644
index 06ce0968587f7c4b0a37e5a55562fbf717ac8865..0000000000000000000000000000000000000000
--- a/setup.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from setuptools import setup, find_packages
-
-setup(
-    name='AossTower',
-    version='0.2',
-    description='UW AOSS Rooftop Instrument Group Met Tower',
-    url='http://metobs.ssec.wisc.edu',
-    install_requires=[
-        'numpy',
-    ],
-    dependency_links=['http://larch.ssec.wisc.edu/cgi-bin/repos.cgi'],
-    packages=find_packages(exclude=['aosstower.tests']),
-    include_package_data=True,
-)