Verified Commit 5c0473bd authored by David Hoese

Add initial modern standards checks (pre-commit failing)

parent 8934c661

Showing with 1222 additions and 896 deletions
[bandit]
skips: B506
exclude: metobsapi/tests
ref-names: $Format:%D$
.git_archival.txt export-subst
...@@ -144,3 +144,5 @@ com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
aosstower/version.py
exclude: '^$'
fail_fast: false
repos:
  - repo: https://github.com/psf/black
    rev: 23.1.0
    hooks:
      - id: black
        language_version: python3
        args:
          - --target-version=py310
  - repo: https://github.com/pycqa/isort
    rev: 5.12.0
    hooks:
      - id: isort
        language_version: python3
  - repo: https://github.com/charliermarsh/ruff-pre-commit
    # Ruff version.
    rev: 'v0.0.254'
    hooks:
      - id: ruff
  # - repo: https://github.com/PyCQA/flake8
  #   rev: 6.0.0
  #   hooks:
  #     - id: flake8
  #       additional_dependencies: [flake8-docstrings, flake8-debugger, flake8-bugbear, mccabe]
  #       args: [--max-complexity, "10"]
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.4.0
    hooks:
      - id: trailing-whitespace
      - id: end-of-file-fixer
      - id: check-yaml
  - repo: https://github.com/scop/pre-commit-shfmt
    rev: v3.6.0-1
    hooks:
      # Choose one of:
      - id: shfmt # native (requires Go to build)
        args: ["-i", "4"]
      #- id: shfmt-docker # Docker image (requires Docker to run)
  #
  # - repo: https://github.com/PyCQA/bandit
  #   rev: '1.7.4'
  #   hooks:
  #     - id: bandit
  #       args: [--ini, .bandit]
  - repo: https://github.com/pre-commit/mirrors-mypy
    rev: 'v1.0.1'  # Use the sha / tag you want to point at
    hooks:
      - id: mypy
        additional_dependencies:
          - types-docutils
          - types-pkg-resources
          - types-PyYAML
          - types-requests
Copyright (c) 2023 SSEC Developers
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
# AOSS Tower
...@@ -4,7 +4,6 @@ from aosstower import station
class Frame(MutableMapping):
    def __init__(self, width=station.DATA_INTERVAL):
        self._data = {}
        self.width = width
...
#!/usr/bin/env python
"""Insert Tower data in to an InfluxDB for real time use."""
import logging
import logging.handlers
import sys
import time
from datetime import timedelta
from urllib.parse import urlencode

import numpy as np
import pandas as pd
import requests
from metobscommon import influxdb
from metobscommon.util import calc
from metobscommon.util.nc import calculate_wind_gust

from aosstower.level_00.parser import read_frames
LOG = logging.getLogger(__name__)
# map station name to InfluxDB tags
...@@ -25,37 +23,37 @@ STATIONS = {
}
# parser symbols to influxdb symbols
SYMBOL_CONVERSIONS = {
    "stamp": "timestamp",
    "box_temp": "box_temp",
    "box_pressure": "box_pressure",
    "paro_air_temp_period": "paro_air_temp_period",
    "paro_pressure_period": "paro_pressure_period",
    "paro_air_temp": "paro_air_temp",
    "pressure": "pressure",
    "paro_cal_sig": "paro_cal_sig",
    "box_rh": "box_rh",
    "box_air_temp": "box_air_temp",
    "air_temp_2": "air_temp_2",
    "air_temp_3": "air_temp_3",
    "air_temp_4": "air_temp_4",
    "air_temp_5": "air_temp_5",
    "wind_speed": "wind_speed",
    "wind_dir": "wind_dir",
    "rh_shield_freq": "rh_shield_freq",
    "rh": "rel_hum",
    "air_temp_6_3m": "air_temp_6_3m",
    "dewpoint": "dewpoint",
    "rtd_shield_freq": "rtd_shied_freq",
    "air_temp": "air_temp",
    "solar_flux": "solar_flux",
    "precip": "precip",
    "accum_precip": "accum_precip",
    "altimeter": "altimeter",
}
SYMBOLS = list(SYMBOL_CONVERSIONS.values())
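The table above is a straight rename map from parser symbol names to InfluxDB field names (note "rh" becomes "rel_hum"). As a hedged sketch of how such a map can be applied (the real call sites live elsewhere in this module), renaming a parsed record is a one-line dict comprehension:

record = {"stamp": "2023-03-01 00:00:05", "rh": 45.2, "wind_speed": 3.1}
renamed = {SYMBOL_CONVERSIONS[k]: v for k, v in record.items() if k in SYMBOL_CONVERSIONS}
print(renamed)  # {'timestamp': '2023-03-01 00:00:05', 'rel_hum': 45.2, 'wind_speed': 3.1}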
class Updater:
    """Append weather record (taken as a dict) and do averages when enough data is ready.

    At least 12 minutes of data is required to do averaging for gust_10m: 10 minutes of wind gusts,
...@@ -64,15 +62,16 @@ class Updater(object):
    This class is created once at startup and calls rolling_average every time new data is available which tries to do
    averaging every submit_interval of data added.
    """

    def __init__(self, data_interval=timedelta(seconds=5), submit_interval=timedelta(minutes=5)):
        """intervals are timedelta objects."""
        self.data = {"timestamp": np.array([])}
        self.data_interval = data_interval.total_seconds()
        self.submit_interval = submit_interval.total_seconds()

    def rolling_average(self, record):
        # Keeps data within 12 minutes.
        time_mask = self.data["timestamp"] > record["timestamp"] - timedelta(minutes=12)
        # Appending to a DataFrame is slow. Instead, this adds to a np array in chunks and passes it to the DataFrame.
        for key in record:
            if self.data.get(key) is None:
...@@ -85,43 +84,45 @@ class Updater(object):
        # current minute was 13 and the submit interval was 5 minutes, then data would be submitted at 18, 23, 28,
        # etc. If data collection went down and the next recorded current minute was 14, then data would be submitted
        # at 19, 24, 29, etc (inconsistent).
        reference = pd.datetime(record["timestamp"].year, 1, 1)
        progress = (record["timestamp"] - reference).total_seconds() % self.submit_interval

        # If data hits or will pass over a submit_interval interval, return data.
        if progress == 0 or progress > self.submit_interval - self.data_interval:
            return self._calculate_averages()
        return None
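To make the year-anchored schedule concrete, here is a standalone sketch of the alignment rule above (stdlib only, illustrative values): progress is measured from January 1, so submits stay on the same five-minute marks no matter when collection restarts.

from datetime import datetime

submit_interval, data_interval = 300.0, 5.0  # seconds, matching the defaults above
for stamp in (datetime(2023, 3, 1, 0, 13, 40), datetime(2023, 3, 1, 0, 15, 0)):
    progress = (stamp - datetime(stamp.year, 1, 1)).total_seconds() % submit_interval
    should_submit = progress == 0 or progress > submit_interval - data_interval
    print(stamp, progress, should_submit)  # 220.0 False, then 0.0 True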
    def _calculate_averages(self):
        frame = pd.DataFrame(self.data).set_index("timestamp")
        frame = frame.mask(frame == -99999.0)
        # Add wind direction components so we can average wind direction properly.
        frame["wind_east"], frame["wind_north"], _ = calc.wind_vector_components(frame["wind_speed"], frame["wind_dir"])
        frame["wind_dir"] = calc.wind_vector_degrees(frame["wind_east"], frame["wind_north"])

        if "air_temp" in frame and "rh" in frame and ("dewpoint" in frame or "dewpoint_mean" in frame):
            LOG.info(
                "'dewpoint' is missing from the input file, will calculate " "it from air temp and relative humidity",
            )
            frame["dewpoint"] = calc.dewpoint(frame["air_temp"], frame["rh"])

        # https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases
        # 2 minute rolling average.
        winds_frame_2m = frame[["wind_speed", "wind_east", "wind_north"]].rolling("2T", closed="right").mean()
        # Makes 2 minute averages nans if given less than 2 minutes of data.
        if len(frame[frame.index > frame.index[-1] - timedelta(minutes=2)]) < 120 / self.data_interval:
            winds_frame_2m = winds_frame_2m.mask(winds_frame_2m["wind_speed"] > -1)
        frame["wind_speed_2m"] = winds_frame_2m["wind_speed"]
        frame["wind_dir_2m"] = calc.wind_vector_degrees(winds_frame_2m["wind_east"], winds_frame_2m["wind_north"])

        # 1 minute rolling peaks
        wind_peak_1m = frame["wind_speed"].rolling(window="1T", closed="right").max()
        # criteria for a fast wind to be considered a wind gust. Note that it needs winds_frame_2m['wind_speed'].
        gust_mask = (winds_frame_2m["wind_speed"] >= calc.knots_to_mps(9.0)) & (
            wind_peak_1m >= winds_frame_2m["wind_speed"] + calc.knots_to_mps(5.0)
        )
        frame["gust_1m"] = wind_peak_1m.mask(~gust_mask)
        frame["gust_10m"] = calculate_wind_gust(frame["wind_speed"], winds_frame_2m["wind_speed"])
        # Makes 10 minute gusts before 12 minutes nans because data is insufficient.
        if len(frame) < 720 / self.data_interval:
            frame["gust_10m"] = frame["gust_10m"].mask(frame["gust_10m"] > -1.0)
        return frame.fillna(value=np.nan)
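The gust criterion above has two parts: the 2-minute mean wind must reach 9 knots, and the 1-minute peak must exceed that mean by at least 5 knots. A scalar sketch with a local stand-in for calc.knots_to_mps (assumed to be the standard 0.514444 m/s per knot):

def knots_to_mps_sketch(knots):
    return knots * 0.514444  # assumed conversion factor

avg_2m = 5.5   # m/s, 2-minute mean wind
peak_1m = 8.4  # m/s, 1-minute peak
is_gust = avg_2m >= knots_to_mps_sketch(9.0) and peak_1m >= avg_2m + knots_to_mps_sketch(5.0)
print(is_gust)  # True: mean clears ~4.63 m/s and the peak clears mean + ~2.57 m/s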
...@@ -138,9 +139,9 @@ def construct_url(data):
    # Sends null data as empty string to show that recording worked, but that the data is nonexistent.
    for key, val in data.items():
        if val is None or isinstance(val, float) and np.isnan(val):
            data[key] = ""
    # Makes url be "url escaped".
    return "http://weatherstation.wunderground.com/weatherstation/updateweatherstation.php?" + urlencode(data)
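For example (hypothetical station ID, shortened field set), a record with a missing value serializes like this:

print(construct_url({"ID": "KXXX0000", "dateutc": "2023-03-01 00:15:00", "tempf": 41.0, "rainin": None}))
# http://weatherstation.wunderground.com/weatherstation/updateweatherstation.php?ID=KXXX0000&dateutc=2023-03-01+00%3A15%3A00&tempf=41.0&rainin=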
def get_url_data(avg, wu_id, wu_pw):
...@@ -149,67 +150,105 @@ def get_url_data(avg, wu_id, wu_pw):
    # Previously at:
    # https://feedback.weather.com/customer/en/portal/articles/2924682-pws-upload-protocol?b_id=17298
    timestamp = avg.index[-1]
    wind_dir = avg["wind_dir"][-1]
    wind_dir_2m = avg["wind_dir_2m"][-1]
    rel_hum = avg["rel_hum"][-1]
    solar_flux = avg["solar_flux"][-1]
    precip = avg["precip"][-1]
    accum_precip = avg["accum_precip"][-1]
    # Converts from m/s to mph.
    wind_speed = avg["wind_speed"][-1] * 2.23694
    wind_speed_2m = avg["wind_speed_2m"][-1] * 2.23694
    gust_1m = avg["gust_1m"][-1] * 2.23694
    gust_10m = avg["gust_10m"][-1] * 2.23694
    # Converts degrees Celsius to degrees Fahrenheit
    air_temp = avg["air_temp"][-1] * 9.0 / 5.0 + 32.0
    dewpoint = avg["dewpoint"][-1] * 9.0 / 5.0 + 32.0
    # hpa to barometric pressure inches
    pressure = avg["pressure"][-1] * 0.02952998016471232
    return {
        "ID": wu_id,
        "PASSWORD": wu_pw,
        "dateutc": timestamp,
        "winddir": wind_dir,
        "winddir_avg2m": wind_dir_2m,
        "windspeedmph": wind_speed,
        "windspdmph_avg2m": wind_speed_2m,
        "windgustmph": gust_1m,
        "windgustmph_10m": gust_10m,
        "humidity": rel_hum,
        "tempf": air_temp,
        "baromin": pressure,
        "dewptf": dewpoint,
        "solarradiation": solar_flux,
        "rainin": precip,
        "dailyrainin": accum_precip,
        "softwaretype": "SSEC-RIG",
        "action": "updateraw",
    }
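A quick sanity check on the conversion constants used above (values are standard references, printed for illustration):

print(10.0 * 2.23694)                 # 22.3694 mph from 10 m/s
print(25.0 * 9.0 / 5.0 + 32.0)        # 77.0 degF from 25 degC
print(1013.25 * 0.02952998016471232)  # ~29.92 inHg from standard sea-level pressure in hPa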
def main():
    import argparse

    parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument("--logfn", help="Log to rotating file (Not Implemented)")
    parser.add_argument(
        "--debug",
        action="store_true",
        help="Don't submit records to the database, print them to stdout",
    )
    parser.add_argument(
        "-t",
        "--tail",
        action="store_true",
        help=(
            "Tail file forever, not returning. This will start at the end "
            "of the file and insert any new data added after starting"
        ),
    )
    parser.add_argument(
        "--ldmp",
        action="store_true",
        help="Treat `src` file as a station name and read records from " "LoggerNet LDMP server (port: 1024)",
    )
    parser.add_argument("--tables", nargs="*", default=["1"], help="LoggerNet LDMP tables to read in")
    parser.add_argument("--host", default=influxdb.DB_HOST, help="Hostname of database connection")
    parser.add_argument("--port", default=influxdb.DB_PORT, help="Port of database connection")
    parser.add_argument("--dbname", default=influxdb.DB_NAME, help="Name of database to modify")
    parser.add_argument(
        "-s",
        "--station",
        dest="station",
        default="AOSS Tower",
        choices=STATIONS.keys(),
        help="Name of station to use to determine symbols",
    )
    parser.add_argument(
        "-v",
        "--verbose",
        dest="verbosity",
        action="count",
        default=0,
        help="each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG",
    )
    parser.add_argument("--sleep-interval", type=float, help="Seconds to wait between submitting each record")
    parser.add_argument("--weather-underground", action="store_true", help="Send new records to wunderground.com")
    parser.add_argument("--wu-id", default="KWIMADIS52", help="Weather underground station ID")
    parser.add_argument(
        "--wu-password-file",
        default="/home/metobs/wunderground_password.txt",
        help="File containing the password for the weather underground upload",
    )
    parser.add_argument(
        "--bulk",
        type=int,
        default=1,
        help="Number of records to buffer before sending to "
        "the database. For large inserts this should be "
        "between 5000 and 10000. Default: 1",
    )
    parser.add_argument("src", help="Level 0 raw data file or station name " "for LDMP reading")
    args = parser.parse_args()

    levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
...@@ -222,13 +261,14 @@ def main():
    wu_pw = None
    if args.weather_underground:
        wu_pw = open(args.wu_password_file).read().strip()

    if args.ldmp:
        from aosstower.level_00.parser import LDMPGenerator

        record_gen = LDMPGenerator(args.src, args.tables)
    else:
        src = open(args.src)
        record_gen = read_frames(src, tail=args.tail)

    try:
...@@ -251,8 +291,13 @@ def main():
        try:
            resp = requests.post(url, timeout=15)
            if resp.status_code != 200:
                LOG.warning(
                    "Data failed to upload to {} with status code {}: {}".format(
                        url,
                        resp.status_code,
                        resp.text,
                    ),
                )
            else:
                LOG.info("Upload successful")
        except requests.Timeout:
...@@ -263,7 +308,7 @@ def main():
        if args.sleep_interval:
            time.sleep(args.sleep_interval)
    except (RuntimeError, ValueError, KeyError, requests.RequestException):
        if hasattr(record_gen, "close"):
            record_gen.close()
...
...@@ -10,43 +10,45 @@ to make sure errors are logged:
import logging
import logging.handlers
import sys

from metobscommon import legacy_db

from aosstower.level_00.parser import read_frames

logging.addLevelName(9, "TRACE")
LOG = logging.getLogger(__name__)
ERR = logging.getLogger(__name__ + ".error")

_symbol_record_map = (
    (1, "station"),
    (2, "year"),
    (3, "day"),
    (4, "hour_minute"),
    (5, "seconds"),
    (6, "box_pressure"),
    (7, "parosci_air_temp_period"),
    (8, "parosci_pressure_period"),
    (9, "parosci_air_temp"),
    (10, "pressure"),
    (11, "parosci_calc_sig"),
    (12, "box_rh"),
    (13, "box_air_temp"),
    (14, "temp2"),
    (15, "temp3"),
    (16, "temp4"),
    (17, "wind_speed"),
    (18, "wind_direction"),
    (19, "rh_shield_freq"),
    (20, "relative_humidity"),
    (21, "air_temp_6_3"),
    (22, "dewpoint"),
    (23, "rtd_shield_freq"),
    (24, "air_temp"),
    (25, "solar_flux"),
    (26, "precipitation"),
    (27, "accumulated_precipitation"),
    (28, "altimeter"),
)
...@@ -62,32 +64,44 @@ def configure_logging(level, logfn=None):
    ERR.setLevel(logging.INFO)

    if logfn:
        maxsize = 20 * 1024**3  # 20 MB
        rotating_file = logging.handlers.RotatingFileHandler(logfn, mode="a", maxBytes=maxsize, backupCount=5)
        rotating_file.setFormatter(logging.Formatter("%(asctime)s: %(message)s"))
        LOG.addHandler(rotating_file)
def main():
    import argparse

    parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        "--loglvl",
        dest="loglvl",
        default="warn",
        help="logging level",
        choices=(["trace", "debug", "info", "warn", "error"]),
    )
    parser.add_argument("--logfn", help="Log to rotating file")
    parser.add_argument(
        "--debug",
        action="store_true",
        help="Don't submit records to the database, print them to stdout",
    )
    parser.add_argument(
        "-s",
        "--station",
        dest="station",
        default="RIG Tower",
        help="Name of station to use to determine symbols",
    )
    parser.add_argument(
        "--ldmp",
        action="store_true",
        help="Treat `src` file as a station name and read records from LoggerNet LDMP server (port: 1024)",
    )
    parser.add_argument("--tables", nargs="*", default=["1"], help="LoggerNet LDMP tables to read in")
    parser.add_argument("dburl", help="Database to insert into")
    parser.add_argument("src", help="Level 0 raw data file")
    args = parser.parse_args()

    lvl = logging.getLevelName(args.loglvl.upper())
...@@ -106,14 +120,15 @@ def main():
    if args.ldmp:
        from aosstower.level_00.parser import LDMPGenerator

        record_gen = LDMPGenerator(args.src, args.tables, symbol_names=[x[1] for x in _symbol_record_map])
    else:
        src = open(args.src)
        record_gen = read_frames(src, tail=args.tail)

    for _idx, record in enumerate(record_gen):
        data = (record["stamp"], record)
        LOG.info("Sending record: %s", record["stamp"].isoformat(" "))
        if args.debug:
            # we just want the above message
            continue
...
...@@ -39,24 +39,26 @@ we have 2 altimeter values but as far as I know altimeter2 is not used.
XXX: Fill value in version 2 seems to be -99999.
"""
import io
import logging
import re
import time
from datetime import datetime, timedelta

from metobscommon.util.mytime import hhmm_to_offset

from aosstower.schema import database

LOG = logging.getLogger(__name__)
class LineParseError(Exception):
    """Error parsing line of frame data."""

    @classmethod
    def raise_wrapped(cls, exception, msg=None):
        import sys

        traceback = sys.exc_info()[2]
        msg = msg or str(exception)
        exc = cls(msg)
...@@ -65,56 +67,56 @@ class LineParseError(Exception):
def _make_frame(data, new_symbols=None, rename_timestamp=False):
    """Construct a frame from a list of tuples."""
    frame = {}
    for idx, (key, value) in enumerate(data):
        if key in ["stamp", "timestamp"]:
            frame["stamp" if rename_timestamp else key] = value
            continue
        if key in database:
            try:
                new_key = new_symbols[idx] if new_symbols and len(new_symbols) > idx else key
                frame[new_key] = database[key].type(value)
            except (ValueError, TypeError):
                raise LineParseError("error converting '%s' using %s", value, database[key].type)
    return frame
class ParserV0:
    """Parses Version 0 data lines."""

    fill_value = -99999.0

    # maps v0 names to names in schema db
    names = {
        "ACCURAIN": "accum_precip",
        "TEMP107_1": "box_air_temp",
        "TEMP107_2": "air_temp_2",
        "TEMP107_3": "air_temp_3",
        "TEMP107_4": "air_temp_4",
        "TEMP107_5": "air_temp_5",
        "LI200X": "solar_flux",
        "RH41372": "rh",
        "TEMP41372": "air_temp",
        "CS105": "box_pressure",
        "PAROSCI": "pressure",
        "WSPD05305": "wind_speed",
        "WDIR05305": "wind_dir",
        "CS10162": "box_rh",
        "RAIN380M": "precip",
    }

    @staticmethod
    def maybe_mine(line):
        return line.startswith("TIME")

    def make_frame(self, line):
        parts = line.split()
        if len(parts) != 32:
            raise LineParseError("Expected 32 components", line)
        raw_data = [("version", 0)]
        for k1, v1 in zip(parts[0::2], parts[1::2]):
            if k1 == "TIME":
                continue
            if k1 in self.names:
                raw_data.append((self.names[k1], v1))
...@@ -123,31 +125,53 @@ class ParserV0(object):
        try:
            time_str = parts[1]
            unix_time = int(time_str)
            raw_data.append(("stamp", datetime.utcfromtimestamp(unix_time)))
        except (ValueError, TypeError):
            raise LineParseError("Could not parse stamp", line)
        return _make_frame(raw_data)
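A Version 0 line is whitespace-separated NAME VALUE pairs led by TIME and a Unix timestamp, which is why the pairing above strides the split parts by two. A shortened sketch (a real frame must have exactly 32 components):

line = "TIME 1677628800 TEMP41372 12.3 RH41372 45.6"
parts = line.split()
print(list(zip(parts[0::2], parts[1::2])))
# [('TIME', '1677628800'), ('TEMP41372', '12.3'), ('RH41372', '45.6')]
# parts[1] -> datetime.utcfromtimestamp(1677628800) -> 2023-03-01 00:00:00 stamp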
class ParserV1V2:
    """Parses Version 1 & 2 data lines."""

    fill_value = -99999.0

    names = [
        "station_id",
        "year",
        "doy",
        "hhmm",
        "sec",
        "box_pressure",
        "paro_air_temp_period",
        "paro_pressure_period",
        "paro_air_temp",
        "pressure",
        "paro_cal_sig",
        "box_rh",
        "box_air_temp",
        "air_temp_2",
        "air_temp_3",
        "air_temp_4",
        "wind_speed",
        "wind_dir",
        "rh_shield_freq",
        "rh",
        "air_temp_6_3m",
        "dewpoint",
        "rtd_shield_freq",
        "air_temp",
        "solar_flux",
        "precip",
        "accum_precip",
        "altimeter",
    ]  # , 'altimeter2']

    # These are the new fields in the input files but unused by the rest of
    # the tower code. At the risk of breaking other pieces of software, these
    # are not included in the above list, but are documented here for future
    # reference.
    #
    # Altimeter2 (slightly different calculation, same units as Altimeter)
    # LW_in (W/m^2)
    # tempPyrg (Kelvin, temperature of pyrgeometer)
    # pyrgTP (W/m^2, raw reading from the pyrgeometer thermopile)
    # pyrgTC (W/m^2, temperature correction for the pyrgeometer)
...@@ -155,27 +179,27 @@ class ParserV1V2(object):
    @staticmethod
    def maybe_mine(line):
        return re.search("^\\d,\\d{4},\\d{1,3}", line) is not None

    @staticmethod
    def _get_stamp(parts):
        year = int(parts[1])
        doy = int(parts[2])
        dt = datetime.strptime(f"{int(year):d}.{int(doy):03d}", "%Y.%j")
        secs = hhmm_to_offset(parts[3])
        secs += float(parts[4])
        secs -= secs % 5
        dt += timedelta(seconds=secs)
        return dt

    def make_frame(self, line):
        parts = line.split(",")
        if len(parts) not in [28, 29, 33, 34]:
            raise LineParseError("Expected 28, 29, 33, or 34 parts", line)
        version = {28: 1, 29: 2, 33: 3, 34: 4}[len(parts)]
        raw_data = [("version", version), *list(zip(self.names, parts))]
        try:
            raw_data.append(("stamp", self._get_stamp(parts)))
        except (TypeError, ValueError):
            raise LineParseError("Could not parse timestamp", line)
        return _make_frame(raw_data)
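_get_stamp above builds the timestamp from year, day-of-year, HHMM, and seconds, then snaps it to the 5-second cadence. A standalone sketch with hhmm_to_offset inlined (assuming it converts "1234" to 12 h 34 min in seconds):

from datetime import datetime, timedelta

dt = datetime.strptime("2023.060", "%Y.%j")  # day 60 of 2023 -> March 1
secs = 12 * 3600 + 34 * 60                   # assumed hhmm_to_offset("1234")
secs += 57.0                                 # the seconds column
secs -= secs % 5                             # snap to the 5-second interval
print(dt + timedelta(seconds=secs))          # 2023-03-01 12:34:55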
...@@ -187,13 +211,11 @@ def read_frames(source, error_handler=lambda *a: None, tail=False):
    :param tail: starting from the end of the source (if 'seek' method) read lines forever
    """
    fptr = source if hasattr(source, "readlines") else open(source)
    if tail and hasattr(fptr, "seek"):
        LOG.debug("Seeking to end of frame source")
        fptr.seek(0, io.SEEK_END)

        def gen():
            idx = 0
            while True:
...@@ -203,7 +225,9 @@ def read_frames(source, error_handler=lambda *a: None, tail=False):
                    continue
                yield idx, line
                idx += 1

    else:

        def gen():
            for idx, line in enumerate(fptr):
                if not line.strip():
...@@ -211,7 +235,7 @@ def read_frames(source, error_handler=lambda *a: None, tail=False):
                yield idx, line

    for idx, line in gen():
        if line.startswith("#"):
            continue
        for parser in [ParserV1V2(), ParserV0()]:
            if parser.maybe_mine(line):
def loggernet_to_tower(rec_dict, symbol_names):
    """Convert loggernet record dictionary to our standard naming."""
    # assume that the next record after the traditional frame is the timestamp
    old_symbols = ["timestamp", *ParserV1V2.names]
    new_symbols = ["timestamp", *symbol_names]
    return _make_frame(zip(old_symbols, rec_dict.values()), new_symbols, rename_timestamp=True)
class LDMPGenerator:
    """Class to manage receiving records from Loggernet LDMP server."""

    def __init__(self, station_name, tables, symbol_names=ParserV1V2.names, host="localhost", port=1024):
        from metobscommon.archive.loggernet_receiver import LDMPReceiver

        self.station_name = station_name
        self.tables = tables
        self.symbol_names = symbol_names
...@@ -248,10 +272,10 @@ class LDMPGenerator(object):
    def __iter__(self):
        from metobscommon.archive.loggernet_receiver import dict_records

        self.receiver.start()
        # This should be a generator of OrderedDicts
        dict_rec_gen = dict_records(self.receiver, self.station_name, self.tables)
        return (loggernet_to_tower(x, self.symbol_names) for x in dict_rec_gen)

    def close(self):
...@@ -261,5 +285,5 @@ class LDMPGenerator(object):
        """Last effort to kill the background thread if not done already."""
        try:
            self.close()
        except (ValueError, RuntimeError, OSError):
            pass
"""The data model used for the MetObs widgets using tower data.
"""
import os
from datetime import datetime, timedelta
import rrdtool
from metobscommon.util.calc import altimeter, dewpoint, wind_vector_components
from metobscommon.util.mytime import to_epoch
from aosstower import station
# minimum set of records for the tower
VARS = {'air_temp', 'rh', 'dewpoint',
'wind_speed', 'winddir_east', 'winddir_north',
'pressure', 'precip', 'accum_precip',
'solar_flux', 'altimeter'}
def add_vector_winds(record):
east, north, spd = wind_vector_components(float(record['wind_speed']),
float(record['wind_dir']))
record['winddir_east'] = '%.3d' % east
record['winddir_north'] = '%.3d' % north
record['wind_speed'] = '%.3d' % spd
def add_altimeter(record, elev=station.ELEVATION):
record['altimeter'] = '%.3d' % altimeter(float(record['pressure']), elev)
def add_dewpoint(record):
record['dewpoint'] = '%.3d' % dewpoint(float(record['air_temp']),
float(record['rh']))
def initialize_rrd(filepath, start=None, days=365, data_interval=5):
"""Create a new empty RRD database.
"""
assert not os.path.exists(filepath), "DB already exists"
start = start or (datetime.utcnow() - timedelta(days=days))
# normalize start to data interval
secs = to_epoch(start)
secs -= secs % data_interval
rrdtool.create(filepath,
'--start={}'.format(secs),
'--step={:d}'.format(data_interval),
'DS:air_temp:GAUGE:10:-40:50',
'DS:rh:GAUGE:10:0:100',
'DS:dewpoint:GAUGE:10:0:100',
'DS:wind_speed:GAUGE:10:0:100',
'DS:winddir_north:GAUGE:10:-100:100',
'DS:winddir_east:GAUGE:10:-100:100',
'DS:pressure:GAUGE:10:0:1100',
'DS:precip:GAUGE:10:0:100',
'DS:accum_precip:GAUGE:10:0:100',
'DS:solar_flux:GAUGE:10:0:1000',
'DS:altimeter:GAUGE:10:0:100',
# native resolution
'RRA:AVERAGE:0.5:1:6307200',
# 1 minute
'RRA:AVERAGE:0.5:{:d}:525600'.format(60/data_interval),
# 5 minute
'RRA:AVERAGE:0.5:{:d}:105120'.format(300/data_interval),
# 30 minute
'RRA:AVERAGE:0.5:{:d}:17520'.format(1800/data_interval))
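All four RRA row counts above work out to one year of history at each consolidation step, which this arithmetic sketch makes explicit for the default 5-second data interval:

data_interval = 5
print(365 * 24 * 3600 // data_interval)  # 6307200 native rows = 1 year at 5 s
print(60 // data_interval)               # 12 points per 1-minute row; 525600 rows = 1 year
print(300 // data_interval)              # 60 points per 5-minute row; 105120 rows = 1 year
print(1800 // data_interval)             # 360 points per 30-minute row; 17520 rows = 1 year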
...@@ -10,8 +10,11 @@ except ImportError:
    pd = None
    Series = np.ndarray

NaN = float("nan")


def is_nan(a):
    return a != a
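is_nan works because IEEE 754 defines NaN as unequal to everything, including itself; no import is needed:

nan = float("nan")
print(nan == nan, nan != nan)  # False True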
def knots_to_mps(knots):
...@@ -19,8 +22,7 @@ def knots_to_mps(knots):
def dewpoint(tempC, relhum):
    """Algorithm from Tom Whittaker tempC is the temperature in degrees Celsius,
    relhum is the relative humidity as a percentage.

    :param tempC: temperature in celsius
...@@ -32,8 +34,7 @@ def dewpoint(tempC, relhum):
    gasconst = 461.5
    latheat = 2500800.0

    dp = 1.0 / (1.0 / (273.15 + tempC) - gasconst * np.log((0.0 + relhum) / 100) / (latheat - tempC * 2397.5))

    if pd is not None and isinstance(dp, pd.Series):
        return pd.concat([dp - 273.15, tempC], axis=1).min(axis=1)
...@@ -41,37 +42,35 @@ def relhum(airTempK, dewpointTempK):
def relhum(airTempK, dewpointTempK):
    """Algorithm derived by David Hoese from the above
    dewpoint(tempC, relhum) function, both parameters are in Kelvin units.

    :param airTempK: air temperature in Kelvin
    :param dewpointTempK: dewpoint temp in Kelvin
    """
    if airTempK is None or dewpointTempK is None:
        return NaN

    gas_constant = 461.5
    latheat = 2500800.0
    # Only one section of the equation
    latpart = latheat - (airTempK - 273.15) * 2397.5
    relativehum = 100 * math.e ** ((latpart / airTempK - latpart / dewpointTempK) / gas_constant)
    return relativehum
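Since relhum was derived as the inverse of dewpoint, a round trip through the two functions should roughly recover the input humidity (values approximate):

dp_c = dewpoint(20.0, 50.0)           # ~9.2 degC for 20 degC air at 50% relative humidity
print(relhum(293.15, 273.15 + dp_c))  # ~50, recovering the input percentage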
def potentialtemp(airTempK, pressureMB):
    """Algorithm from David Hoese to calculate potential temperature.

    :param airTempK: air temperature in Kelvin
    :param pressureMB: air pressure in millibars
    """
    if airTempK is None or pressureMB is None:
        return NaN

    pT = airTempK * (pressureMB.max() / pressureMB) ** 0.286
    return pT
...@@ -86,11 +85,11 @@ def altimeter(p, alt):
    :returns: altimeter in inHg
    """
    n = 0.190284
    c1 = 0.0065 * pow(1013.25, n) / 288.0
    c2 = alt / pow((p - 0.3), n)
    ff = pow(1.0 + c1 * c2, 1.0 / n)
    return (p - 0.3) * ff * 29.92 / 1013.25
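Two spot checks of the formula (illustrative inputs; the second uses a station roughly 325 m above sea level):

print(altimeter(1013.25, 0.0))  # ~29.91 inHg: standard sea-level pressure, zero elevation
print(altimeter(980.0, 325.0))  # ~30.07 inHg: the elevation term raises the reduced value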
def dir2txt(val):
...@@ -112,14 +111,16 @@ def dir2txt(val):
    assert val >= 0 and val < 360, "'%s' out of range" % val
    dirs = ("NNE", "NE", "ENE", "E", "ESE", "SE", "SSE", "S", "SSW", "SW", "WSW", "W", "WNW", "NW", "NNW")

    if (val >= 348.75 and val <= 360) or val >= 0 and val < 11.25:
        return "N"

    # 1/2 degree increment between the directions
    i = 11.25
    for dir in dirs:
        if val >= i and val < (i + 22.5):
            return dir
        i += 22.5
    return None
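Usage is direct; the first bucket is the wrap-around north sector:

print(dir2txt(0.0), dir2txt(45.0), dir2txt(191.25))  # N NE SSW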
def wind_vector_components(windspd, winddir):
...
#!/usr/bin/env python
"""Generate AOSS Tower NetCDF4 files from Level 00 ASCII files."""
import logging
import os
import platform
import sys
from datetime import datetime

import numpy as np
import pandas as pd
from metobscommon.util import calc
from metobscommon.util.nc import (
    calculate_wind_gust,
    create_variables,
    make_summary_dict,
    summary_over_interval,
    write_qc_for_unwritten,
    write_vars,
)
from netCDF4 import Dataset

from aosstower import schema
from aosstower.level_00 import parser
from aosstower.station import station_info

LOG = logging.getLogger(__name__)
SOFTWARE_VERSION = "00"
def _get_data(input_files):
...@@ -28,7 +33,7 @@ def _get_data(input_files):
        try:
            yield from parser.read_frames(filename)
        except FileNotFoundError:
            LOG.warning(f"Input data file was not found: {filename}")
            bad_files += 1
            continue
    if bad_files == len(input_files):
...@@ -37,8 +42,8 @@ def _get_data(input_files):
def get_data(input_files):
    frame = pd.DataFrame(_get_data(input_files))
    frame = frame.set_index("stamp")
    frame = frame.mask(frame == -99999.0).fillna(value=np.nan)

    for col_name in frame.columns:
        if col_name in schema.unit_conversions:
...@@ -49,153 +54,201 @@ def get_data(input_files):
def write_global_attributes(nc_file, input_sources, interval=None, datastream=None):
    # create global attributes
    nc_file.source = "surface observation"
    nc_file.Conventions = "ARM-1.2 CF-1.6"
    nc_file.institution = "University of Wisconsin - Madison (UW) Space Science and Engineering Center (SSEC)"
    nc_file.featureType = "timeSeries"
    nc_file.data_level = "b1"

    # monthly files end with .month.nc
    # these end with .day.nc
    if datastream:
        nc_file.datastream = datastream
    elif interval in ["1D"]:
        # assume this is a monthly file, averaged daily
        nc_file.datastream = f"aoss.tower.nc-1mo-1d.b1.v{SOFTWARE_VERSION}"
    elif interval in ["1T", "1min"]:
        # assume this is a daily file, averaged
        nc_file.datastream = f"aoss.tower.nc-1d-1m.b1.v{SOFTWARE_VERSION}"
    nc_file.software_version = SOFTWARE_VERSION
    nc_file.command_line = " ".join(sys.argv)

    # generate history
    nc_file.history = " ".join(platform.uname()) + " " + os.path.basename(__file__)
    nc_file.input_source = input_sources[0]
    nc_file.input_sources = ", ".join(input_sources)
def create_giant_netcdf(
    input_files,
    output_fn,
    zlib,
    chunk_size,
    start=None,
    end=None,
    interval_width=None,
    summary=False,
    database=schema.database_dict,
    datastream=None,
):
frame = get_data(input_files) frame = get_data(input_files)
if frame.empty: if frame.empty:
raise ValueError("No data found from input files: {}".format(", ".join(input_files))) raise ValueError("No data found from input files: {}".format(", ".join(input_files)))
# Add wind direction components so we can average wind direction properly # Add wind direction components so we can average wind direction properly
frame['wind_east'], frame['wind_north'], _ = calc.wind_vector_components(frame['wind_speed'], frame['wind_dir']) frame["wind_east"], frame["wind_north"], _ = calc.wind_vector_components(frame["wind_speed"], frame["wind_dir"])
if 'air_temp' in frame and 'rh' in frame and \ if "air_temp" in frame and "rh" in frame and ("dewpoint" in database or "dewpoint_mean" in database):
('dewpoint' in database or 'dewpoint_mean' in database): LOG.info("'dewpoint' is missing from the input file, will calculate " "it from air temp and relative humidity")
LOG.info("'dewpoint' is missing from the input file, will calculate " frame["dewpoint"] = calc.dewpoint(frame["air_temp"], frame["rh"])
"it from air temp and relative humidity")
frame['dewpoint'] = calc.dewpoint(frame['air_temp'], frame['rh'])
# round up each 1 minute group so data at time T is the average of data # round up each 1 minute group so data at time T is the average of data
# from T - 1 (exclusive) to T (inclusive). # from T - 1 (exclusive) to T (inclusive).
new_frame = frame.resample('5S', closed='right', loffset='5S').mean() new_frame = frame.resample("5S", closed="right", loffset="5S").mean()
# 2 minute rolling average of 5 second data (5 seconds * 24 = 120 seconds = 2 minutes) # 2 minute rolling average of 5 second data (5 seconds * 24 = 120 seconds = 2 minutes)
winds_frame_5s = new_frame[['wind_speed', 'wind_east', 'wind_north']] winds_frame_5s = new_frame[["wind_speed", "wind_east", "wind_north"]]
winds_frame_2m = winds_frame_5s.rolling('2T').mean() winds_frame_2m = winds_frame_5s.rolling("2T").mean()
winds_frame_2m['gust'] = calculate_wind_gust(winds_frame_5s['wind_speed'], winds_frame_2m['wind_speed']) winds_frame_2m["gust"] = calculate_wind_gust(winds_frame_5s["wind_speed"], winds_frame_2m["wind_speed"])
# rolling average is used for mean output # rolling average is used for mean output
new_frame.update(winds_frame_2m) # adds wind_speed, wind_east/north new_frame.update(winds_frame_2m) # adds wind_speed, wind_east/north
new_frame['gust'] = winds_frame_2m['gust'] new_frame["gust"] = winds_frame_2m["gust"]
# average the values # average the values
if summary: if summary:
frame = summary_over_interval(new_frame, interval_width) frame = summary_over_interval(new_frame, interval_width)
else: else:
frame = new_frame.resample(interval_width, closed='right', loffset=interval_width).mean() frame = new_frame.resample(interval_width, closed="right", loffset=interval_width).mean()
frame['wind_dir'] = calc.wind_vector_degrees(frame['wind_east'], frame['wind_north']) frame["wind_dir"] = calc.wind_vector_degrees(frame["wind_east"], frame["wind_north"])
frame['gust'] = new_frame['gust'].resample(interval_width, closed='right', loffset=interval_width).max() frame["gust"] = new_frame["gust"].resample(interval_width, closed="right", loffset=interval_width).max()
frame = frame.fillna(np.nan) frame = frame.fillna(np.nan)
if start and end: if start and end:
frame = frame[start.strftime('%Y-%m-%d %H:%M:%S'): end.strftime('%Y-%m-%d %H:%M:%S')] frame = frame[start.strftime("%Y-%m-%d %H:%M:%S") : end.strftime("%Y-%m-%d %H:%M:%S")]
if chunk_size and not isinstance(chunk_size, (list, tuple)): chunk_sizes = [chunk_size] if chunk_size and not isinstance(chunk_size, (list, tuple)) else [frame.shape[0]]
chunk_sizes = [chunk_size]
else:
chunk_sizes = [frame.shape[0]]
first_stamp = datetime.strptime(str(frame.index[0]), '%Y-%m-%d %H:%M:%S') first_stamp = datetime.strptime(str(frame.index[0]), "%Y-%m-%d %H:%M:%S")
    # NETCDF4_CLASSIC was chosen so that MFDataset reading would work. See:
    # http://unidata.github.io/netcdf4-python/#netCDF4.MFDataset
    nc_file = Dataset(output_fn, "w", format="NETCDF4_CLASSIC")
    nc_file.createDimension("time", None)
    nc_file.createDimension("max_len_station_name", 32)
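    # Illustrative sketch (not part of the original code): files written this
    # way can later be aggregated along the unlimited "time" dimension,
    # e.g. (hypothetical filenames):
    #   from netCDF4 import MFDataset
    #   combined = MFDataset(["aoss_tower.2020-01-02.nc", "aoss_tower.2020-01-03.nc"])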
    create_variables(nc_file, first_stamp, database, chunk_sizes, zlib)

    written_vars = write_vars(nc_file, frame, database, station_info)
    unwritten_vars = set(nc_file.variables.keys()) - set(written_vars)
    written_vars.extend(write_qc_for_unwritten(nc_file.variables, unwritten_vars))

    # remove any of the qc_ variables we just added
    unwritten_vars = set(nc_file.variables.keys()) - set(written_vars)
    for unwritten in unwritten_vars:
        LOG.warning(f"Variable created but no data written: {unwritten}")

    write_global_attributes(
        nc_file,
        [os.path.basename(x) for x in input_files],
        interval=interval_width,
        datastream=datastream,
    )
    nc_file.close()
    return nc_file


def _dt_convert(datetime_str):
    """Parse datetime string, return datetime object."""
    try:
        return datetime.strptime(datetime_str, "%Y%m%d")
    except ValueError:
        try:
            return datetime.strptime(datetime_str, "%Y-%m-%d")
        except ValueError:
            return datetime.strptime(datetime_str, "%Y-%m-%dT%H:%M:%S")
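
# For reference, the three accepted formats (derived from the code above):
#   _dt_convert("20190102")            -> datetime(2019, 1, 2, 0, 0)
#   _dt_convert("2019-01-02")          -> datetime(2019, 1, 2, 0, 0)
#   _dt_convert("2019-01-02T12:30:05") -> datetime(2019, 1, 2, 12, 30, 5)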


def main():
    import argparse

    from metobscommon.archive import setup_logging

    parser = argparse.ArgumentParser(
        description="Convert level_00 aoss tower data to level_b1",
        fromfile_prefix_chars="@",
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="count",
        default=int(os.environ.get("VERBOSITY", 2)),
        dest="verbosity",
        help="each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG (default INFO)",
    )
    parser.add_argument(
        "-l",
        "--log-file",
        dest="log_filepath",
        help="Alternate name for log file, default is to not create a file",
    )
    parser.add_argument(
        "-s",
        "--start-time",
        type=_dt_convert,
        help="Start time of massive netcdf file, if only -s is given, a netcdf file for only that day is given"
        + ". Formats allowed: 'YYYY-MM-DDTHH:MM:SS', 'YYYY-MM-DD'",
    )
    parser.add_argument(
        "-e",
        "--end-time",
        type=_dt_convert,
        help="End time of massive netcdf file. Formats allowed: " + "'YYYY-MM-DDTHH:MM:SS', 'YYYY-MM-DD'",
    )
    parser.add_argument(
        "-n",
        "--interval",
        default="1T",
        help="""Width of the interval to average input data
over in Pandas offset format. If not specified, 1 minute averages are used.
Use '1D' for daily or '5T' for 5 minute averages.
See this page for more details:
http://pandas.pydata.org/pandas-docs/stable/timeseries.html#offset-aliases""",
    )
    parser.add_argument(
        "--summary",
        action="store_true",
        help="Create a file with _low, _mean, _high versions of every variable name",
    )
    parser.add_argument(
        "-f",
        "--fields",
        nargs="+",
        default=schema.met_vars,
        help="Variable names to include in the NetCDF file (base name, no suffixes)",
    )
    parser.add_argument("--chunk-size", type=int, help="chunk size for the netCDF file")
    parser.add_argument("-z", "--zlib", action="store_true", help="compress netCDF file with zlib")
    parser.add_argument("--data-stream", help="'datastream' global attribute to put in output file")
    parser.add_argument(
        "-i",
        "--input",
        dest="input_files",
        required=True,
        nargs="+",
        help="aoss_tower level_00 paths. Use @filename to read a list of paths from that filename.",
    )
    parser.add_argument(
        "-o",
        "--output",
        dest="output_files",
        required=True,
        nargs="+",
        help="""NetCDF filename(s) to create from input. If one
filename is specified then all input files are combined into it. Otherwise
each input file is mapped to the corresponding output file.
""",
    )

    args = parser.parse_args()
    levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
@@ -204,7 +257,7 @@ each input file is mapped to the corresponding output file.
    if args.start_time and not args.end_time:
        args.end_time = args.start_time.replace(hour=23, minute=59, second=59)
    elif not args.start_time and args.end_time:
        raise ValueError("start time must be specified when end time is specified")

    mini_database = {k: schema.database_dict[k] for k in args.fields}
    if args.summary:
@@ -213,7 +266,7 @@ each input file is mapped to the corresponding output file.
    # Case 1: All inputs to 1 output file
    # Case 2: Each input into a separate output file
    if args.output_files and len(args.output_files) not in [1, len(args.input_files)]:
        raise ValueError("Output filenames must be 1 or the same length as input files")
    elif args.output_files and len(args.output_files) == len(args.input_files):
        args.input_files = [[i] for i in args.input_files]
    else:
@@ -222,15 +275,23 @@ each input file is mapped to the corresponding output file.
    success = False
    for in_files, out_fn in zip(args.input_files, args.output_files):
        try:
            create_giant_netcdf(
                in_files,
                out_fn,
                args.zlib,
                args.chunk_size,
                args.start_time,
                args.end_time,
                args.interval,
                args.summary,
                mini_database,
                args.data_stream,
            )
            success = True
        except (ValueError, TypeError):
            LOG.error(f"Could not generate NetCDF file for {in_files}", exc_info=True)
    if not success:
        raise OSError("All ASCII files were empty or could not be read")


if __name__ == "__main__":
    ...
This diff is collapsed.

from collections import namedtuple

from numpy import float32

Var = namedtuple(
    "Var",
    ["type", "standard_name", "name", "description", "units", "valid_min", "valid_max", "valid_delta"],
)
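
# Each Var bundles the NetCDF metadata for one variable; a field is None when
# there is nothing to record for it. Illustrative use (not part of the
# original module):
#   v = Var(float32, "air_temperature", "air_temp", "Air temperature", "degC", "-50", "50", None)
#   v.units        # "degC"
#   v._asdict()    # mapping of field name -> value, e.g. {"type": float32, ...}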

database = {
    "box_temp": Var(
        float32,
        "air_temperature",
        "box_temp",
        "Auxiliary Temperature",
        "degC",
        None,
        None,
        None,
    ),
    "box_pressure": Var(
        float32,
        "air_pressure",
        "box_pressure",
        "Pressure inside the data logger enclosure",
        "hPa",
        "850",
        "1100",
        None,
    ),
    "paro_air_temp_period": Var(
        float32,
        None,
        "paro_air_temp_period",
        None,
        "1",
        None,
        None,
        None,
    ),
    "paro_pressure_period": Var(
        float32,
        None,
        "paro_pressure_period",
        None,
        "1",
        None,
        None,
        None,
    ),
    "paro_air_temp": Var(
        float32,
        "air_temperature",
        "paro_air_temp",
        None,
        "degC",
        "-50",
        "50",
        None,
    ),
    "pressure": Var(
        float32,
        "air_pressure",
        "pressure",
        "Air pressure as measured from the PAROSCI pressure sensor",
        "hPa",
        "850",
        "1100",
        None,
    ),
    "paro_cal_sig": Var(
        float32,
        None,
        "paro_cal_sig",
        None,
        None,
        None,
        None,
        None,
    ),
    "box_rh": Var(
        float32,
        "relative_humidity",
        "box_rh",
        "Relative humidity inside the data logger enclosure",
        "%",
        "0",
        "100",
        None,
    ),
    "box_air_temp": Var(
        float32,
        "air_temperature",
        "box_air_temp",
        "Air temperature inside the data logger enclosure",
        "degC",
        "-50",
        "50",
        None,
    ),
    "air_temp_2": Var(
        float32,
        "air_temperature",
        "air_temp_2",
        "Auxiliary air temperature",
        "degC",
        "-50",
        "50",
        None,
    ),
    "air_temp_3": Var(
        float32,
        "air_temperature",
        "air_temp_3",
        "Auxiliary air temperature",
        "degC",
        "-50",
        "50",
        None,
    ),
    "air_temp_4": Var(
        float32,
        "air_temperature",
        "air_temp_4",
        "Auxiliary air temperature",
        "degC",
        "-50",
        "50",
        None,
    ),
    "air_temp_5": Var(
        float32,
        "air_temperature",
        "air_temp_5",
        "Auxiliary air temperature",
        "degC",
        "-50",
        "50",
        None,
    ),
    "wind_speed": Var(
        float32,
        "wind_speed",
        "wind_speed",
        "Wind speed",
        "m*s^-1",
        "0",
        "50",
        None,
    ),
    "wind_dir": Var(
        float32,
        "wind_from_direction",
        "wind_dir",
        "Wind direction",
        "degrees",
        "0",
        "360",
        None,
    ),
    "rh_shield_freq": Var(
        float32,
        None,
        "rh_shield_freq",
        None,
        "hz",
        None,
        None,
        None,
    ),
    "rh": Var(
        float32,
        "relative_humidity",
        "rh",
        "Relative humidity",
        "%",
        "0",
        "100",
        None,
    ),
    "air_temp_6_3m": Var(
        float32,
        "air_temperature",
        "air_temp_6_3m",
        "Air temperature 6.3m from tower base",
        "degC",
        "-50",
        "50",
        None,
    ),
    "dewpoint": Var(
        float32,
        "dew_point_temperature",
        "dewpoint",
        "Calculated dewpoint temperature",
        "degC",
        "-50",
        "50",
        None,
    ),
    "rtd_shield_freq": Var(
        float32,
        None,
        "rtd_shied_freq",
        None,
        None,
        None,
        None,
        None,
    ),
    "air_temp": Var(
        float32,
        "air_temperature",
        "air_temp",
        "Air temperature",
        "degC",
        "-50",
        "50",
        None,
    ),
    "solar_flux": Var(
        float32,
        "solar_flux",
        "solar_flux",
        "Solar flux",
        "w*m^-2",
        "0",
        "3000",
        None,
    ),
    "precip": Var(
        float32,
        None,
        "precip",
        "Precipitation",
        "mm",
        "0",
        "254",
        None,
    ),
    "accum_precip": Var(
        float32,
        "accumulated_precipitation",
        "accum_precip",
        "Precipitation accumulated since 0Z",
        "mm",  # data comes from instrument as inches but we typically want millimeters
        "0",
        "254",
        None,
    ),
    "altimeter": Var(
        float32,
        None,
        "altimeter",
        None,
        "inHg",
        None,
        None,
        None,
    ),
    "gust": Var(
        float32,
        "wind_speed_of_gust",
        "gust",
        "Wind gust over the previous 2 minutes",
        "m/s",
        "0",
        "50",
        None,
    ),
}

database_dict = {k: v._asdict() for k, v in database.items()}
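# For example, given the entries above:
#   database_dict["air_temp"]["units"]      # "degC"
#   database_dict["air_temp"]["valid_min"]  # "-50"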
met_vars = {
    "air_temp",
    "dewpoint",
    "rh",
    "solar_flux",
    "pressure",
    "precip",
    "accum_precip",
    "wind_speed",
    "wind_dir",
    "gust",
}
engr_vars = set(database.keys()) - met_vars

unit_conversions = {}
unit_conversions["accum_precip"] = lambda x: x * 25.4
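
# Illustrative use (not part of the original module): the instrument reports
# accumulated precipitation in inches, so callers apply the conversion to get
# millimeters before writing:
#   unit_conversions["accum_precip"](0.5)  # -> 12.7  (1 inch = 25.4 mm)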
"""Station metadata. """Station metadata."""
"""
from datetime import timedelta from datetime import timedelta
# Time between data samples in seconds # Time between data samples in seconds
...@@ -17,11 +16,11 @@ LATITUDE = 43.070786 ...@@ -17,11 +16,11 @@ LATITUDE = 43.070786
LONGITUDE = -89.406939 LONGITUDE = -89.406939
station_info = { station_info = {
'site': 'mendota', "site": "mendota",
'inst': 'buoy', "inst": "buoy",
'long_name': 'AOSS Tower', "long_name": "AOSS Tower",
'short_name': 'aoss.tower', "short_name": "aoss.tower",
'alt': ELEVATION, "alt": ELEVATION,
'lon': LONGITUDE, "lon": LONGITUDE,
'lat': LATITUDE, "lat": LATITUDE,
} }
\ No newline at end of file


#!/usr/bin/env python
import datetime
import unittest

import numpy as np
import pandas as pd

from aosstower.level_00.influxdb import Updater, construct_url, get_url_data
@@ -14,12 +16,34 @@ class TestCase:
def create_data(size, data_interval=datetime.timedelta(seconds=5)):
    return [
        {
            "wind_speed": i % 119,
            "wind_dir": 0,
            "box_pressure": i,
            "paro_air_temp_period": i,
            "paro_pressure_period": i,
            "paro_air_temp": i,
            "pressure": i,
            "paro_cal_sig": i,
            "box_air_temp": i,
            "air_temp_2": i,
            "air_temp_3": i,
            "air_temp_4": i,
            "rh_shield_freq": i,
            "rel_hum": i,
            "air_temp_6_3m": i,
            "dewpoint": i,
            "rtd_shied_freq": i,
            "air_temp": i,
            "solar_flux": i,
            "precip": i,
            "accum_precip": -99999,
            "altimeter": i,
            "timestamp": datetime.datetime(2019, 1, 1, 0, 3, 33) + data_interval * i,
        }
        for i in range(size)
    ]


def _isnan(x):
@@ -29,59 +53,161 @@ def _isnan(x):
class TestInfluxdb(unittest.TestCase):
    def setUp(self):
        self.updater = Updater()
        self.test_data = TestCase(
            create_data(209),
            [
                {
                    "wind_speed": 17,
                    "wind_dir": 0,
                    "box_pressure": 17,
                    "paro_air_temp_period": 17,
                    "paro_pressure_period": 17,
                    "paro_air_temp": 17,
                    "pressure": 17,
                    "paro_cal_sig": 17,
                    "box_air_temp": 17,
                    "air_temp_2": 17,
                    "air_temp_3": 17,
                    "air_temp_4": 17,
                    "rh_shield_freq": 17,
                    "rel_hum": 17,
                    "air_temp_6_3m": 17,
                    "dewpoint": 17,
                    "rtd_shied_freq": 17,
                    "air_temp": 17,
                    "solar_flux": 17,
                    "precip": 17,
                    "accum_precip": np.nan,
                    "altimeter": 17,
                    "wind_east": 0,
                    "wind_north": 17,
                    "wind_speed_2m": np.nan,
                    "wind_dir_2m": np.nan,
                    "gust_1m": np.nan,
                    "gust_10m": np.nan,
                    "index": pd.Timestamp("2019-01-01 00:04:58"),
                },
                {
                    "wind_speed": 77,
                    "wind_dir": 0,
                    "box_pressure": 77,
                    "paro_air_temp_period": 77,
                    "paro_pressure_period": 77,
                    "paro_air_temp": 77,
                    "pressure": 77,
                    "paro_cal_sig": 77,
                    "box_air_temp": 77,
                    "air_temp_2": 77,
                    "air_temp_3": 77,
                    "air_temp_4": 77,
                    "rh_shield_freq": 77,
                    "rel_hum": 77,
                    "air_temp_6_3m": 77,
                    "dewpoint": 77,
                    "rtd_shied_freq": 77,
                    "air_temp": 77,
                    "solar_flux": 77,
                    "precip": 77,
                    "accum_precip": np.nan,
                    "altimeter": 77,
                    "wind_east": 0,
                    "wind_north": 77,
                    "wind_speed_2m": 65.5,
                    "wind_dir_2m": 0,
                    "gust_1m": 77,
                    "gust_10m": np.nan,
                    "index": pd.Timestamp("2019-01-01 00:09:58"),
                },
                {
                    "wind_speed": 18,
                    "wind_dir": 0,
                    "box_pressure": 137,
                    "paro_air_temp_period": 137,
                    "paro_pressure_period": 137,
                    "paro_air_temp": 137,
                    "pressure": 137,
                    "paro_cal_sig": 137,
                    "box_air_temp": 137,
                    "air_temp_2": 137,
                    "air_temp_3": 137,
                    "air_temp_4": 137,
                    "rh_shield_freq": 137,
                    "rel_hum": 137,
                    "air_temp_6_3m": 137,
                    "dewpoint": 137,
                    "rtd_shied_freq": 137,
                    "air_temp": 137,
                    "solar_flux": 137,
                    "precip": 137,
                    "accum_precip": np.nan,
                    "altimeter": 137,
                    "wind_east": 0,
                    "wind_north": 18,
                    "wind_speed_2m": 31.291666666666668,
                    "wind_dir_2m": 0,
                    "gust_1m": np.nan,
                    "gust_10m": np.nan,
                    "index": pd.Timestamp("2019-01-01 00:14:58"),
                },
                {
                    "wind_speed": 78,
                    "wind_dir": 0,
                    "box_pressure": 197,
                    "paro_air_temp_period": 197,
                    "paro_pressure_period": 197,
                    "paro_air_temp": 197,
                    "pressure": 197,
                    "paro_cal_sig": 197,
                    "box_air_temp": 197,
                    "air_temp_2": 197,
                    "air_temp_3": 197,
                    "air_temp_4": 197,
                    "rh_shield_freq": 197,
                    "rel_hum": 197,
                    "air_temp_6_3m": 197,
                    "dewpoint": 197,
                    "rtd_shied_freq": 197,
                    "air_temp": 197,
                    "solar_flux": 197,
                    "precip": 197,
                    "accum_precip": np.nan,
                    "altimeter": 197,
                    "wind_east": 0,
                    "wind_north": 78,
                    "wind_speed_2m": 66.5,
                    "wind_dir_2m": 0,
                    "gust_1m": 78,
                    "gust_10m": 118,
                    "index": pd.Timestamp("2019-01-01 00:19:58"),
                },
            ],
            [
                "http://weatherstation.wunderground.com/weatherstation/updateweatherstation.php?"
                "ID=&PASSWORD=&dateutc=2019-01-01+00%3A04%3A58&winddir=0.0&winddir_avg2m="
                "&windspeedmph=38.02798&windspdmph_avg2m=&windgustmph=&windgustmph_10m="
                "&humidity=17.0&tempf=62.6&baromin=0.5020096628001095&dewptf=62.6"
                "&solarradiation=17.0&rainin=17.0&dailyrainin=&softwaretype=SSEC-RIG"
                "&action=updateraw",
                "http://weatherstation.wunderground.com/weatherstation/updateweatherstation.php?"
                "ID=&PASSWORD=&dateutc=2019-01-01+00%3A09%3A58&winddir=0.0&winddir_avg2m=0.0"
                "&windspeedmph=172.24438&windspdmph_avg2m=146.51957000000002&windgustmph=172.24438"
                "&windgustmph_10m=&humidity=77.0&tempf=170.6&baromin=2.2738084726828487"
                "&dewptf=170.6&solarradiation=77.0&rainin=77.0&dailyrainin=&softwaretype=SSEC-RIG"
                "&action=updateraw",
                "http://weatherstation.wunderground.com/weatherstation/updateweatherstation.php?"
                "ID=&PASSWORD=&dateutc=2019-01-01+00%3A14%3A58&winddir=0.0&winddir_avg2m=0.0"
                "&windspeedmph=40.264920000000004&windspdmph_avg2m=69.99758083333334&windgustmph="
                "&windgustmph_10m=&humidity=137.0&tempf=278.6&baromin=4.045607282565588"
                "&dewptf=278.6&solarradiation=137.0&rainin=137.0&dailyrainin=&softwaretype=SSEC-RIG"
                "&action=updateraw",
                "http://weatherstation.wunderground.com/weatherstation/updateweatherstation.php?"
                "ID=&PASSWORD=&dateutc=2019-01-01+00%3A19%3A58&winddir=0.0&winddir_avg2m=0.0"
                "&windspeedmph=174.48132&windspdmph_avg2m=148.75651000000002&windgustmph=174.48132"
                "&windgustmph_10m=263.95892000000003&humidity=197.0&tempf=386.6"
                "&baromin=5.817406092448327&dewptf=386.6&solarradiation=197.0&rainin=197.0"
                "&dailyrainin=&softwaretype=SSEC-RIG&action=updateraw",
            ],
        )

    def test_updater(self):
        output = []
@@ -89,23 +215,23 @@ class TestInfluxdb(unittest.TestCase):
            avg = self.updater.rolling_average(record)
            if avg is not None:
                output.append({key: avg[key][-1] for key in avg})
                output[-1]["index"] = avg.index[-1]
                assert len(self.test_data.expected_avg) >= len(output)
                assert len(self.test_data.expected_avg[len(output) - 1]) == len(output[-1])
                for key in output[-1]:
                    if not (_isnan(output[-1][key]) and _isnan(self.test_data.expected_avg[len(output) - 1][key])):
                        assert self.test_data.expected_avg[len(output) - 1][key] == output[-1][key]
        assert len(self.test_data.expected_avg) == len(output)

    def test_construct_url(self):
        output = []
        for record in self.test_data.input:
            avg = self.updater.rolling_average(record)
            if avg is not None:
                output.append(construct_url(get_url_data(avg, "", "")))
                assert len(self.test_data.expected_url) >= len(output)
                assert self.test_data.expected_url[len(output) - 1] == output[-1]
        assert len(self.test_data.expected_url) == len(output)


def suite():
    ...
@@ -3,61 +3,65 @@ from datetime import datetime
class ParserV0Tests(unittest.TestCase):
    line = (
        "TIME 0 ACCURAIN 0.000000 TEMP107_4 8.139600 "
        "LI200X 0.066020 TEMP107_1 9.307800 RH41372 92.064000 "
        "TEMP107_5 -99999.000000 CS105 970.100000 PAROSCI 971.428000 "
        "WSPD05305 8.663000 TEMP107_3 8.368400 CS10162 65.653000 "
        "RAIN380M 0.000000 TEMP107_2 8.287800 TEMP41372 8.202300 "
        "WDIR05305 143.380000\n"
    )

    def _cut(self):
        from aosstower.level_00.parser import ParserV0

        return ParserV0()

    def test_maybe_mine(self):
        parser = self._cut()
        assert parser.maybe_mine(self.line)

        bad_line = "xx" + self.line
        assert not parser.maybe_mine(bad_line)

    def test_record_format(self):
        parser = self._cut()
        record = parser.make_frame(self.line)

        assert "stamp" in record
        assert record["stamp"] == datetime(1970, 1, 1)


class ParserV1V2Tests(unittest.TestCase):
    line = (
        "1,1970,1,0000,0,976.59,5.8564,30.085,25.893,977.36,58732,"
        "47.375,24.234,23.865,22.615,37.219,6.9222,67.398,145.2,45.581,"
        "22.669,10.417,145.2,22.665,163.94,0,0,30.015\n"
    )

    def _cut(self):
        from aosstower.level_00.parser import ParserV1V2

        return ParserV1V2()

    def test_maybe_mine(self):
        parser = self._cut()
        assert parser.maybe_mine(self.line)

        bad_line = "xx," + self.line.strip()
        assert not parser.maybe_mine(bad_line)

    def test_record_format(self):
        parser = self._cut()
        record = parser.make_frame(self.line)

        assert "stamp" in record
        assert record["stamp"] == datetime(1970, 1, 1)

    def test_record_supports_v1_and_v2(self):
        parser = self._cut()
        parser.make_frame(self.line)
        parser.make_frame(self.line.strip() + ",999\n")


#!/usr/bin/env python
"""Test basic NetCDF generation."""
import os
from datetime import datetime

import numpy as np


def get_nc_schema_database(fields=None):
    """Get a version of the NetCDF schema that mimics the nc script."""
    from aosstower import schema

    if fields is None:
        fields = schema.met_vars
    mini_database = {k: schema.database_dict[k] for k in fields}
@@ -18,19 +19,23 @@ def get_nc_schema_database(fields=None):
def test_nc_basic1(tmpdir):
    """Test basic usage of the NetCDF generation."""
    from netCDF4 import Dataset

    from aosstower.level_b1.nc import create_giant_netcdf
    from aosstower.tests.utils import get_cached_level_00

    input_files = list(get_cached_level_00(num_files=2))
    nc_out = tmpdir.join("test.nc")
    create_giant_netcdf(
        input_files,
        str(nc_out),
        True,
        None,
        start=datetime(2020, 1, 2, 0, 0, 0),
        interval_width="1T",
        database=get_nc_schema_database(),
    )
    assert os.path.isfile(nc_out)
    with Dataset(nc_out, "r") as nc:
        sflux = nc["solar_flux"][:]
        assert np.count_nonzero(sflux.mask) == 2