"""Configuration for file-based API responses."""
from collections import defaultdict
from pathlib import Path
from typing import TypedDict
from metobsapi.util import FREQUENCY_DIR_FMT, ProductFrequency
# ARM Data Levels: https://www.arm.gov/policies/datapolicies/formatting-and-file-naming-protocols
L00_DESCRIPTION = "raw data - primary raw data stream collected directly from instrument"
LA0_DESCRIPTION = "converted to netCDF"
LA1_DESCRIPTION = "calibration factors applied and converted to geophysical units"
LB1_DESCRIPTION = "QC checks applied to measurements"


class InstInfo(TypedDict):
    display_name: str
    levels: dict[str, dict]


class SiteInfo(TypedDict):
    display_name: str
    instruments: dict[str, InstInfo]


# TODO: Load from config file
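# Nesting: site -> "instruments" -> instrument -> "levels" -> level -> "versions"/"products".
# Each product entry pairs a ProductFrequency with a strftime-style file name pattern,
# plus optional display name, thumbnail pattern, and preview product metadata.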
ARCHIVE_INFO: dict[str, SiteInfo] = {
    "aoss": {
        "display_name": "AOSS",
        "instruments": {
            "tower": {
                "display_name": "Tower",
                "levels": {
                    "level_00": {
                        "description": L00_DESCRIPTION,
                        "versions": ("version_00",),
                        "products": {
                            "ascii": {
                                "frequency": ProductFrequency.DAILY_FILE,
                                "pattern": "aoss_tower.%Y-%m-%d.ascii",
                                "display_name": "Daily CSV (aoss_tower.YYYY-MM-DD.ascii)",
                            },
                        },
                    },
                    "level_b1": {
                        "description": LB1_DESCRIPTION,
                        "versions": ("version_00",),
                        "products": {
                            # 'nc-monthly': {
                            # },
                            "nc-daily": {
                                "frequency": ProductFrequency.DAILY_FILE,
                                "pattern": "aoss_tower.%Y-%m-%d.nc",
                                "display_name": "Daily NetCDF file (aoss_tower.YYYY-MM-DD.nc)",
                                "preview_product": "meteorogram-daily",
                            },
                            "meteorogram-daily": {
                                "frequency": ProductFrequency.DAILY_FILE,
                                "pattern": "aoss_tower.meteorogram.%Y-%m-%d.png",
                                "thumbnail_pattern": "aoss_tower.meteorogram.%Y-%m-%d_thumbnail.png",
                                "display_name": "Daily Meteorogram (aoss_tower.meteorogram.YYYY-MM-DD.png)",
                            },
                            "td-daily": {
                                "frequency": ProductFrequency.DAILY_FILE,
                                "pattern": "aoss_tower.td.%Y-%m-%d.png",
                                "thumbnail_pattern": "aoss_tower.td.%Y-%m-%d_thumbnail.png",
                                "display_name": "Daily Air and Dewpoint Temperature (aoss_tower.td.YYYY-MM-DD.png)",
                            },
                            "pressure-daily": {
                                "frequency": ProductFrequency.DAILY_FILE,
                                "pattern": "aoss_tower.pressure.%Y-%m-%d.png",
                                "thumbnail_pattern": "aoss_tower.pressure.%Y-%m-%d_thumbnail.png",
                                "display_name": "Daily Pressure (aoss_tower.pressure.YYYY-MM-DD.png)",
                            },
                            "wind-speed-daily": {
                                "frequency": ProductFrequency.DAILY_FILE,
                                "pattern": "aoss_tower.wind_speed.%Y-%m-%d.png",
                                "thumbnail_pattern": "aoss_tower.wind_speed.%Y-%m-%d_thumbnail.png",
                                "display_name": "Daily Wind Speed (aoss_tower.wind_speed.YYYY-MM-DD.png)",
                            },
                            "wind-dir-daily": {
                                "frequency": ProductFrequency.DAILY_FILE,
                                "pattern": "aoss_tower.wind_dir.%Y-%m-%d.png",
                                "thumbnail_pattern": "aoss_tower.wind_dir.%Y-%m-%d_thumbnail.png",
                                "display_name": "Daily Wind Direction (aoss_tower.wind_dir.YYYY-MM-DD.png)",
                            },
                            "accum-precip-daily": {
                                "frequency": ProductFrequency.DAILY_FILE,
                                "pattern": "aoss_tower.accum_precip.%Y-%m-%d.png",
                                "thumbnail_pattern": "aoss_tower.accum_precip.%Y-%m-%d_thumbnail.png",
                                "display_name": "Daily Accumulated Precipitation "
                                "(aoss_tower.accum_precip.YYYY-MM-DD.png)",
                            },
                            "solar-flux-daily": {
                                "frequency": ProductFrequency.DAILY_FILE,
                                "pattern": "aoss_tower.solar_flux.%Y-%m-%d.png",
                                "thumbnail_pattern": "aoss_tower.solar_flux.%Y-%m-%d_thumbnail.png",
                                "display_name": "Daily Solar Flux (aoss_tower.solar_flux.YYYY-MM-DD.png)",
                            },
                        },
                    },
                },
            },
"aeri": {
"display_name": "AERI",
"levels": {
"level_00": {
"description": L00_DESCRIPTION,
"versions": ("version_00",),
"products": {
"par": {
"frequency": ProductFrequency.DAILY_DIR,
"pattern": "%y%m%d.PAR",
},
"qc": {
"frequency": ProductFrequency.DAILY_DIR,
"pattern": "%y%m%d.QC",
},
"sum": {
"frequency": ProductFrequency.DAILY_DIR,
"pattern": "%y%m%d.SUM",
},
"scr-aesitter": {
"frequency": ProductFrequency.DAILY_DIR,
"pattern": "AESITTER.SCR",
},
"scr-radiance": {
"frequency": ProductFrequency.DAILY_DIR,
"pattern": "RADIANCE.SCR",
},
"scr-summary": {
"frequency": ProductFrequency.DAILY_DIR,
"pattern": "SUMMARY.SCR",
},
},
},
},
},
"ceilo": {
"display_name": "Ceilometer",
"levels": {
"level_00": {
"description": L00_DESCRIPTION,
"versions": ("version_00",),
"products": {
"ascii": {
"frequency": ProductFrequency.DAILY_FILE,
"pattern": "aoss_ceilo.%Y-%m-%d.ascii",
"display_name": "Daily CSV (aoss_ceilo.YYYY-MM-DD.ascii)",
},
},
},
"level_b1": {
"description": LB1_DESCRIPTION,
"versions": ("version_00",),
"products": {
# 'nc-monthly': {
# },
"nc-daily": {
"frequency": ProductFrequency.DAILY_FILE,
"pattern": "aoss_ceilo.%Y-%m-%d.nc",
"display_name": "Daily NetCDF file (aoss_ceilo.YYYY-MM-DD.nc)",
"preview_product": "meteorogram-daily",
                            },
                            "backscatter-daily": {
                                "frequency": ProductFrequency.DAILY_FILE,
                                "pattern": "aoss_ceilo.%Y-%m-%d_000000_235959.png",
                                "thumbnail_pattern": "aoss_ceilo.%Y-%m-%d_000000_235959_tn.png",
                                "display_name": "Daily Backscatter (aoss_ceilo.YYYY-MM-DD.png)",
                            },
                        },
                    },
                },
            },
        },
    },
    "mendota": {
        "display_name": "Mendota",
        "instruments": {},
    },
}

# Add the other AERI file types
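# e.g. "B1.CXS" becomes product id "cxs-b1" with daily-directory pattern "%y%m%dB1.CXS"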
for file_suffix in (
    "B1.CXS",
    "B1.UVS",
    "B2.CXS",
    "B2.UVS",
    "C1.RNC",
    "C2.RNC",
    "F1.CSV",
    "F1.CXS",
    "F1.UVS",
    "F2.CSV",
    "F2.CXS",
    "F2.UVS",
):
    parts = file_suffix.split(".")
    product_id = parts[1].lower() + "-" + parts[0].lower()
    nfo = {
        "frequency": ProductFrequency.DAILY_DIR,
        "pattern": f"%y%m%d{file_suffix}",
    }
    ARCHIVE_INFO["aoss"]["instruments"]["aeri"]["levels"]["level_00"]["products"][product_id] = nfo

# Map stream_id -> stream info dict (or, for wildcard product IDs, a list of stream IDs)
ARCHIVE_STREAMS: dict[str, dict | list[str]] = {}
# Also map "<site>.<inst>" -> [stream_id, ...]
INSTRUMENT_STREAMS = defaultdict(list)
stream_id_fmt = "{site}.{inst}.{product}.{level}.{version}"
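# e.g. "aoss.tower.nc-daily.lb1.v00" ("level_b1" -> "lb1", "version_00" -> "v00")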
for site, site_info in ARCHIVE_INFO.items():
    for inst, inst_info in site_info["instruments"].items():
        inst_name = site + "." + inst
        for level, level_info in inst_info["levels"].items():
            for version in level_info["versions"]:
                all_products = []
                for product_id, pattern_info in level_info["products"].items():
                    stream_id = stream_id_fmt.format(
                        site=site,
                        inst=inst,
                        level=level.replace("level_", "l"),
                        product=product_id,
                        version=version.replace("version_", "v"),
                    )
                    all_products.append(stream_id)
                    path = (
                        Path(site)
                        / inst
                        / level
                        / version
                        / FREQUENCY_DIR_FMT[pattern_info["frequency"]]
                        / pattern_info["pattern"]
                    )
                    stream_info = {
                        "relpath": path,
                        "site": site,
                        "inst": inst,
                        "level": level,
                        "version": version,
                        "file_pattern": pattern_info["pattern"],
                    }
                    if "thumbnail_pattern" in pattern_info:
                        stream_info["thumbnail"] = (
                            Path(site)
                            / inst
                            / level
                            / version
                            / FREQUENCY_DIR_FMT[pattern_info["frequency"]]
                            / pattern_info["thumbnail_pattern"]
                        )
                    else:
                        stream_info["thumbnail"] = None
                    ARCHIVE_STREAMS[stream_id] = stream_info
                    INSTRUMENT_STREAMS[inst_name].append(stream_id)
                    # Special 'most recent' version stream_id
                    if version == level_info["versions"][0]:
                        recent_stream_id = stream_id_fmt.format(
                            site=site,
                            inst=inst,
                            level=level.replace("level_", "l"),
                            product=product_id,
                            version="*",
                        )
                        ARCHIVE_STREAMS[recent_stream_id] = ARCHIVE_STREAMS[stream_id]
                        INSTRUMENT_STREAMS[inst_name].append(recent_stream_id)
                all_patterns_id = stream_id_fmt.format(
                    site=site,
                    inst=inst,
                    level=level.replace("level_", "l"),
                    product="*",
                    version=version.replace("version_", "v"),
                )
                ARCHIVE_STREAMS[all_patterns_id] = all_products
                INSTRUMENT_STREAMS[inst_name].append(all_patterns_id)
                if version == level_info["versions"][0]:
                    all_patterns_recent_id = all_patterns_id.replace(version.replace("version_", "v"), "*")
                    ARCHIVE_STREAMS[all_patterns_recent_id] = all_products
                    INSTRUMENT_STREAMS[inst_name].append(all_patterns_recent_id)
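# The loop above registers each product under several stream ID forms, for example:
#   "aoss.tower.nc-daily.lb1.v00" -> stream info dict
#   "aoss.tower.nc-daily.lb1.*"   -> same dict (most recent version, i.e. the first listed version)
#   "aoss.tower.*.lb1.v00"        -> list of every product stream ID for that level/version
#   "aoss.tower.*.lb1.*"          -> the same list for the most recent version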

ERROR_MESSAGES = {
    "datetime_error": (400, "missing begin or end time parameters"),
    "malformed_timestamp": (400, "could not parse timestamp parameters 'begin' or 'end', check format"),
    "missing_inst": (400, "missing or unknown inst parameter"),
    "missing_site": (400, "missing or unknown site parameter"),
    "missing_level": (400, "missing or unknown level parameter"),
    "missing_streams": (400, "missing or unknown stream pattern parameter"),
    "missing_version": (400, "missing or unknown version parameter"),
    "unknown_stream": (400, "unknown stream ID, expected 'site'.'inst'.'level=lXX'.'pattern'.'versionXX or \"*\"'"),
}
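# A minimal sketch (not the actual handler code, which lives elsewhere) of how an API
# view might turn one of these entries into a response:
#     status_code, message = ERROR_MESSAGES["missing_site"]
#     return {"status": "error", "message": message}, status_code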