Unverified Commit 8c1d49da authored by David Hoese's avatar David Hoese Committed by GitHub
Browse files

Merge pull request #307 from djhoese/bugfix-newer-satpy

Fix issues related to newer versions of Satpy and PyYAML
parents b3e81b08 5af61258
......@@ -55,13 +55,13 @@ def _create_get_open_file_names_mock(returned_files):
def test_wizard_abi_l1b(qtbot, monkeypatch):
"""Test that the open file wizard works all the way through."""
from satpy import DatasetID
from satpy.tests.utils import make_dataid
files = ['OR_ABI-L1b-RadM1-M3C01_G16_s20182541300210_e20182541300267_c20182541300308.nc']
dataset_ids = [
# test that floating point resolutions don't crash
DatasetID(name='C01', resolution=1000.5, calibration='reflectance'),
make_dataid(name='C01', resolution=1000.5, calibration='reflectance'),
# radiance calibrations should be ignored by default
DatasetID(name='C01', resolution=1000.5, calibration='radiance'),
make_dataid(name='C01', resolution=1000.5, calibration='radiance'),
]
# Don't actually talk to Satpy
monkeypatch.setattr('uwsift.view.open_file_wizard.Scene', create_scene(dataset_ids))
......
......@@ -8,7 +8,8 @@ import xarray as xr
import numpy as np
import dask.array as da
from datetime import datetime
from satpy import DatasetID, Scene
from satpy import Scene
from satpy.tests.utils import make_dataid
from pyresample.geometry import AreaDefinition
from uwsift.workspace.importer import available_satpy_readers, SatpyImporter
from uwsift.common import Info, Kind
......@@ -114,7 +115,7 @@ def test_satpy_importer_basic(tmpdir, monkeypatch, mocker):
imp = SatpyImporter(['/test/file.nc'], tmpdir, db_sess,
scene=scn,
reader='abi_l1b',
dataset_ids=[DatasetID(name='C01')])
dataset_ids=[make_dataid(name='C01')])
imp.merge_resources()
assert imp.num_products == 1
products = list(imp.merge_products())
......@@ -153,7 +154,7 @@ def test_satpy_importer_contour_0_360(tmpdir, monkeypatch, mocker):
imp = SatpyImporter(['/test/file.nc'], tmpdir, db_sess,
scene=scn,
reader='grib',
dataset_ids=[DatasetID(name='gh', level=125)])
dataset_ids=[make_dataid(name='gh', level=125)])
imp.merge_resources()
assert imp.num_products == 1
products = list(imp.merge_products())
......
......@@ -86,9 +86,7 @@ def _load_satpy_readers_cache(force_refresh=None):
readers = available_readers(as_dict=True)
# sort list of readers just in case we depend on this in the future
readers = sorted(readers, key=lambda x: x['name'])
# filter out known python objects to simplify YAML serialization
for reader_info in readers:
reader_info.pop('reader')
readers = list(_sanitize_reader_info_for_yaml(readers))
cache_contents = {
'satpy_version': satpy_version,
'readers': readers,
......@@ -97,6 +95,15 @@ def _load_satpy_readers_cache(force_refresh=None):
return cache_contents['readers']
def _sanitize_reader_info_for_yaml(readers):
# filter out known python objects to simplify YAML serialization
for reader_info in readers:
reader_info.pop('reader')
reader_info.pop('data_identification_keys', None)
reader_info['config_files'] = list(reader_info['config_files'])
yield reader_info
def _save_satpy_readers_cache(cache_contents):
"""Write reader cache information to a file on disk."""
cfile_dir = os.path.dirname(SATPY_READER_CACHE_FILE)
......@@ -977,7 +984,8 @@ class SatpyImporter(aImporter):
from uuid import uuid1
scn = self.load_all_datasets()
for ds_id, ds in scn.datasets.items():
for ds_id in scn.keys():
ds = scn[ds_id]
# don't recreate a Product for one we already have
if ds_id in existing_ids:
yield existing_ids[ds_id]
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment