Skip to content

Commit

Permalink
Merge pull request #229 from davidh-ssec/bugfix-mod-with-wl-dep
Browse files Browse the repository at this point in the history
Fix bug in dep tree when modifier deps are modified wavelengths
  • Loading branch information
djhoese authored Mar 17, 2018
2 parents 547819c + a948947 commit f19ae9b
Show file tree
Hide file tree
Showing 4 changed files with 78 additions and 2 deletions.
30 changes: 28 additions & 2 deletions satpy/node.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@

import logging

from satpy import DatasetDict, DatasetID
from satpy import DatasetDict, DatasetID, DATASET_KEYS

LOG = logging.getLogger(__name__)

Expand Down Expand Up @@ -320,6 +320,27 @@ def _get_compositor_prereqs(self, parent, prereq_names, skip=False,
self.add_child(parent, n)
return prereq_ids, unknowns

def _update_modifier_key(self, orig_key, dep_key):
    """Fully specify a modified dataset key using its dependency's key.

    Typical use case is requesting a modified dataset (``orig_key``)
    that depends on a less-modified dataset (``dep_key``). The
    less-modified dataset must come from a reader (at least for now) or
    will eventually depend on a reader dataset. The original request key
    may be underspecified, like ``(wavelength=0.67, modifiers=('a', 'b'))``,
    while the reader-based key should have all of its properties
    specified. Copying those properties on to the request key reduces
    the chance of duplicate, non-unique Nodes in the dependency tree.
    """
    updated = orig_key._asdict()
    reference = dep_key._asdict()
    # Copy every identifying field from the dependency, but keep the
    # requested modifiers ('modifiers' is the last entry in DATASET_KEYS).
    updated.update((field, reference[field]) for field in DATASET_KEYS[:-1])
    return DatasetID.from_dict(updated)

def _find_compositor(self, dataset_key, calibration=None,
polarization=None, resolution=None):
"""Find the compositor object for the given dataset_key."""
Expand All @@ -330,7 +351,12 @@ def _find_compositor(self, dataset_key, calibration=None,
if isinstance(dataset_key, DatasetID) and dataset_key.modifiers:
new_prereq = DatasetID(
*dataset_key[:-1] + (dataset_key.modifiers[:-1],))
src_node, u = self._find_dependencies(new_prereq, calibration, polarization, resolution)
src_node, u = self._find_dependencies(new_prereq, calibration,
polarization, resolution)
# Update the requested DatasetID with information from the src
if src_node is not None:
dataset_key = self._update_modifier_key(dataset_key,
src_node.name)
if u:
return None, u

Expand Down
1 change: 1 addition & 0 deletions satpy/scene.py
Original file line number Diff line number Diff line change
Expand Up @@ -617,6 +617,7 @@ def unload(self, keepables=None):
if ds_id not in self.wishlist and (not keepables or ds_id
not in keepables)]
for ds_id in to_del:
LOG.debug("Unloading dataset: %r", ds_id)
del self.datasets[ds_id]

def load(self,
Expand Down
48 changes: 48 additions & 0 deletions satpy/tests/test_scene.py
Original file line number Diff line number Diff line change
Expand Up @@ -1193,6 +1193,54 @@ def test_no_generate_comp10(self, cri, cl):
self.assertIn('comp10', scene.datasets)
self.assertEqual(len(scene.missing_datasets), 0)

@mock.patch('satpy.composites.CompositorLoader.load_compositors')
@mock.patch('satpy.scene.Scene.create_reader_instances')
def test_modified_with_wl_dep(self, cri, cl):
    """Test loading a modified dataset whose modifier has modified dependencies.

    More importantly, test that when the modifier's dependency is loaded
    at the same time as the originally requested modified dataset, the
    dependency tree nodes are unique and their DatasetIDs are fully
    specified (name and wavelength filled in from the reader).
    """
    import satpy.scene
    from satpy.tests.utils import create_fake_reader, test_composites
    from satpy import DatasetID
    # Fake reader and fake compositor/modifier configs for 'fake_sensor'
    cri.return_value = {'fake_reader': create_fake_reader(
        'fake_reader', 'fake_sensor')}
    comps, mods = test_composites('fake_sensor')
    cl.return_value = (comps, mods)
    scene = satpy.scene.Scene(filenames=['bla'],
                              base_dir='bli',
                              reader='fake_reader')

    # Check dependency tree nodes
    # initialize the dep tree without loading the data
    ds1_mod_id = DatasetID(name='ds1', modifiers=('mod_wl',))
    ds3_mod_id = DatasetID(name='ds3', modifiers=('mod_wl',))
    scene.dep_tree.find_dependencies({ds1_mod_id, ds3_mod_id})
    ds1_mod_node = scene.dep_tree[ds1_mod_id]
    ds3_mod_node = scene.dep_tree[ds3_mod_id]
    # second child of each modified node is the 'mod_wl' modifier's dep
    ds1_mod_dep_node = ds1_mod_node.data[1][1]
    ds3_mod_dep_node = ds3_mod_node.data[1][1]
    # 'mod_wl' depends on this node:
    ds6_modded_node = scene.dep_tree[DatasetID(name='ds6', modifiers=('mod1',))]
    # this dep should be fully qualified with both name and wavelength
    self.assertIsNotNone(ds6_modded_node.name.name)
    self.assertIsNotNone(ds6_modded_node.name.wavelength)
    self.assertEqual(len(ds6_modded_node.name.wavelength), 3)
    # the node should be shared between everything that uses it
    self.assertIs(ds1_mod_dep_node, ds3_mod_dep_node)
    self.assertIs(ds1_mod_dep_node, ds6_modded_node)

    # loading both modified datasets together must not create duplicates
    scene.load([ds1_mod_id, ds3_mod_id])

    # only the two requested modified datasets should be loaded
    loaded_ids = list(scene.datasets.keys())
    self.assertEqual(len(loaded_ids), 2)
    self.assertIn(ds1_mod_id, scene.datasets)
    self.assertIn(ds3_mod_id, scene.datasets)


class TestSceneResampling(unittest.TestCase):

Expand Down
1 change: 1 addition & 0 deletions satpy/tests/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,6 +165,7 @@ def test_composites(sensor_name):
'mod_opt_prereq': (['ds1'], ['ds2']),
'mod_bad_opt': (['ds1'], ['ds9_fail_load']),
'mod_opt_only': ([], ['ds2']),
'mod_wl': ([DatasetID(wavelength=0.2, modifiers=('mod1',))], []),
}

comps = {sensor_name: DatasetDict((k, _create_fake_compositor(k, *v)) for k, v in comps.items())}
Expand Down

0 comments on commit f19ae9b

Please sign in to comment.