Reduce test warnings #202

Merged · 20 commits · Sep 11, 2024
continuous_integration/environment.yaml (2 changes: 1 addition & 1 deletion)
@@ -6,7 +6,7 @@ dependencies:
  - pytest
  - pytest-cov
  - pyyaml
-  - dpath
+  - dpath>=2.1.0
  - trollsift
  - numpy
  - satpy>=0.32.0
setup.py (2 changes: 1 addition & 1 deletion)
@@ -36,7 +36,7 @@
    pass


-install_requires = ['pyyaml', 'dpath', 'trollsift', 'posttroll>=1.10.0']
+install_requires = ['pyyaml', 'dpath>=2.1.0', 'trollsift', 'posttroll>=1.10.0']

if "test" not in sys.argv:
    install_requires += ['satpy>=0.32.0', 'pyorbital']
trollflow2/__init__.py (9 changes: 2 additions & 7 deletions)
@@ -25,14 +25,9 @@
# are not necessary
"""Base module for trollflow2."""

+from importlib.metadata import version
from multiprocessing import Manager

-from pkg_resources import DistributionNotFound, get_distribution
-
MP_MANAGER = Manager()

-try:
-    __version__ = get_distribution(__name__).version
-except DistributionNotFound:
-    # package is not installed
-    pass
+__version__ = version(__name__)
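
Note on the version lookup above: importlib.metadata.version (standard library since Python 3.8) returns the installed distribution's version string directly, replacing the pkg_resources pattern that setuptools now warns about. A minimal sketch of an equivalent guarded lookup, assuming the package might also be run from an uninstalled checkout (the fallback string is illustrative, not part of this PR):

import importlib.metadata

try:
    # Read the version recorded in the installed distribution's metadata.
    __version__ = importlib.metadata.version("trollflow2")
except importlib.metadata.PackageNotFoundError:
    # Running from a source checkout that was never installed.
    __version__ = "0+unknown"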
trollflow2/dict_tools.py (6 changes: 3 additions & 3 deletions)
@@ -20,7 +20,7 @@
# are not necessary
"""Tools for product list operations."""

-import dpath.util
+import dpath


def plist_iter(product_list, base_mda=None, level=None):
@@ -88,11 +88,11 @@ def get_config_value(config, path, key, default=None):
    num = len(path_parts)
    for i in range(num, 1, -1):
        pwd = "/".join(path_parts[:i] + [key])
-        vals = dpath.util.values(config, pwd)
+        vals = dpath.values(config, pwd)
        if len(vals) > 0:
            return vals[0]

-    vals = dpath.util.values(config, "/common/" + key)
+    vals = dpath.values(config, "/common/" + key)
    if len(vals) > 0:
        return vals[0]

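The dpath>=2.1.0 pin in environment.yaml and setup.py matches this change: dpath 2.1 promotes the helpers from dpath.util to the top-level dpath namespace and deprecates the old module, which is one source of the warnings this PR removes. A minimal sketch of the two calls used here, with a made-up config dict for illustration:

import dpath

# Illustrative product-list fragment, not taken from the repository.
config = {
    "common": {"fname_pattern": "{platform_name}_{start_time}.tif"},
    "product_list": {"areas": {"euron1": {"products": {"overview": {}}}}},
}

# Glob-style lookups now live directly on the dpath module.
print(dpath.values(config, "/common/fname_pattern"))
# ['{platform_name}_{start_time}.tif']

# Deleting a subtree works the same way dpath.util.delete did.
dpath.delete(config, "/product_list/areas/euron1")
print(config["product_list"]["areas"])
# {}
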
trollflow2/plugins/__init__.py (24 changes: 11 additions & 13 deletions)
@@ -33,14 +33,14 @@

import dask
import dask.array as da
-import dpath.util
+import dpath
import rasterio
from dask.delayed import Delayed
from posttroll.message import Message
from posttroll.publisher import create_publisher_from_dict_config
from pyorbital.astronomy import sun_zenith_angle
from pyresample.area_config import AreaNotFound
-from pyresample.boundary import AreaDefBoundary, Boundary
+from pyresample.boundary import Boundary
from pyresample.geometry import get_geostationary_bounding_box
from rasterio.enums import Resampling
from satpy import Scene
@@ -92,8 +92,6 @@ def create_scene(job):

def load_composites(job):
    """Load composites given in the job's product_list."""
-    # composites = set().union(*(set(d.keys())
-    #                            for d in dpath.util.values(job['product_list'], '/product_list/areas/*/products')))
    composites_by_res = {}
    for flat_prod_cfg, _prod_cfg in plist_iter(job['product_list']['product_list'], level='product'):
        res = flat_prod_cfg.get('resolution', DEFAULT)
@@ -649,7 +647,7 @@ def _check_overall_coverage_for_area(
            "Area coverage %.2f %% below threshold %.2f %%",
            cov, min_coverage)
        logger.info("Removing area %s from the worklist", area)
-        dpath.util.delete(product_list, area_path)
+        dpath.delete(product_list, area_path)

    else:
        logger.debug(f"Area coverage {cov:.2f}% above threshold "
@@ -686,7 +684,7 @@ def check_metadata(job):
                           key)
            continue
        if key == 'start_time':
-            time_diff = dt.datetime.utcnow() - mda[key]
+            time_diff = dt.datetime.now(dt.timezone.utc) - mda[key]
            if time_diff > abs(dt.timedelta(minutes=val)):
                age = "older" if val < 0 else "newer"
                raise AbortProcessing(
@@ -748,7 +746,7 @@ def sza_check(job):
                if sunzen < limit:
                    logger.info("Sun zenith angle too small for nighttime "
                                "product '%s', product removed.", product)
-                    dpath.util.delete(product_list, prod_path)
+                    dpath.delete(product_list, prod_path)
                    continue

            # Check daytime limit
@@ -758,12 +756,12 @@
                if sunzen > limit:
                    logger.info("Sun zenith angle too large for daytime "
                                "product '%s', product removed.", product)
-                    dpath.util.delete(product_list, prod_path)
+                    dpath.delete(product_list, prod_path)
                    continue

        if len(product_list['product_list']['areas'][area]['products']) == 0:
            logger.info("Removing empty area: %s", area)
-            dpath.util.delete(product_list, '/product_list/areas/%s' % area)
+            dpath.delete(product_list, '/product_list/areas/%s' % area)


def check_sunlight_coverage(job):
@@ -843,22 +841,22 @@ def check_sunlight_coverage(job):
                    logger.info("Not enough sunlight coverage for "
                                f"product '{product!s}', removed. Needs at least "
                                f"{min_day:.1f}%, got {coverage[check_pass]:.1%}.")
-                    dpath.util.delete(product_list, prod_path)
+                    dpath.delete(product_list, prod_path)
                if max_day is not None and coverage[check_pass] > (max_day / 100.0):
                    logger.info("Too much sunlight coverage for "
                                f"product '{product!s}', removed. Needs at most "
                                f"{max_day:.1f}%, got {coverage[check_pass]:.1%}.")
-                    dpath.util.delete(product_list, prod_path)
+                    dpath.delete(product_list, prod_path)


def _get_sunlight_coverage(area_def, start_time, overpass=None):
    """Get the sunlight coverage of *area_def* at *start_time* as a value between 0 and 1."""
-    if area_def.proj_dict.get('proj') == 'geos':
+    if area_def.is_geostationary:
        adp = Boundary(
            *get_geostationary_bounding_box(area_def,
                                            nb_points=100)).contour_poly
    else:
-        adp = AreaDefBoundary(area_def, frequency=100).contour_poly
+        adp = area_def.boundary(vertices_per_side=100).contour_poly
    poly = get_twilight_poly(start_time)
    if overpass is not None:
        ovp = overpass.boundary.contour_poly
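The _get_sunlight_coverage change swaps two pyresample constructs: the proj == 'geos' string check becomes the AreaDefinition.is_geostationary property, and the removed AreaDefBoundary helper is replaced by the area's own boundary() method. A minimal sketch of the new calls, assuming a pyresample recent enough to accept vertices_per_side; the area parameters below are illustrative only:

from pyresample.geometry import AreaDefinition

# Small illustrative polar-stereographic area (any non-geostationary area works).
area = AreaDefinition(
    "euron1", "Northern Europe", "euron1",
    {"proj": "stere", "lat_0": 90.0, "lon_0": 0.0, "lat_ts": 60.0, "ellps": "WGS84"},
    100, 100,
    (-1000000.0, -4500000.0, 2000000.0, -1500000.0),
)

print(area.is_geostationary)  # False for a stereographic area

# boundary() replaces AreaDefBoundary(area, frequency=...); contour_poly is a
# spherical polygon that can be intersected with the twilight polygon.
adp = area.boundary(vertices_per_side=100).contour_poly
print(adp.area())  # polygon area on the unit sphere
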
trollflow2/tests/test_trollflow2.py (28 changes: 16 additions & 12 deletions)
@@ -285,7 +285,7 @@
!!python/object:trollflow2.plugins.FilePublisher {port: 40002, nameservers: [localhost]}
"""

-SCENE_START_TIME = dt.datetime.utcnow()
+SCENE_START_TIME = dt.datetime.now(dt.timezone.utc)
SCENE_END_TIME = SCENE_START_TIME + dt.timedelta(minutes=15)
JOB_INPUT_MDA_START_TIME = SCENE_START_TIME + dt.timedelta(seconds=10)

@@ -712,14 +712,18 @@ def test_save_datasets_callback(tmp_path, caplog, fake_scene):

    def testlog(obj, targs, job, fmat_config):
        """Toy function doing some logging."""
+        import warnings
+
        filename = fmat_config["filename"]
        # ensure computation has indeed completed and file was flushed
        p = pathlib.Path(filename)
        logger.info(f"Wrote {filename} successfully, {p.stat().st_size:d} bytes")
        assert p.exists()
-        with rasterio.open(filename) as src:
-            arr = src.read(1)
-        assert arr[5, 5] == 142
+        with warnings.catch_warnings():
+            warnings.filterwarnings("ignore", message="Dataset has no geotransform")
+            with rasterio.open(filename) as src:
+                arr = src.read(1)
+            assert arr[5, 5] == 142
        return obj

    form = [
@@ -1110,21 +1114,20 @@ def setUp(self):
    def test_coverage(self):
        """Test sunlight coverage."""
        from trollflow2.plugins import _get_sunlight_coverage
-        with mock.patch('trollflow2.plugins.AreaDefBoundary') as area_def_boundary, \
-                mock.patch('trollflow2.plugins.Boundary') as boundary, \
+        with mock.patch('trollflow2.plugins.Boundary') as boundary, \
                mock.patch('trollflow2.plugins.get_twilight_poly'), \
                mock.patch('trollflow2.plugins.get_area_def'), \
                mock.patch('trollflow2.plugins.get_geostationary_bounding_box'):

-            area_def_boundary.return_value.contour_poly.intersection.return_value.area.return_value = 0.02
            boundary.return_value.contour_poly.intersection.return_value.area.return_value = 0.02
-            area_def_boundary.return_value.contour_poly.area.return_value = 0.2
+            adef = mock.MagicMock(is_geostationary=False)
+            adef.boundary.return_value.contour_poly.intersection.return_value.area.return_value = 0.02
+            adef.boundary.return_value.contour_poly.area.return_value = 0.2
            start_time = dt.datetime(2019, 4, 7, 20, 8)
-            adef = mock.MagicMock(proj_dict={'proj': 'stere'})
            res = _get_sunlight_coverage(adef, start_time)
            np.testing.assert_allclose(res, 0.1)
            boundary.assert_not_called()
-            adef = mock.MagicMock(proj_dict={'proj': 'geos'})
+            adef = mock.MagicMock(is_geostationary=True)
            res = _get_sunlight_coverage(adef, start_time)
            boundary.assert_called()

@@ -1552,7 +1555,7 @@ def test_discard_old_data(self):
        from trollflow2.plugins import AbortProcessing, check_metadata
        with mock.patch('trollflow2.plugins.get_config_value') as get_config_value:
            get_config_value.return_value = None
-            job = {'product_list': None, 'input_mda': {'start_time': dt.datetime(2020, 3, 18)}}
+            job = {'product_list': None, 'input_mda': {'start_time': dt.datetime(2020, 3, 18, tzinfo=dt.timezone.utc)}}
            assert check_metadata(job) is None
            get_config_value.return_value = {'start_time': -20e6}
            assert check_metadata(job) is None
@@ -1564,7 +1567,8 @@ def test_discard_new_data(self):
        """Test that new data are discarded."""
        from trollflow2.plugins import AbortProcessing, check_metadata
        with mock.patch('trollflow2.plugins.get_config_value') as get_config_value:
-            job = {'product_list': None, 'input_mda': {'start_time': dt.datetime.utcnow() - dt.timedelta(minutes=90)}}
+            job = {'product_list': None,
+                   'input_mda': {'start_time': dt.datetime.now(dt.timezone.utc) - dt.timedelta(minutes=90)}}
            get_config_value.return_value = {'start_time': +60}
            with self.assertRaises(AbortProcessing):
                check_metadata(job)
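The datetime changes in these tests mirror the plugin code: datetime.utcnow() is deprecated since Python 3.12 and yields naive objects, which cannot be compared against timezone-aware ones. A minimal sketch of the timezone-aware pattern used throughout the PR (variable names are illustrative):

import datetime as dt

# Timezone-aware "now"; dt.datetime.utcnow() would return a naive object and
# is deprecated since Python 3.12.
start_time = dt.datetime.now(dt.timezone.utc) - dt.timedelta(minutes=90)
max_age = dt.timedelta(minutes=60)

# Both operands are aware, so the subtraction and comparison are well defined;
# mixing naive and aware datetimes would raise TypeError instead.
if dt.datetime.now(dt.timezone.utc) - start_time > max_age:
    print("input data too old, aborting")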