Skip to content

Commit

Permalink
Update cummax and cummin tests
Browse files Browse the repository at this point in the history
  • Loading branch information
katharinastarzer21 committed Nov 21, 2024
1 parent 19fce15 commit 41124f9
Show file tree
Hide file tree
Showing 3 changed files with 75 additions and 9 deletions.
3 changes: 3 additions & 0 deletions openeo_processes_dask/process_implementations/math.py
Original file line number Diff line number Diff line change
Expand Up @@ -146,6 +146,9 @@ def cumproduct(data, ignore_nodata=True, axis=None):


def cummin(data, ignore_nodata=True):
if isinstance(data, da.Array):
data = data.compute()

nan_mask = np.isnan(data)

if ignore_nodata:
Expand Down
65 changes: 64 additions & 1 deletion tests/test_apply.py
Original file line number Diff line number Diff line change
Expand Up @@ -259,7 +259,7 @@ def test_apply_kernel(temporal_interval, bounding_box, random_raster_data):
xr.testing.assert_equal(output_cube, input_cube)


# TODO: testing cummin


@pytest.mark.parametrize("size", [(6, 5, 30, 4)])
Expand Down Expand Up @@ -328,3 +328,66 @@ def test_apply_dimension_cumproduct_process(
cumprod_total = np.sum(cumprod_data)

assert cumprod_total >= original_abs_prod


@pytest.mark.parametrize("size", [(6, 5, 30, 4)])
@pytest.mark.parametrize("dtype", [np.float32])
def test_apply_dimension_cummax_process(
    temporal_interval, bounding_box, random_raster_data, process_registry
):
    """Apply ``cummax`` over the temporal dimension of a fake raster cube.

    Reducing the cumulative maximum with ``max`` along time must dominate
    the plain per-pixel maximum of the input cube.
    """
    input_cube = create_fake_rastercube(
        data=random_raster_data,
        spatial_extent=bounding_box,
        temporal_extent=temporal_interval,
        bands=["B02", "B03", "B04", "B08"],
        backend="dask",
    )

    # Bind `data` to a parameter reference so apply_dimension can inject
    # each slice when it invokes the registered cummax implementation.
    cummax_callable = partial(
        process_registry["cummax"].implementation,
        data=ParameterReference(from_parameter="data"),
    )

    result_cube = apply_dimension(
        data=input_cube,
        process=cummax_callable,
        dimension="t",
    ).compute()

    # NOTE(review): axis=0 assumes "t" is the leading axis — confirm fixture layout.
    per_pixel_max = np.max(input_cube.data, axis=0)
    cummax_final = np.max(result_cube.data, axis=0)

    assert np.all(cummax_final >= per_pixel_max)


@pytest.mark.parametrize("size", [(6, 5, 30, 4)])
@pytest.mark.parametrize("dtype", [np.float32])
def test_apply_dimension_cummin_process(
    temporal_interval, bounding_box, random_raster_data, process_registry
):
    """Apply ``cummin`` over the temporal dimension of a fake raster cube.

    The last cumulative minimum along time equals the overall minimum, so
    reducing the result with ``min`` over time must not exceed the plain
    per-pixel minimum of the input cube.
    """
    input_cube = create_fake_rastercube(
        data=random_raster_data,
        spatial_extent=bounding_box,
        temporal_extent=temporal_interval,
        bands=["B02", "B03", "B04", "B08"],
        backend="dask",
    )

    # Bind `data` to a parameter reference so apply_dimension can inject
    # each slice when it invokes the registered cummin implementation.
    _process_cummin = partial(
        process_registry["cummin"].implementation,
        data=ParameterReference(from_parameter="data"),
    )

    output_cube_cummin = apply_dimension(
        data=input_cube,
        process=_process_cummin,
        dimension="t",
    ).compute()

    # NOTE(review): axis=0 assumes "t" is the leading axis — confirm fixture layout.
    original_abs_min = np.min(input_cube.data, axis=0)
    cummin_total = np.min(output_cube_cummin.data, axis=0)

    assert np.all(cummin_total <= original_abs_min)
16 changes: 8 additions & 8 deletions tests/test_math.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,32 +124,32 @@ def test_extrema():


def test_cumproduct():
    """cumproduct keeps the NaN slot but skips it in the running product."""
    array_list = [1, 2, 3, np.nan, 3, 1]
    # With ignore_nodata (the default) the product continues past the NaN:
    # 1, 2, 6, nan, 18, 18.
    result_np = [1, 2, 6, np.nan, 18, 18]

    result = cumproduct(array_list)
    assert np.array_equal(result_np, result, equal_nan=True)


def test_cumsum():
    """cumsum keeps the NaN slot but skips it in the running sum."""
    array_list = [1, 3, np.nan, 3, 1]
    # With ignore_nodata (the default) the sum continues past the NaN:
    # 1, 4, nan, 7, 8.
    result_np = [1, 4, np.nan, 7, 8]

    result = cumsum(array_list)
    assert np.array_equal(result_np, result, equal_nan=True)


def test_cummin():
    """cummin keeps the NaN slot but skips it in the running minimum."""
    array_list = [5, 3, np.nan, 1, 5]
    # With ignore_nodata (the default) the minimum continues past the NaN:
    # 5, 3, nan, 1, 1.
    result_np = [5, 3, np.nan, 1, 1]

    result = cummin(array_list)
    assert np.array_equal(result_np, result, equal_nan=True)


def test_cummax():
    """cummax keeps the NaN slot but skips it in the running maximum."""
    array_list = [1, 3, np.nan, 5, 1]
    # With ignore_nodata (the default) the maximum continues past the NaN:
    # 1, 3, nan, 5, 5.
    result_np = [1, 3, np.nan, 5, 5]

    result = cummax(array_list)
    assert np.array_equal(result_np, result, equal_nan=True)

0 comments on commit 41124f9

Please sign in to comment.