
Commit

test
Henry Isaacson committed May 31, 2024
1 parent ffb7f57 commit 95f59c9
Showing 14 changed files with 291 additions and 208 deletions.
@@ -16,7 +16,7 @@ def _cell_dimensions(
box_size = float(box_size)

if box_size < minimum_cell_size:
raise ValueError('Box size must be at least as large as minimum cell size.')
raise ValueError("Box size must be at least as large as minimum cell size.")

if isinstance(box_size, Tensor):
if box_size.dtype in {torch.int32, torch.int64}:
@@ -36,7 +36,9 @@ def _cell_dimensions(

for cells in flattened_cells_per_side:
if cells.item() < 3:
raise ValueError('Box must be at least 3x the size of the grid spacing in each dimension.')
raise ValueError(
"Box must be at least 3x the size of the grid spacing in each dimension."
)

cell_count = functools.reduce(
operator.mul,
@@ -45,16 +47,18 @@
)

elif box_size.dim() == 0:
cell_count = cells_per_side ** spatial_dimension
cell_count = cells_per_side**spatial_dimension

else:
raise ValueError(f'Box must be either: a scalar, a vector, or a matrix. Found {box_size}.')
raise ValueError(
f"Box must be either: a scalar, a vector, or a matrix. Found {box_size}."
)

else:
cells_per_side = math.floor(box_size / minimum_cell_size)

cell_size = box_size / cells_per_side

cell_count = cells_per_side ** spatial_dimension
cell_count = cells_per_side**spatial_dimension

return box_size, cell_size, cells_per_side, int(cell_count)
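
For reference, here is a minimal usage sketch of the function touched above, assuming the import path used by the test module later in this diff; the values in the comments follow directly from the floor division and exponentiation shown in the hunk.

from beignet.func._molecular_dynamics._partition.__cell_dimensions import (
    _cell_dimensions,
)

# Scalar box: a 10.0-wide box split into cells of at least 3.0 per side.
box_size, cell_size, cells_per_side, cell_count = _cell_dimensions(3, 10.0, 3.0)
# cells_per_side == floor(10.0 / 3.0) == 3, cell_size == 10.0 / 3,
# and cell_count == 3 ** 3 == 27.

# A box smaller than the minimum cell size is rejected:
try:
    _cell_dimensions(3, 2.0, 3.0)
except ValueError as error:
    print(error)  # Box size must be at least as large as minimum cell size.
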
@@ -8,4 +8,3 @@ def _cell_size(box: Tensor, minimum_unit_size: Tensor) -> Tensor:

else:
raise ValueError("Box and minimum unit size must be of the same shape.")

@@ -33,18 +33,15 @@ def _hash_constants(spatial_dimensions: int, cells_per_side: Tensor) -> Tensor:
If the size of `cells_per_side` is not zero or `spatial_dimensions`.
"""
if cells_per_side.numel() == 1:
constants = [
cells_per_side ** dim for dim in range(spatial_dimensions)
]
constants = [cells_per_side**dim for dim in range(spatial_dimensions)]
return torch.tensor([constants], dtype=torch.int32)

elif cells_per_side.numel() == spatial_dimensions:
one = torch.tensor([[1]], dtype=torch.int32)
cells_per_side = torch.cat(
(one, cells_per_side[:-1].unsqueeze(0)),
dim=1
)
cells_per_side = torch.cat((one, cells_per_side[:-1].unsqueeze(0)), dim=1)
return torch.cumprod(cells_per_side.flatten(), dim=0)

else:
raise ValueError("Cells per side must either: have 0 dimensions, be the same size as spatial dimensions.")
raise ValueError(
"Cells per side must either: have 0 dimensions, be the same size as spatial dimensions."
)
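
The two branches above correspond to the two shapes of `cells_per_side` exercised by the parametrized test further down in this diff; a short sketch of the expected outputs, taken from that test, is:

import torch

from beignet.func._molecular_dynamics._partition.__hash_constants import _hash_constants

# A single cells-per-side value: successive powers, one entry per spatial dimension.
_hash_constants(3, torch.tensor([4], dtype=torch.int32))
# tensor([[ 1,  4, 16]], dtype=torch.int32)

# One value per dimension: cumulative products with a leading 1.
_hash_constants(3, torch.tensor([4, 4, 4], dtype=torch.int32))
# tensor([ 1,  4, 16])

# Any other number of elements raises the ValueError above.
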
40 changes: 21 additions & 19 deletions src/beignet/func/_molecular_dynamics/_partition/__segment_sum.py
@@ -12,31 +12,33 @@ def _segment_sum(
**kwargs,
) -> Tensor:
"""
Computes the sum of segments of a tensor along the first dimension.
Parameters
----------
input : Tensor
A tensor containing the input values to be summed.
indexes : Tensor
A 1D tensor containing the segment indexes for summation.
Should have the same length as the first dimension of the `input` tensor.
n : Optional[int], optional
The number of segments, by default `n` is set to `max(indexes) + 1`.
Returns
-------
Tensor
A tensor where each entry contains the sum of the corresponding segment
from the `input` tensor.
"""
Computes the sum of segments of a tensor along the first dimension.
Parameters
----------
input : Tensor
A tensor containing the input values to be summed.
indexes : Tensor
A 1D tensor containing the segment indexes for summation.
Should have the same length as the first dimension of the `input` tensor.
n : Optional[int], optional
The number of segments, by default `n` is set to `max(indexes) + 1`.
Returns
-------
Tensor
A tensor where each entry contains the sum of the corresponding segment
from the `input` tensor.
"""
if indexes.ndim == 1:
indexes = torch.repeat_interleave(indexes, math.prod([*input.shape[1:]])).view(
*[indexes.shape[0], *input.shape[1:]]
)

if input.size(0) != indexes.size(0):
raise ValueError("The length of the indexes tensor must match the size of the first dimension of the input tensor.")
raise ValueError(
"The length of the indexes tensor must match the size of the first dimension of the input tensor."
)

if n is None:
n = indexes.max().item() + 1
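
Because the remainder of the implementation is collapsed in this view, here is a small, hedged usage sketch of `_segment_sum` based only on the docstring above, with the expected segment-wise sums written as a comment rather than captured output.

import torch

from beignet.func._molecular_dynamics._partition.__segment_sum import _segment_sum

values = torch.tensor([1.0, 2.0, 3.0, 4.0])
segments = torch.tensor([0, 0, 1, 1])

# Entries 0 and 1 fall into segment 0 and entries 2 and 3 into segment 1,
# so the expected result is tensor([3., 7.]).
result = _segment_sum(values, segments, n=2)
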
1 change: 0 additions & 1 deletion src/beignet/func/_molecular_dynamics/_partition/__shift.py
@@ -24,4 +24,3 @@ def _shift(a: Tensor, b: Tensor) -> Tensor:
"""

return torch.roll(a, shifts=tuple(b), dims=tuple(range(len(b))))
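
Since `_shift` is just the `torch.roll` call above, a quick sketch of its effect (assuming the module path given by the file name) looks like this:

import torch

from beignet.func._molecular_dynamics._partition.__shift import _shift

a = torch.arange(9).reshape(3, 3)
b = torch.tensor([1, 0])  # one step along dim 0, none along dim 1

shifted = _shift(a, b)
# Every row of `a` moves down by one position; the last row wraps to the top.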

26 changes: 17 additions & 9 deletions tests/beignet/func/test__cell_dimensions.py
@@ -5,8 +5,9 @@
import torch
from torch import Tensor

from beignet.func._molecular_dynamics._partition.__cell_dimensions import \
_cell_dimensions
from beignet.func._molecular_dynamics._partition.__cell_dimensions import (
_cell_dimensions,
)


@st.composite
@@ -19,8 +20,11 @@ def _cell_dimensions_strategy(draw):
st.floats(min_value=3.0, max_value=10.0),
st.lists(
st.floats(min_value=3.0, max_value=10.0),
min_size=spatial_dimension, max_size=spatial_dimension
).map(torch.tensor).map(lambda x: x.float()),
min_size=spatial_dimension,
max_size=spatial_dimension,
)
.map(torch.tensor)
.map(lambda x: x.float()),
)
)

@@ -39,7 +43,9 @@ def _cell_dimensions_strategy(draw):
(0, torch.tensor([100]), 10.0, AssertionError),
],
)
def test_cell_dimensions_exceptions(spatial_dimension, box_size, minimum_cell_size, expected_exception):
def test_cell_dimensions_exceptions(
spatial_dimension, box_size, minimum_cell_size, expected_exception
):
if expected_exception is not None:
with pytest.raises(expected_exception):
_cell_dimensions(spatial_dimension, box_size, minimum_cell_size)
@@ -64,9 +70,7 @@ def test__cell_dimensions(data):
return

box_size_out, cell_size, cells_per_side, cell_count = _cell_dimensions(
spatial_dimension,
box_size,
minimum_cell_size
spatial_dimension, box_size, minimum_cell_size
)

if isinstance(box_size, (int, float)):
@@ -79,6 +83,10 @@

torch.testing.assert_allclose(box_size / cells_per_side.float(), cell_size)

expected_cell_count = int(torch.prod(cells_per_side).item()) if isinstance(cells_per_side, Tensor) else int(cells_per_side ** spatial_dimension)
expected_cell_count = (
int(torch.prod(cells_per_side).item())
if isinstance(cells_per_side, Tensor)
else int(cells_per_side**spatial_dimension)
)

assert cell_count == expected_cell_count
21 changes: 15 additions & 6 deletions tests/beignet/func/test__cell_size.py
@@ -3,22 +3,31 @@
import pytest
from hypothesis import given

from beignet.func._molecular_dynamics._partition.__cell_size import \
_cell_size
from beignet.func._molecular_dynamics._partition.__cell_size import _cell_size


@st.composite
def _cell_size_strategy(draw):
shape = draw(st.integers(min_value=1, max_value=10))

box = torch.tensor(
draw(st.lists(st.floats(min_value=1.0, max_value=100.0), min_size=shape, max_size=shape)),
dtype=torch.float32
draw(
st.lists(
st.floats(min_value=1.0, max_value=100.0),
min_size=shape,
max_size=shape,
)
),
dtype=torch.float32,
)

minimum_unit_size = torch.tensor(
draw(st.lists(st.floats(min_value=1.0, max_value=10.0), min_size=shape, max_size=shape)),
dtype=torch.float32
draw(
st.lists(
st.floats(min_value=1.0, max_value=10.0), min_size=shape, max_size=shape
)
),
dtype=torch.float32,
)

return box, minimum_unit_size
44 changes: 29 additions & 15 deletions tests/beignet/func/test__hash_constants.py
@@ -3,8 +3,7 @@
import torch
from hypothesis import given

from beignet.func._molecular_dynamics._partition.__hash_constants import \
_hash_constants
from beignet.func._molecular_dynamics._partition.__hash_constants import _hash_constants


@st.composite
@@ -14,10 +13,13 @@ def _hash_constants_strategy(draw):
cells_per_side = draw(
st.one_of(
st.integers(min_value=1, max_value=10).map(
lambda x: torch.tensor([x], dtype=torch.int32)),
st.lists(st.integers(min_value=1, max_value=10),
min_size=spatial_dimensions, max_size=spatial_dimensions)
.map(lambda x: torch.tensor(x, dtype=torch.int32))
lambda x: torch.tensor([x], dtype=torch.int32)
),
st.lists(
st.integers(min_value=1, max_value=10),
min_size=spatial_dimensions,
max_size=spatial_dimensions,
).map(lambda x: torch.tensor(x, dtype=torch.int32)),
)
)

@@ -27,15 +29,24 @@
@pytest.mark.parametrize(
"spatial_dimensions, cells_per_side, expected_result, expected_exception",
[
(3, torch.tensor([4], dtype=torch.int32),
torch.tensor([[1, 4, 16]], dtype=torch.int32), None),
(3, torch.tensor([4, 4, 4], dtype=torch.int32),
torch.tensor([1, 4, 16], dtype=torch.int32), None),
(
3,
torch.tensor([4], dtype=torch.int32),
torch.tensor([[1, 4, 16]], dtype=torch.int32),
None,
),
(
3,
torch.tensor([4, 4, 4], dtype=torch.int32),
torch.tensor([1, 4, 16], dtype=torch.int32),
None,
),
(3, torch.tensor([4, 4], dtype=torch.int32), None, ValueError),
],
)
def test_hash_constants(spatial_dimensions, cells_per_side, expected_result,
expected_exception):
def test_hash_constants(
spatial_dimensions, cells_per_side, expected_result, expected_exception
):
if expected_exception is not None:
with pytest.raises(expected_exception):
_hash_constants(spatial_dimensions, cells_per_side)
@@ -52,7 +63,8 @@ def test__hash_constants(data):
if cells_per_side.numel() == 1:
expected_result = torch.tensor(
[[cells_per_side.item() ** i for i in range(spatial_dimensions)]],
dtype=torch.int32)
dtype=torch.int32,
)
else:
if cells_per_side.numel() != spatial_dimensions:
with pytest.raises(ValueError):
@@ -61,8 +73,10 @@ def test__hash_constants(data):
return

augmented = torch.cat(
(torch.tensor([1], dtype=torch.int32).view(1, 1),
cells_per_side[:-1].view(1, -1)),
(
torch.tensor([1], dtype=torch.int32).view(1, 1),
cells_per_side[:-1].view(1, -1),
),
dim=1,
)

26 changes: 18 additions & 8 deletions tests/beignet/func/test__iota.py
@@ -11,15 +11,25 @@ def _iota_strategy(draw):
max_dimensions = 5
dim = draw(st.integers(min_value=0, max_value=max_dimensions - 1))

shape = tuple(draw(
st.lists(st.integers(min_value=1, max_value=10), min_size=1,
max_size=max_dimensions)))
shape = tuple(
draw(
st.lists(
st.integers(min_value=1, max_value=10),
min_size=1,
max_size=max_dimensions,
)
)
)

kwargs = {
"dtype": draw(st.sampled_from(
[torch.int32, torch.int64, torch.float32, torch.float64])),
"device": draw(st.sampled_from(
["cpu", "cuda"]) if torch.cuda.is_available() else st.just("cpu"))
"dtype": draw(
st.sampled_from([torch.int32, torch.int64, torch.float32, torch.float64])
),
"device": draw(
st.sampled_from(["cpu", "cuda"])
if torch.cuda.is_available()
else st.just("cpu")
),
}

return shape, dim, kwargs
@@ -60,7 +70,7 @@ def test__iota(data):
if len(shape) > 1:
assert torch.equal(
result.select(dim, idx),
torch.tensor(idx, **kwargs).expand(*result.select(dim, idx).shape)
torch.tensor(idx, **kwargs).expand(*result.select(dim, idx).shape),
)
else:
assert result[idx].item() == idx
