diff --git a/tests/beignet/features/conftest.py b/tests/beignet/features/conftest.py
deleted file mode 100644
index 72e2b5edd5..0000000000
--- a/tests/beignet/features/conftest.py
+++ /dev/null
@@ -1,8 +0,0 @@
-import pytest
-import torch
-from beignet.features._feature import Feature
-
-
-@pytest.fixture
-def feature() -> Feature:
-    return Feature(torch.tensor([1, 2, 3]))
diff --git a/tests/beignet/features/test__feature.py b/tests/beignet/features/test__feature.py
index 02d30af4ac..6262f9089d 100644
--- a/tests/beignet/features/test__feature.py
+++ b/tests/beignet/features/test__feature.py
@@ -6,18 +6,15 @@
 
 
 class TestFeature:
-    def test__to_tensor(self, feature: Feature):
-        result = Feature._to_tensor([1, 2, 3])
+    def test___deepcopy__(self):
+        feature = Feature(torch.tensor([1, 2, 3]))
 
-        assert torch.is_tensor(result)
-
-        assert not result.requires_grad
-
-    def test_wrap_like(self):
         with pytest.raises(NotImplementedError):
-            Feature.wrap_like(None, None)
+            copy.deepcopy(feature)
+
+    def test___torch_function__(self):
+        feature = Feature(torch.tensor([1, 2, 3]))
 
-    def test___torch_function__(self, feature: Feature):
         result = feature.__torch_function__(
             torch.add,
             (Feature, torch.Tensor),
@@ -26,18 +23,29 @@ def test___torch_function__(self, feature: Feature):
 
         assert not isinstance(result, Feature)
 
-    def test_device(self, feature: Feature):
+    def test__to_tensor(self):
+        feature = Feature._to_tensor([1, 2, 3])
+
+        assert torch.is_tensor(feature)
+
+        assert not feature.requires_grad
+
+    def test_device(self):
+        feature = Feature(torch.tensor([1, 2, 3]))
+
         assert feature.device == feature.device
 
-    def test_ndim(self, feature: Feature):
-        assert feature.ndim == 1
+    def test_dtype(self):
+        assert Feature(torch.tensor([1, 2, 3])).dtype == torch.int64
 
-    def test_dtype(self, feature: Feature):
-        assert feature.dtype == torch.int64
+    def test_ndim(self):
+        assert Feature(torch.tensor([1, 2, 3])).ndim == 1
 
-    def test_shape(self, feature: Feature):
-        assert feature.shape == (3,)
+    def test_shape(self):
+        assert Feature(torch.tensor([1, 2, 3])).shape == (3,)
 
-    def test___deepcopy__(self, feature: Feature):
+    def test_wrap_like(self):
         with pytest.raises(NotImplementedError):
-            copy.deepcopy(feature)
+            feature = Feature(torch.tensor([1, 2, 3]))
+
+            Feature.wrap_like(feature, torch.tensor([1, 2, 3]))