Solving most of the warnings that are printed while running the tests. Only one remains, which would need further investigation, as the behavior will change in pandas 3.0.
ElliotMaitre committed Jun 6, 2024
1 parent 0235a90 commit c08d0d1
Showing 7 changed files with 20 additions and 18 deletions.
@@ -12,7 +12,7 @@ def __init__(
anchor_sizes=((32,), (64,), (128,), (256,), (512,)),
):
assert backbone_str == "resnet50-fpn"
backbone = resnet_fpn_backbone("resnet50", pretrained=False)
backbone = resnet_fpn_backbone(backbone_name="resnet50", weights=None)

aspect_ratios = ((0.5, 1.0, 2.0),) * len(anchor_sizes)
rpn_anchor_generator = AnchorGenerator(anchor_sizes, aspect_ratios)
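For context, newer torchvision releases (roughly 0.13 and later) replaced the boolean pretrained flag of resnet_fpn_backbone with a weights argument, which is presumably the deprecation warning this hunk silences. A minimal sketch of the old-vs-new call, assuming such a torchvision version; the pretrained example shown is illustrative and not part of this commit:

# Minimal sketch, assuming torchvision >= 0.13 where `pretrained` was replaced by `weights`.
from torchvision.models import ResNet50_Weights
from torchvision.models.detection.backbone_utils import resnet_fpn_backbone

# Random initialization, equivalent to the old pretrained=False:
backbone = resnet_fpn_backbone(backbone_name="resnet50", weights=None)

# ImageNet-pretrained weights, equivalent to the old pretrained=True:
backbone_pretrained = resnet_fpn_backbone(
    backbone_name="resnet50",
    weights=ResNet50_Weights.IMAGENET1K_V1,
)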
@@ -288,7 +288,8 @@ def lambd(batch):
gamma=0.1,
)
lr_scheduler.last_epoch = start_epoch - 1
-lr_scheduler.step()
+# This led to a warning in newer versions of PyTorch
+# lr_scheduler.step()

for epoch in range(start_epoch, end_epoch):
meters_train = defaultdict(AverageValueMeter)
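The commented-out call above relates to PyTorch warning when lr_scheduler.step() is called before any optimizer.step(), as the old resume logic did. A minimal, self-contained sketch of the expected ordering, with a hypothetical model and optimizer rather than the project's training loop:

import torch

model = torch.nn.Linear(4, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=2, gamma=0.1)

for epoch in range(4):
    optimizer.zero_grad()
    loss = model(torch.randn(8, 4)).sum()
    loss.backward()
    optimizer.step()   # update the parameters first ...
    scheduler.step()   # ... then advance the learning-rate schedule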
2 changes: 1 addition & 1 deletion happypose/toolbox/inference/utils.py
@@ -178,7 +178,7 @@ def create_instance_id(df: pd.DataFrame) -> pd.DataFrame:

df = inputs.infos
df = df.groupby(["batch_im_id", "label"], group_keys=False).apply(
-lambda df: create_instance_id(df),
+lambda df: create_instance_id(df), include_groups=True
)
inputs.infos = df
return inputs
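Passing include_groups explicitly addresses the pandas 2.2 deprecation of DataFrameGroupBy.apply operating on the grouping columns. This is presumably the one remaining warning mentioned in the commit message: include_groups=True keeps the current behaviour, but pandas still warns that it changes in 3.0. A minimal sketch with a made-up frame and callable (create_instance_id is the project's helper and is not reproduced here):

import pandas as pd  # assumes pandas >= 2.2 for the include_groups keyword

df = pd.DataFrame(
    {"batch_im_id": [0, 0, 1], "label": ["obj1", "obj1", "obj2"], "score": [0.9, 0.8, 0.7]}
)

# Keeps today's behaviour (grouping columns are visible to the callable),
# but still emits a DeprecationWarning because the behaviour changes in pandas 3.0.
out = df.groupby(["batch_im_id", "label"], group_keys=False).apply(
    lambda g: g.assign(instance_id=list(range(len(g)))),
    include_groups=True,
)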
10 changes: 5 additions & 5 deletions tests/test_batch_renderer_panda3d.py
@@ -8,7 +8,7 @@
import torch
from numpy.testing import assert_array_less as np_assert_array_less
from numpy.testing import assert_equal as np_assert_equal
-from torch.testing import assert_allclose as tr_assert_allclose
+from torch.testing import assert_close as tr_assert_close

from happypose.toolbox.datasets.object_dataset import RigidObject, RigidObjectDataset
from happypose.toolbox.lib3d.transform import Transform
@@ -113,11 +113,11 @@ def test_batch_renderer(self, device):
assert renderings.binary_masks.dtype == torch.bool

# Renders from 2 identical cams are equals
-assert tr_assert_allclose(renderings.rgbs[0], renderings.rgbs[1]) is None
-assert tr_assert_allclose(renderings.normals[0], renderings.normals[1]) is None
-assert tr_assert_allclose(renderings.depths[0], renderings.depths[1]) is None
+assert tr_assert_close(renderings.rgbs[0], renderings.rgbs[1]) is None
+assert tr_assert_close(renderings.normals[0], renderings.normals[1]) is None
+assert tr_assert_close(renderings.depths[0], renderings.depths[1]) is None
assert (
-    tr_assert_allclose(renderings.binary_masks[0], renderings.binary_masks[1])
+    tr_assert_close(renderings.binary_masks[0], renderings.binary_masks[1])
is None
)
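The swap from assert_allclose to assert_close follows torch.testing deprecating the former in newer PyTorch releases; both raise on mismatch and return None on success, which is why the tests keep the `is None` pattern. A minimal illustrative sketch:

import torch
from torch.testing import assert_close  # replacement for the deprecated assert_allclose

a = torch.tensor([1.0, 2.0, 3.0])
b = a + 1e-9  # well within the default tolerances for float32

assert_close(a, b)                  # raises AssertionError on mismatch, returns None otherwise
assert assert_close(a, b) is None   # the pattern used in the tests above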

3 changes: 2 additions & 1 deletion tests/test_cosypose_detector_training.py
@@ -350,7 +350,8 @@ def lambd(batch):
gamma=0.1,
)
lr_scheduler.last_epoch = start_epoch - 1
-lr_scheduler.step()
+# This led to a warning in newer versions of PyTorch
+# lr_scheduler.step()

for epoch in range(start_epoch, end_epoch):
meters_train = defaultdict(AverageValueMeter)
8 changes: 4 additions & 4 deletions tests/test_renderer_bullet.py
@@ -7,7 +7,7 @@
import torch
from numpy.testing import assert_array_less as np_assert_array_less
from numpy.testing import assert_equal as np_assert_equal
-from torch.testing import assert_allclose as tr_assert_allclose
+from torch.testing import assert_close as tr_assert_close

from happypose.toolbox.datasets.object_dataset import RigidObject, RigidObjectDataset
from happypose.toolbox.lib3d.transform import Transform
@@ -206,10 +206,10 @@ def test_batch_renderer(self, device):
assert renderings.binary_masks.dtype == torch.bool

# Renders from 2 identical cams are equals
-assert tr_assert_allclose(renderings.rgbs[0], renderings.rgbs[1]) is None
-assert tr_assert_allclose(renderings.depths[0], renderings.depths[1]) is None
+assert tr_assert_close(renderings.rgbs[0], renderings.rgbs[1]) is None
+assert tr_assert_close(renderings.depths[0], renderings.depths[1]) is None
assert (
-    tr_assert_allclose(renderings.binary_masks[0], renderings.binary_masks[1])
+    tr_assert_close(renderings.binary_masks[0], renderings.binary_masks[1])
is None
)

10 changes: 5 additions & 5 deletions tests/test_scene_renderer_panda3d.py
@@ -8,7 +8,7 @@
import pytest
from numpy.testing import assert_array_less as np_assert_array_less
from numpy.testing import assert_equal as np_assert_equal
-from torch.testing import assert_allclose as tr_assert_allclose
+from torch.testing import assert_close as tr_assert_close

from happypose.toolbox.datasets.object_dataset import RigidObject, RigidObjectDataset
from happypose.toolbox.lib3d.transform import Transform
@@ -91,11 +91,11 @@ def test_scene_renderer(self, device):
assert len(renderings) == self.Nc

# render from 2 identical cams are equals
-assert tr_assert_allclose(renderings[0].rgb, renderings[1].rgb) is None
-assert tr_assert_allclose(renderings[0].normals, renderings[1].normals) is None
-assert tr_assert_allclose(renderings[0].depth, renderings[1].depth) is None
+assert tr_assert_close(renderings[0].rgb, renderings[1].rgb) is None
+assert tr_assert_close(renderings[0].normals, renderings[1].normals) is None
+assert tr_assert_close(renderings[0].depth, renderings[1].depth) is None
assert (
-    tr_assert_allclose(renderings[0].binary_mask, renderings[1].binary_mask)
+    tr_assert_close(renderings[0].binary_mask, renderings[1].binary_mask)
is None
)

