From 8cdd56de7dc3ffdc0705912d6574cf6975f072c6 Mon Sep 17 00:00:00 2001 From: mfourmy Date: Thu, 29 Feb 2024 16:24:19 +0100 Subject: [PATCH 1/2] Cosypose wrapper works with bullet --- docs/book/cosypose/download_data.md | 4 +- .../cosypose/datasets/urdf_dataset.py | 21 +++++-- .../cosypose/integrated/pose_estimator.py | 2 +- .../cosypose/cosypose/libmesh/urdf_utils.py | 47 +++++++++++++--- .../cosypose/cosypose/models/pose.py | 47 ++++++++++------ .../scripts/convert_bop_ds_to_urdf.py | 43 ++++++++++++++ .../scripts/convert_models_to_urdf.py | 53 ------------------ .../cosypose/utils/cosypose_wrapper.py | 56 +++++++++++++++---- tests/test_renderer_bullet.py | 4 +- tests/test_renderer_panda3d.py | 2 +- 10 files changed, 176 insertions(+), 103 deletions(-) create mode 100644 happypose/pose_estimators/cosypose/cosypose/scripts/convert_bop_ds_to_urdf.py delete mode 100644 happypose/pose_estimators/cosypose/cosypose/scripts/convert_models_to_urdf.py diff --git a/docs/book/cosypose/download_data.md b/docs/book/cosypose/download_data.md index 085e3584..8379f895 100644 --- a/docs/book/cosypose/download_data.md +++ b/docs/book/cosypose/download_data.md @@ -34,8 +34,8 @@ Notes: - The URDF files were obtained using these commands (requires `meshlab` to be installed): ```sh - python -m happypose.pose_estimators.cosypose.cosypose.scripts.convert_models_to_urdf --models=ycbv - python -m happypose.pose_estimators.cosypose.cosypose.scripts.convert_models_to_urdf --models=tless.cad + python -m happypose.pose_estimators.cosypose.cosypose.scripts.convert_bop_ds_to_urdf --ds_name=ycbv + python -m happypose.pose_estimators.cosypose.cosypose.scripts.convert_bop_ds_to_urdf --ds_name=tless.cad ``` - Compatibility models were obtained using the following script: diff --git a/happypose/pose_estimators/cosypose/cosypose/datasets/urdf_dataset.py b/happypose/pose_estimators/cosypose/cosypose/datasets/urdf_dataset.py index 976e11e8..69808dd4 100644 --- a/happypose/pose_estimators/cosypose/cosypose/datasets/urdf_dataset.py +++ b/happypose/pose_estimators/cosypose/cosypose/datasets/urdf_dataset.py @@ -1,18 +1,29 @@ +import json from pathlib import Path import pandas as pd class UrdfDataset: - def __init__(self, ds_dir): - ds_dir = Path(ds_dir) + def __init__(self, urdf_ds_dir, label_filename="objname2label.json"): + urdf_ds_dir = Path(urdf_ds_dir) + label_path = urdf_ds_dir / label_filename + if label_path.exists(): + with label_path.open() as fp: + objname2label = json.load(fp) + else: + objname2label = None index = [] - for urdf_dir in Path(ds_dir).iterdir(): - urdf_paths = list(urdf_dir.glob("*.urdf")) + for obj_dir in urdf_ds_dir.iterdir(): + urdf_paths = list(obj_dir.glob("*.urdf")) if len(urdf_paths) == 1: urdf_path = urdf_paths[0] + if objname2label is None: + label = obj_dir.name + else: + label = objname2label[obj_dir.name] infos = { - "label": urdf_dir.name, + "label": label, "urdf_path": urdf_path.as_posix(), "scale": 1.0, } diff --git a/happypose/pose_estimators/cosypose/cosypose/integrated/pose_estimator.py b/happypose/pose_estimators/cosypose/cosypose/integrated/pose_estimator.py index 34d9bda0..99453225 100644 --- a/happypose/pose_estimators/cosypose/cosypose/integrated/pose_estimator.py +++ b/happypose/pose_estimators/cosypose/cosypose/integrated/pose_estimator.py @@ -147,7 +147,7 @@ def run_inference_pipeline( coarse_estimates: Optional[PoseEstimatesType] = None, detection_th: float = 0.7, mask_th: float = 0.8, - labels_to_keep: List[str] = None, + labels_to_keep: Optional[List[str]] = None, ) -> 
Tuple[PoseEstimatesType, dict]:
         timing_str = ""
         timer = SimpleTimer()
diff --git a/happypose/pose_estimators/cosypose/cosypose/libmesh/urdf_utils.py b/happypose/pose_estimators/cosypose/cosypose/libmesh/urdf_utils.py
index 668d4a20..b11d3283 100644
--- a/happypose/pose_estimators/cosypose/cosypose/libmesh/urdf_utils.py
+++ b/happypose/pose_estimators/cosypose/cosypose/libmesh/urdf_utils.py
@@ -1,34 +1,63 @@
+import json
+import shutil
 import xml.etree.ElementTree as ET
 from pathlib import Path
 from xml.dom import minidom
 
 import numpy as np
 import trimesh
+from tqdm import tqdm
 
 from happypose.toolbox.datasets.object_dataset import RigidObjectDataset
 
 
 def convert_rigid_body_dataset_to_urdfs(
-    rb_ds: RigidObjectDataset, urdf_dir: Path, texture_size=(1024, 1024)
+    rb_ds: RigidObjectDataset,
+    urdf_dir: Path,
+    texture_size=(1024, 1024),
+    override=True,
+    label2objname_file_name: str = "objname2label.json",
 ):
     """
-    Converts a RigidObjectDataset in a bullet renderer compatible directory with .obj and urdf files.
+    Converts a RigidObjectDataset into a directory of urdf files with structure:
+
+    urdf_dir/objname2label.json  (file name set by `label2objname_file_name`)
+    urdf_dir/obj_000001/obj_000001.mtl
+                        obj_000001.obj
+                        obj_000001_texture.png
+                        obj_000001.urdf
+    urdf_dir/obj_000002/obj_000002.mtl
+                        obj_000002.obj
+                        obj_000002_texture.png
+                        obj_000002.urdf
+
+    objname2label.json: stores a map between object file names (e.g. obj_000002) and
+    object labels used in happypose (e.g. the detector may output "ycbv-obj_000002")
     """
+    if override and urdf_dir.exists():
+        shutil.rmtree(urdf_dir, ignore_errors=True)
     urdf_dir.mkdir(exist_ok=True, parents=True)
-    for obj in rb_ds.list_objects:
-        out_dir = urdf_dir / obj.mesh_path.with_suffix("").name
-        out_dir.mkdir(exist_ok=True)
-        obj_path = out_dir / obj.mesh_path.with_suffix(".obj").name
-        urdf_path = obj_path.with_suffix(".urdf")
+    objname2label = {}
+    for obj in tqdm(rb_ds.list_objects):
+        objname = obj.mesh_path.with_suffix("").name  # e.g. "obj_000002"
+        objname2label[objname] = obj.label  # e.g. 
obj_000002 -> ycbv-obj_000002 + # Create object folder + obj_urdf_dir = urdf_dir / objname + obj_urdf_dir.mkdir(exist_ok=True) # urdf_dir/obj_000002/ created + # Convert mesh from ply to obj + obj_path = (obj_urdf_dir / objname).with_suffix(".obj") if obj.mesh_path.suffix == ".ply": - if obj_path.exists(): - obj_path.unlink() ply_to_obj(obj.mesh_path, obj_path, texture_size) else: ValueError(f"{obj.mesh_path.suffix} file type not supported") + # Create a .urdf file associated to the .obj file + urdf_path = obj_path.with_suffix(".urdf") obj_to_urdf(obj_path, urdf_path) + with open(urdf_dir / label2objname_file_name, "w") as fp: + json.dump(objname2label, fp) + def ply_to_obj(ply_path: Path, obj_path: Path, texture_size=None): assert obj_path.suffix == ".obj" diff --git a/happypose/pose_estimators/cosypose/cosypose/models/pose.py b/happypose/pose_estimators/cosypose/cosypose/models/pose.py index 286aebe1..753b3f8a 100644 --- a/happypose/pose_estimators/cosypose/cosypose/models/pose.py +++ b/happypose/pose_estimators/cosypose/cosypose/models/pose.py @@ -24,6 +24,8 @@ compute_rotation_matrix_from_quaternions, ) from happypose.toolbox.renderer import Panda3dLightData +from happypose.toolbox.renderer.bullet_batch_renderer import BulletBatchRenderer +from happypose.toolbox.renderer.panda3d_batch_renderer import Panda3dBatchRenderer logger = get_logger(__name__) @@ -128,21 +130,32 @@ def forward(self, images, K, labels, TCO, n_iterations=1): labels, ) - ambient_light = Panda3dLightData( - light_type="ambient", - color=(1.0, 1.0, 1.0, 1.0), - ) - light_datas = [[ambient_light] for _ in range(len(labels))] - - renders = self.renderer.render( - labels=labels, - TCO=TCO_input, - K=K_crop, - resolution=self.render_size, - light_datas=light_datas, - ) - renders = renders.rgbs - x = torch.cat((images_crop, renders), dim=1) + if isinstance(self.renderer, Panda3dBatchRenderer): + ambient_light = Panda3dLightData( + light_type="ambient", + color=(1.0, 1.0, 1.0, 1.0), + ) + light_datas = [[ambient_light] for _ in range(len(labels))] + + renders = self.renderer.render( + labels=labels, + TCO=TCO_input, + K=K_crop, + resolution=self.render_size, + light_datas=light_datas, + ) + elif isinstance(self.renderer, BulletBatchRenderer): + renders = self.renderer.render( + labels=labels, + TCO=TCO_input, + K=K_crop, + resolution=self.render_size, + ) + else: + raise ValueError( + f"Renderer of type {type(self.renderer)} not supported" + ) + x = torch.cat((images_crop, renders.rgbs), dim=1) model_outputs = self.net_forward(x) @@ -158,7 +171,7 @@ def forward(self, images, K, labels, TCO, n_iterations=1): } outputs[f"iteration={n+1}"] = PosePredictorOutputCosypose( - renders=renders, + renders=renders.rgbs, images_crop=images_crop, TCO_input=TCO_input, TCO_output=TCO_output, @@ -176,7 +189,7 @@ def forward(self, images, K, labels, TCO, n_iterations=1): self.tmp_debug.update( images=images, images_crop=images_crop, - renders=renders, + renders=renders.rgbs, ) path = DEBUG_DATA_DIR / f"debug_iter={n+1}.pth.tar" logger.info(f"Wrote debug data: {path}") diff --git a/happypose/pose_estimators/cosypose/cosypose/scripts/convert_bop_ds_to_urdf.py b/happypose/pose_estimators/cosypose/cosypose/scripts/convert_bop_ds_to_urdf.py new file mode 100644 index 00000000..9d4c91f4 --- /dev/null +++ b/happypose/pose_estimators/cosypose/cosypose/scripts/convert_bop_ds_to_urdf.py @@ -0,0 +1,43 @@ +import argparse +from pathlib import Path + +from happypose.pose_estimators.cosypose.cosypose.libmesh.urdf_utils import ( + 
convert_rigid_body_dataset_to_urdfs,
+)
+from happypose.toolbox.datasets.datasets_cfg import make_object_dataset
+
+
+def convert_bop_dataset_to_urdfs(
+    obj_ds_name: str, urdf_dir: Path, texture_size=(1024, 1024), override=True
+):
+    obj_dataset = make_object_dataset(obj_ds_name)
+    urdf_ds_dir = urdf_dir / obj_ds_name
+    convert_rigid_body_dataset_to_urdfs(
+        obj_dataset, urdf_ds_dir, texture_size, override
+    )
+
+
+def main():
+    parser = argparse.ArgumentParser("3D ply object ds_name -> pybullet URDF converter")
+    parser.add_argument(
+        "--ds_name",
+        default="",
+        type=str,
+        help="Bop dataset model name: ycbv, tless.cad, etc.",
+    )
+    parser.add_argument(
+        "--override",
+        default=True,
+        type=bool,
+        help="If true, erases previous content of urdf/{ds_name}.",
+    )
+    args = parser.parse_args()
+
+    from happypose.pose_estimators.cosypose.cosypose.config import LOCAL_DATA_DIR
+
+    urdf_dir = LOCAL_DATA_DIR / "urdfs"
+    convert_bop_dataset_to_urdfs(args.ds_name, urdf_dir, override=args.override)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/happypose/pose_estimators/cosypose/cosypose/scripts/convert_models_to_urdf.py b/happypose/pose_estimators/cosypose/cosypose/scripts/convert_models_to_urdf.py
deleted file mode 100644
index 00569b26..00000000
--- a/happypose/pose_estimators/cosypose/cosypose/scripts/convert_models_to_urdf.py
+++ /dev/null
@@ -1,53 +0,0 @@
-import argparse
-from pathlib import Path
-
-from tqdm import tqdm
-
-from happypose.pose_estimators.cosypose.cosypose.libmesh.urdf_utils import (
-    obj_to_urdf,
-    ply_to_obj,
-)
-
-# from happypose.pose_estimators.cosypose.cosypose.datasets.datasets_cfg import \
-# make_object_dataset
-from happypose.toolbox.datasets.datasets_cfg import make_object_dataset
-
-
-def convert_bop_dataset_to_urdfs(
-    obj_ds_name: str, urdf_dir: Path, texture_size=(1024, 1024)
-):
-    """
-    For each object, generate these files:
-
-    {path_to_urdf_dir}/{ds_name}/{obj_label}/{obj_label}.obj
-    {path_to_urdf_dir}/{ds_name}/{obj_label}/{obj_label}.obj.mtl
-    {path_to_urdf_dir}/{ds_name}/{obj_label}/{obj_label}.png
-    {path_to_urdf_dir}/{ds_name}/{obj_label}/{obj_label}.urdf
-    """
-    obj_dataset = make_object_dataset(obj_ds_name)
-    urdf_ds_dir = urdf_dir / obj_ds_name
-    urdf_ds_dir.mkdir(exist_ok=True, parents=True)
-    for n in tqdm(range(len(obj_dataset))):
-        obj = obj_dataset[n]
-        ply_path = obj.mesh_path
-        obj_name = ply_path.with_suffix("").name
-        obj_urdf_dir = urdf_ds_dir / obj_name
-        obj_urdf_dir.mkdir(exist_ok=True)
-        obj_path = (obj_urdf_dir / obj_name).with_suffix(".obj")
-        ply_to_obj(ply_path, obj_path, texture_size=texture_size)
-        obj_to_urdf(obj_path, obj_path.with_suffix(".urdf"))
-
-
-def main():
-    parser = argparse.ArgumentParser("3D ply object models -> pybullet URDF converter")
-    parser.add_argument("--models", default="", type=str)
-    args = parser.parse_args()
-
-    from happypose.pose_estimators.cosypose.cosypose.config import LOCAL_DATA_DIR
-
-    urdf_dir = LOCAL_DATA_DIR / "urdfs"
-    convert_bop_dataset_to_urdfs(args.models, urdf_dir)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/happypose/pose_estimators/cosypose/cosypose/utils/cosypose_wrapper.py b/happypose/pose_estimators/cosypose/cosypose/utils/cosypose_wrapper.py
index b31f4310..378ad6c3 100644
--- a/happypose/pose_estimators/cosypose/cosypose/utils/cosypose_wrapper.py
+++ b/happypose/pose_estimators/cosypose/cosypose/utils/cosypose_wrapper.py
@@ -29,7 +29,6 @@
 from happypose.toolbox.datasets.object_dataset import RigidObjectDataset
 from happypose.toolbox.inference.utils import load_detector
 from 
happypose.toolbox.lib3d.rigid_mesh_database import MeshDataBase -from happypose.toolbox.renderer.panda3d_batch_renderer import Panda3dBatchRenderer device = torch.device("cuda" if torch.cuda.is_available() else "cpu") @@ -41,12 +40,24 @@ def __init__( object_dataset: Union[None, RigidObjectDataset] = None, n_workers=8, gpu_renderer=False, + renderer_type: str = "panda3d", ) -> None: + """ + inputs: + - dataset_name: hope|tless|ycbv + - object_dataset: None or already existing rigid object dataset. If None, will use dataset_name + to build one + - n_workers: how many processes will be spun in the batch renderer + - renderer_type: 'panda3d'|'bullet' + """ + self.dataset_name = dataset_name self.object_dataset = object_dataset - self.detector, self.pose_predictor = self.get_model(dataset_name, n_workers) + self.detector, self.pose_predictor = self.get_model( + dataset_name, n_workers, renderer_type + ) - def get_model(self, dataset_name, n_workers): + def get_model(self, dataset_name, n_workers, renderer_type): # load models if dataset_name == "hope": # HOPE setup @@ -77,9 +88,7 @@ def get_model(self, dataset_name, n_workers): raise ValueError(msg) detector = load_detector(detector_run_id) coarse_model, refiner_model = self.load_pose_models( - coarse_run_id=coarse_run_id, - refiner_run_id=refiner_run_id, - n_workers=n_workers, + coarse_run_id, refiner_run_id, n_workers, renderer_type ) pose_estimator = PoseEstimator( @@ -89,14 +98,37 @@ def get_model(self, dataset_name, n_workers): ) return detector, pose_estimator - def load_pose_models(self, coarse_run_id, refiner_run_id, n_workers): + def load_pose_models( + self, + coarse_run_id: str, + refiner_run_id: str, + n_workers: int, + renderer_type: str = "panda3d", + ): if self.object_dataset is None: self.object_dataset = make_object_dataset(self.dataset_name) - renderer = Panda3dBatchRenderer( - self.object_dataset, - n_workers=n_workers, - preload_cache=False, - ) + if renderer_type == "panda3d": + from happypose.toolbox.renderer.panda3d_batch_renderer import ( + Panda3dBatchRenderer, + ) + + renderer = Panda3dBatchRenderer( + self.object_dataset, + n_workers=n_workers, + ) + elif renderer_type == "bullet": + from happypose.toolbox.renderer.bullet_batch_renderer import ( + BulletBatchRenderer, + ) + + renderer = BulletBatchRenderer( + self.object_dataset, + n_workers=n_workers, + gpu_renderer=torch.cuda.is_available(), + ) + else: + raise ValueError(f"Renderer type {renderer_type} not supported") + mesh_db = MeshDataBase.from_object_ds(self.object_dataset) mesh_db_batched = mesh_db.batched().to(device) diff --git a/tests/test_renderer_bullet.py b/tests/test_renderer_bullet.py index a0285471..0cbc7928 100644 --- a/tests/test_renderer_bullet.py +++ b/tests/test_renderer_bullet.py @@ -18,9 +18,7 @@ class TestBulletRenderer(unittest.TestCase): """Unit tests for bullet renderer.""" def setUp(self) -> None: - self.obj_label = ( - "obj_000001" # TODO: current limitation: label must be equal to object name - ) + self.obj_label = "my_favorite_object_label" self.obj_path = Path(__file__).parent.joinpath("data/obj_000001.ply") self.rigid_object_dataset = RigidObjectDataset( objects=[ diff --git a/tests/test_renderer_panda3d.py b/tests/test_renderer_panda3d.py index 5086457d..a0d54e27 100644 --- a/tests/test_renderer_panda3d.py +++ b/tests/test_renderer_panda3d.py @@ -26,7 +26,7 @@ class TestPanda3DRenderer(unittest.TestCase): """Unit tests for Panda3D renderer.""" def setUp(self) -> None: - self.obj_label = "obj" + self.obj_label = 
"my_favorite_object_label" self.obj_path = Path(__file__).parent.joinpath("data/obj_000001.ply") self.asset_dataset = RigidObjectDataset( objects=[ From f58d767e5af331738c079b54c43e3fb38a1fcb96 Mon Sep 17 00:00:00 2001 From: mfourmy Date: Thu, 29 Feb 2024 16:32:34 +0100 Subject: [PATCH 2/2] add cosypose+bullet unittest --- tests/test_cosypose_inference.py | 94 +++++++++++++++++++++++++------- 1 file changed, 73 insertions(+), 21 deletions(-) diff --git a/tests/test_cosypose_inference.py b/tests/test_cosypose_inference.py index 6eb9a45d..279b1b56 100644 --- a/tests/test_cosypose_inference.py +++ b/tests/test_cosypose_inference.py @@ -20,72 +20,124 @@ from happypose.toolbox.inference.types import ObservationTensor from happypose.toolbox.inference.utils import load_detector from happypose.toolbox.lib3d.rigid_mesh_database import MeshDataBase -from happypose.toolbox.renderer.panda3d_batch_renderer import Panda3dBatchRenderer class TestCosyPoseInference(unittest.TestCase): """Unit tests for CosyPose inference example.""" - def test_cosypose_pipeline(self): + def setUp(self) -> None: """Run detector with coarse and refiner from CosyPose.""" - expected_object_label = "hope-obj_000002" + self.expected_object_label = "hope-obj_000002" mesh_file_name = "hope-obj_000002.ply" data_dir = LOCAL_DATA_DIR / "examples" / "barbecue-sauce" mesh_dir = data_dir / "meshes" mesh_path = mesh_dir / mesh_file_name - device = "cpu" - n_workers = 1 + + self.coarse_run_id = "coarse-bop-hope-pbr--225203" + self.refiner_run_id = "refiner-bop-hope-pbr--955392" + + self.device = "cpu" rgb, depth, camera_data = load_observation_example(data_dir, load_depth=True) # TODO: cosypose forward does not work if depth is loaded detection contrary to megapose - observation = ObservationTensor.from_numpy(rgb, depth=None, K=camera_data.K) + self.observation = ObservationTensor.from_numpy( + rgb, depth=None, K=camera_data.K + ) - detector = load_detector(run_id="detector-bop-hope-pbr--15246", device=device) + self.detector = load_detector( + run_id="detector-bop-hope-pbr--15246", device=self.device + ) # detections = detector.get_detections(observation=observation) - object_dataset = RigidObjectDataset( + self.object_dataset = RigidObjectDataset( objects=[ RigidObject( - label=expected_object_label, mesh_path=mesh_path, mesh_units="mm" + label=self.expected_object_label, + mesh_path=mesh_path, + mesh_units="mm", ) ] ) + mesh_db = MeshDataBase.from_object_ds(self.object_dataset) + self.mesh_db_batched = mesh_db.batched().to("cpu") + + def test_cosypose_pipeline_panda3d(self): + from happypose.toolbox.renderer.panda3d_batch_renderer import ( + Panda3dBatchRenderer, + ) renderer = Panda3dBatchRenderer( - object_dataset, - n_workers=n_workers, + self.object_dataset, + n_workers=1, preload_cache=False, ) - coarse_run_id = "coarse-bop-hope-pbr--225203" - refiner_run_id = "refiner-bop-hope-pbr--955392" + coarse_model = load_model_cosypose( + EXP_DIR / self.coarse_run_id, renderer, self.mesh_db_batched, self.device + ) + refiner_model = load_model_cosypose( + EXP_DIR / self.refiner_run_id, renderer, self.mesh_db_batched, self.device + ) + + pose_estimator = PoseEstimator( + refiner_model=refiner_model, + coarse_model=coarse_model, + detector_model=self.detector, + ) + + # Run detector and pose estimator filtering object labels + preds, _ = pose_estimator.run_inference_pipeline( + observation=self.observation, + detection_th=0.8, + run_detector=True, + n_refiner_iterations=3, + labels_to_keep=[self.expected_object_label], + ) + + 
self.assertEqual(len(preds), 1) + self.assertEqual(preds.infos.label[0], self.expected_object_label) + + pose = pin.SE3(preds.poses[0].numpy()) + exp_pose = pin.SE3( + pin.exp3(np.array([1.4, 1.6, -1.11])), + np.array([0.1, 0.07, 0.45]), + ) + diff = pose.inverse() * exp_pose + self.assertLess(np.linalg.norm(pin.log6(diff).vector), 0.3) + + def test_cosypose_pipeline_bullet(self): + from happypose.toolbox.renderer.bullet_batch_renderer import BulletBatchRenderer + + renderer = BulletBatchRenderer( + self.object_dataset, + n_workers=0, + gpu_renderer=False, + ) - mesh_db = MeshDataBase.from_object_ds(object_dataset) - mesh_db_batched = mesh_db.batched().to("cpu") coarse_model = load_model_cosypose( - EXP_DIR / coarse_run_id, renderer, mesh_db_batched, device + EXP_DIR / self.coarse_run_id, renderer, self.mesh_db_batched, self.device ) refiner_model = load_model_cosypose( - EXP_DIR / refiner_run_id, renderer, mesh_db_batched, device + EXP_DIR / self.refiner_run_id, renderer, self.mesh_db_batched, self.device ) pose_estimator = PoseEstimator( refiner_model=refiner_model, coarse_model=coarse_model, - detector_model=detector, + detector_model=self.detector, ) # Run detector and pose estimator filtering object labels preds, _ = pose_estimator.run_inference_pipeline( - observation=observation, + observation=self.observation, detection_th=0.8, run_detector=True, n_refiner_iterations=3, - labels_to_keep=[expected_object_label], + labels_to_keep=[self.expected_object_label], ) self.assertEqual(len(preds), 1) - self.assertEqual(preds.infos.label[0], expected_object_label) + self.assertEqual(preds.infos.label[0], self.expected_object_label) pose = pin.SE3(preds.poses[0].numpy()) exp_pose = pin.SE3(