From fda448b94300a680175fa5ab81f52cb36da1dbb1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Elliot=20Ma=C3=AEtre?=
Date: Mon, 18 Sep 2023 12:27:52 +0200
Subject: [PATCH] Fixing config names (MEGAPOSE_DATA_DIR --> HAPPYPOSE_DATA_DIR) and fixing bugs introduced in the previous merge

---
 README.md                                            |  2 +-
 .../cosypose/cosypose/config.py                      |  2 +-
 .../megapose/src/megapose/config.py                  |  2 +-
 .../megapose/evaluation/prediction_runner.py         | 45 +------------------
 .../src/megapose/inference/pose_estimator.py         |  7 +--
 .../scripts/run_full_megapose_eval.py                |  2 -
 6 files changed, 6 insertions(+), 54 deletions(-)

diff --git a/README.md b/README.md
index bfc5e8ca..90ae3702 100644
--- a/README.md
+++ b/README.md
@@ -40,7 +40,7 @@ pip install -r requirements.txt -e .
 ```
 
 Create data dir /somewhere/convenient. The dataset to store are quite large.
-export MEGAPOSE_DATA_DIR=/somewhere/convenient
+export HAPPYPOSE_DATA_DIR=/somewhere/convenient
 ```
 
 # Configuration for the evaluation
diff --git a/happypose/pose_estimators/cosypose/cosypose/config.py b/happypose/pose_estimators/cosypose/cosypose/config.py
index 61f922b4..b745f35c 100644
--- a/happypose/pose_estimators/cosypose/cosypose/config.py
+++ b/happypose/pose_estimators/cosypose/cosypose/config.py
@@ -15,7 +15,7 @@
 PROJECT_ROOT = Path(happypose.__file__).parent.parent
 PROJECT_DIR = PROJECT_ROOT
 DATA_DIR = PROJECT_DIR / 'data'
-LOCAL_DATA_DIR = Path(os.environ.get("MEGAPOSE_DATA_DIR", Path(PROJECT_DIR) / "local_data"))
+LOCAL_DATA_DIR = Path(os.environ.get("HAPPYPOSE_DATA_DIR", Path(PROJECT_DIR) / "local_data"))
 TEST_DATA_DIR = LOCAL_DATA_DIR
 DASK_LOGS_DIR = LOCAL_DATA_DIR / 'dasklogs'
 SYNT_DS_DIR = LOCAL_DATA_DIR / 'synt_datasets'
diff --git a/happypose/pose_estimators/megapose/src/megapose/config.py b/happypose/pose_estimators/megapose/src/megapose/config.py
index 6d69d7dc..cdcc65fc 100644
--- a/happypose/pose_estimators/megapose/src/megapose/config.py
+++ b/happypose/pose_estimators/megapose/src/megapose/config.py
@@ -28,7 +28,7 @@
 PROJECT_ROOT = Path(megapose.__file__).parent.parent.parent
 PROJECT_DIR = PROJECT_ROOT
 
-LOCAL_DATA_DIR = Path(os.environ.get("MEGAPOSE_DATA_DIR", Path(PROJECT_DIR) / "local_data"))
+LOCAL_DATA_DIR = Path(os.environ.get("HAPPYPOSE_DATA_DIR", Path(PROJECT_DIR) / "local_data"))
 BOP_DS_DIR = LOCAL_DATA_DIR / "bop_datasets"
 NB_DATA_DIR = LOCAL_DATA_DIR / "notebook_data"
 SHAPENET_DIR = LOCAL_DATA_DIR / "shapenetcorev2"
diff --git a/happypose/pose_estimators/megapose/src/megapose/evaluation/prediction_runner.py b/happypose/pose_estimators/megapose/src/megapose/evaluation/prediction_runner.py
index 82ea2f94..69ccaee8 100644
--- a/happypose/pose_estimators/megapose/src/megapose/evaluation/prediction_runner.py
+++ b/happypose/pose_estimators/megapose/src/megapose/evaluation/prediction_runner.py
@@ -60,7 +60,6 @@
 
 device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
 
-<<<<<<< HEAD
 
 
 ##################################
@@ -112,8 +111,6 @@
 ##################################
 
-=======
->>>>>>> c6cd60e ( first attempt at bop challenge)
 
 
 class PredictionRunner:
     def __init__(
@@ -161,14 +158,10 @@ def run_inference_pipeline(
 
         """
 
-        print("gt detections =\n", gt_detections)
-        print("sam detections =\n", sam_detections)
-
         # TODO: this check could be done outside of run_inference_pipeline
         # and then only check if detections are None
         if self.inference_cfg.detection_type == "gt":
             detections = gt_detections
-            print("gt detections =", gt_detections.bboxes)
             run_detector = False
         elif self.inference_cfg.detection_type == "sam":
             # print("sam_detections =", sam_detections.bboxes)
@@ -177,14 +170,7 @@ def run_inference_pipeline(
         elif self.inference_cfg.detection_type == "detector":
             detections = None
             run_detector = True
-<<<<<<< HEAD
-=======
-        elif self.inference_cfg.detection_type == "sam":
-            print("sam_detections =", sam_detections.bboxes)
-            detections = sam_detections
-            run_detector = False
->>>>>>> c6cd60e ( first attempt at bop challenge)
 
         else:
             raise ValueError(f"Unknown detection type {self.inference_cfg.detection_type}")
 
@@ -252,7 +238,6 @@ def get_predictions(self, pose_estimator: PoseEstimator) -> Dict[str, PoseEstima
 
         predictions_list = defaultdict(list)
 
-<<<<<<< HEAD
         ######
         # This section opens the detections stored in "baseline.json"
         # format it and store it in a dataframe that will be accessed later
@@ -291,24 +276,12 @@ def get_predictions(self, pose_estimator: PoseEstimator) -> Dict[str, PoseEstima
         df_all_dets = pd.DataFrame.from_records(dets_lst)
         df_targets = pd.read_json(self.scene_ds.ds_dir / "test_targets_bop19.json")
 
-=======
-        # Temporary solution
-        if self.inference_cfg.detection_type == "sam":
-            data_path = Path("/home/emaitre/local_data/bop23/baseline/ycbv/baseline.json")
-            object_data = json.loads(data_path.read_text())
-            for object in object_data:
-                object['bbox'] = [float(i) for i in object['bbox']]
-                object['bbox_modal'] = object['bbox']
-                object['label'] = "ycbv-obj_{}".format(str(object['category_id']).zfill(6))
-            object_data = pd.DataFrame.from_records(object_data)
->>>>>>> c6cd60e ( first attempt at bop challenge)
 
         for n, data in enumerate(tqdm(self.dataloader)):
             # data is a dict
             rgb = data["rgb"]
             depth = data["depth"]
             K = data["cameras"].K
-<<<<<<< HEAD
 
             # Dirty but avoids creating error when running with real detector
             dt_det = 0
@@ -368,20 +341,6 @@ def get_predictions(self, pose_estimator: PoseEstimator) -> Dict[str, PoseEstima
                 sam_detections = make_detections_from_object_data(list_object_data).to(device)
                 sam_detections.infos['score'] = scores
-=======
-
-            # Temporary solution
-            if self.inference_cfg.detection_type == "sam":
-                list_object_data = []
-                scene_id = data['im_infos'][0]['scene_id']
-                view_id = data['im_infos'][0]['view_id']
-                print("scene and view :", scene_id, view_id)
-                list_object = object_data.loc[(object_data['scene_id'] == scene_id) & (object_data['image_id'] == view_id)].to_dict('records')
-                for object in list_object:
-                    list_object_data.append(ObjectData.from_json(object))
-                sam_detections = make_detections_from_object_data(list_object_data).to(device)
-                print("sam_detections =", sam_detections)
->>>>>>> c6cd60e ( first attempt at bop challenge)
             else:
                 sam_detections = None
             gt_detections = data["gt_detections"].cuda()
 
@@ -417,9 +376,9 @@ def get_predictions(self, pose_estimator: PoseEstimator) -> Dict[str, PoseEstima
                 v.infos['view_id'] = view_id
                 predictions_list[k].append(v)
 
-        # Concatenate the lists of PandasTensorCollections
+        # Concatenate the lists of PandasTensorCollections
         predictions = dict()
         for k, v in predictions_list.items():
             predictions[k] = tc.concatenate(v)
 
-        return predictions
+        return predictions
\ No newline at end of file
diff --git a/happypose/pose_estimators/megapose/src/megapose/inference/pose_estimator.py b/happypose/pose_estimators/megapose/src/megapose/inference/pose_estimator.py
index 7abc1479..f28b4357 100644
--- a/happypose/pose_estimators/megapose/src/megapose/inference/pose_estimator.py
+++ b/happypose/pose_estimators/megapose/src/megapose/inference/pose_estimator.py
@@ -582,12 +582,7 @@ def run_inference_pipeline(
                 detections, **detection_filter_kwargs
             )
 
-<<<<<<< HEAD
         # Run the coarse estimator using detections
-=======
-        # Run the coarse estimator using gt_detections
-        print("detections coarse =", detections)
->>>>>>> c6cd60e ( first attempt at bop challenge)
         data_TCO_coarse, coarse_extra_data = self.forward_coarse_model(
             observation=observation,
             detections=detections,
@@ -679,4 +674,4 @@ def filter_pose_estimates(
 
         data_TCO_filtered = data_TCO[df.index.tolist()]
 
-        return data_TCO_filtered
+        return data_TCO_filtered
\ No newline at end of file
diff --git a/happypose/pose_estimators/megapose/src/megapose/scripts/run_full_megapose_eval.py b/happypose/pose_estimators/megapose/src/megapose/scripts/run_full_megapose_eval.py
index f8273390..2cf6f36a 100644
--- a/happypose/pose_estimators/megapose/src/megapose/scripts/run_full_megapose_eval.py
+++ b/happypose/pose_estimators/megapose/src/megapose/scripts/run_full_megapose_eval.py
@@ -206,8 +206,6 @@ def update_cfg_debug(cfg: EvalConfig) -> FullEvalConfig:
     cli_cfg = OmegaConf.from_cli()
     logger.info(f"CLI config: \n {OmegaConf.to_yaml(cli_cfg)}")
 
-    print("full eval config =", FullEvalConfig.ds_names)
-
    cfg: FullEvalConfig = OmegaConf.structured(FullEvalConfig)
    cfg.hardware = HardwareConfig(
        n_cpus=int(os.environ.get("N_CPUS", 10)),
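
Not part of the patch above: a minimal sketch of how the renamed environment variable is resolved by the two config modules this commit touches, assuming only the Python standard library. `PROJECT_DIR` here is a stand-in for the project root that the real `config.py` files compute from the installed package.

```
import os
from pathlib import Path

# Stand-in for the PROJECT_DIR computed in the real config modules.
PROJECT_DIR = Path(__file__).resolve().parent

# Same resolution pattern as config.py after this patch:
# HAPPYPOSE_DATA_DIR wins when set, otherwise fall back to <project>/local_data.
LOCAL_DATA_DIR = Path(os.environ.get("HAPPYPOSE_DATA_DIR", Path(PROJECT_DIR) / "local_data"))

print(LOCAL_DATA_DIR)
```

With `export HAPPYPOSE_DATA_DIR=/somewhere/convenient` set, as the README change documents, the datasets and checkpoints are read from that directory; without it, the in-tree `local_data` fallback is used.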