Skip to content

Commit

Permalink
Fixing config names (MEGAPOSE_DATA_DIR --> HAPPYPOSE_DATA_DIR) and fi…
Browse files Browse the repository at this point in the history
…xing bugs introduced in the previous merge
  • Loading branch information
ElliotMaitre committed Sep 18, 2023
1 parent 2f0786b commit fda448b
Show file tree
Hide file tree
Showing 6 changed files with 6 additions and 54 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ pip install -r requirements.txt -e .

```
Create a data directory at /somewhere/convenient. The datasets to store are quite large.
export MEGAPOSE_DATA_DIR=/somewhere/convenient
export HAPPYPOSE_DATA_DIR=/somewhere/convenient
```

# Configuration for the evaluation
Expand Down
2 changes: 1 addition & 1 deletion happypose/pose_estimators/cosypose/cosypose/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
PROJECT_ROOT = Path(happypose.__file__).parent.parent
PROJECT_DIR = PROJECT_ROOT
DATA_DIR = PROJECT_DIR / 'data'
LOCAL_DATA_DIR = Path(os.environ.get("MEGAPOSE_DATA_DIR", Path(PROJECT_DIR) / "local_data"))
LOCAL_DATA_DIR = Path(os.environ.get("HAPPYPOSE_DATA_DIR", Path(PROJECT_DIR) / "local_data"))
TEST_DATA_DIR = LOCAL_DATA_DIR
DASK_LOGS_DIR = LOCAL_DATA_DIR / 'dasklogs'
SYNT_DS_DIR = LOCAL_DATA_DIR / 'synt_datasets'
Expand Down
2 changes: 1 addition & 1 deletion happypose/pose_estimators/megapose/src/megapose/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@

PROJECT_ROOT = Path(megapose.__file__).parent.parent.parent
PROJECT_DIR = PROJECT_ROOT
LOCAL_DATA_DIR = Path(os.environ.get("MEGAPOSE_DATA_DIR", Path(PROJECT_DIR) / "local_data"))
LOCAL_DATA_DIR = Path(os.environ.get("HAPPYPOSE_DATA_DIR", Path(PROJECT_DIR) / "local_data"))
BOP_DS_DIR = LOCAL_DATA_DIR / "bop_datasets"
NB_DATA_DIR = LOCAL_DATA_DIR / "notebook_data"
SHAPENET_DIR = LOCAL_DATA_DIR / "shapenetcorev2"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,6 @@

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

<<<<<<< HEAD


##################################
Expand Down Expand Up @@ -112,8 +111,6 @@
##################################


=======
>>>>>>> c6cd60e ( first attempt at bop challenge)

class PredictionRunner:
def __init__(
Expand Down Expand Up @@ -161,14 +158,10 @@ def run_inference_pipeline(
"""
print("gt detections =\n", gt_detections)
print("sam detections =\n", sam_detections)

# TODO: this check could be done outside of run_inference_pipeline
# and then only check if detections are None
if self.inference_cfg.detection_type == "gt":
detections = gt_detections
print("gt detections =", gt_detections.bboxes)
run_detector = False
elif self.inference_cfg.detection_type == "sam":
# print("sam_detections =", sam_detections.bboxes)
Expand All @@ -177,14 +170,7 @@ def run_inference_pipeline(
elif self.inference_cfg.detection_type == "detector":
detections = None
run_detector = True
<<<<<<< HEAD

=======
elif self.inference_cfg.detection_type == "sam":
print("sam_detections =", sam_detections.bboxes)
detections = sam_detections
run_detector = False
>>>>>>> c6cd60e ( first attempt at bop challenge)
else:
raise ValueError(f"Unknown detection type {self.inference_cfg.detection_type}")

Expand Down Expand Up @@ -252,7 +238,6 @@ def get_predictions(self, pose_estimator: PoseEstimator) -> Dict[str, PoseEstima

predictions_list = defaultdict(list)

<<<<<<< HEAD
######
# This section opens the detections stored in "baseline.json"
# format it and store it in a dataframe that will be accessed later
Expand Down Expand Up @@ -291,24 +276,12 @@ def get_predictions(self, pose_estimator: PoseEstimator) -> Dict[str, PoseEstima
df_all_dets = pd.DataFrame.from_records(dets_lst)

df_targets = pd.read_json(self.scene_ds.ds_dir / "test_targets_bop19.json")
=======
# Temporary solution
if self.inference_cfg.detection_type == "sam":
data_path = Path("/home/emaitre/local_data/bop23/baseline/ycbv/baseline.json")
object_data = json.loads(data_path.read_text())
for object in object_data:
object['bbox'] = [float(i) for i in object['bbox']]
object['bbox_modal'] = object['bbox']
object['label'] = "ycbv-obj_{}".format(str(object['category_id']).zfill(6))
object_data = pd.DataFrame.from_records(object_data)
>>>>>>> c6cd60e ( first attempt at bop challenge)

for n, data in enumerate(tqdm(self.dataloader)):
# data is a dict
rgb = data["rgb"]
depth = data["depth"]
K = data["cameras"].K
<<<<<<< HEAD

# Dirty but avoids creating error when running with real detector
dt_det = 0
Expand Down Expand Up @@ -368,20 +341,6 @@ def get_predictions(self, pose_estimator: PoseEstimator) -> Dict[str, PoseEstima
sam_detections = make_detections_from_object_data(list_object_data).to(device)
sam_detections.infos['score'] = scores

=======

# Temporary solution
if self.inference_cfg.detection_type == "sam":
list_object_data = []
scene_id = data['im_infos'][0]['scene_id']
view_id = data['im_infos'][0]['view_id']
print("scene and view :", scene_id, view_id)
list_object = object_data.loc[(object_data['scene_id'] == scene_id) & (object_data['image_id'] == view_id)].to_dict('records')
for object in list_object:
list_object_data.append(ObjectData.from_json(object))
sam_detections = make_detections_from_object_data(list_object_data).to(device)
print("sam_detections =", sam_detections)
>>>>>>> c6cd60e ( first attempt at bop challenge)
else:
sam_detections = None
gt_detections = data["gt_detections"].cuda()
Expand Down Expand Up @@ -417,9 +376,9 @@ def get_predictions(self, pose_estimator: PoseEstimator) -> Dict[str, PoseEstima
v.infos['view_id'] = view_id
predictions_list[k].append(v)

# Concatenate the lists of PandasTensorCollections
# Concatenate the lists of PandasTensorCollections
predictions = dict()
for k, v in predictions_list.items():
predictions[k] = tc.concatenate(v)

return predictions
return predictions
Original file line number Diff line number Diff line change
Expand Up @@ -582,12 +582,7 @@ def run_inference_pipeline(
detections, **detection_filter_kwargs
)

<<<<<<< HEAD
# Run the coarse estimator using detections
=======
# Run the coarse estimator using gt_detections
print("detections coarse =", detections)
>>>>>>> c6cd60e ( first attempt at bop challenge)
data_TCO_coarse, coarse_extra_data = self.forward_coarse_model(
observation=observation,
detections=detections,
Expand Down Expand Up @@ -679,4 +674,4 @@ def filter_pose_estimates(

data_TCO_filtered = data_TCO[df.index.tolist()]

return data_TCO_filtered
return data_TCO_filtered
Original file line number Diff line number Diff line change
Expand Up @@ -206,8 +206,6 @@ def update_cfg_debug(cfg: EvalConfig) -> FullEvalConfig:
cli_cfg = OmegaConf.from_cli()
logger.info(f"CLI config: \n {OmegaConf.to_yaml(cli_cfg)}")

print("full eval config =", FullEvalConfig.ds_names)

cfg: FullEvalConfig = OmegaConf.structured(FullEvalConfig)
cfg.hardware = HardwareConfig(
n_cpus=int(os.environ.get("N_CPUS", 10)),
Expand Down

0 comments on commit fda448b

Please sign in to comment.