
Commit

handle copy items
NikolaiPetukhov committed Dec 30, 2024
1 parent 11f142d commit 6274d5d
Showing 2 changed files with 30 additions and 16 deletions.
44 changes: 29 additions & 15 deletions src/api_utils.py
@@ -61,7 +61,7 @@ def get_project_activity(api: sly.Api, project_id: int):
return df


def images_get_list(api, dataset_id):
def images_get_list(api: sly.Api, dataset_id, image_ids=None):
api_fields = [
ApiField.ID,
ApiField.NAME,
@@ -73,7 +73,10 @@ def images_get_list(api, dataset_id):
ApiField.PATH_ORIGINAL,
ApiField.CREATED_BY_ID[0][0],
]
img_infos = api.image.get_list(dataset_id, fields=api_fields)
filters = None
if image_ids is not None:
filters = [{"field": ApiField.ID, "operator": "in", "value": image_ids}]
img_infos = api.image.get_list(dataset_id, filters=filters, fields=api_fields, force_metadata_for_links=False)
return img_infos
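
For orientation, a minimal sketch of how the updated helper could be called. The import path, api handle, and ids below are placeholders; only the images_get_list signature and the "in" filter come from this commit, and the sketch assumes the standard Supervisely environment variables are configured.

import supervisely as sly

from src import api_utils  # hypothetical import path for this repo

api = sly.Api.from_env()  # assumes SERVER_ADDRESS and API_TOKEN are set in the environment
dataset_id = 12345  # placeholder dataset id

# Without a filter the helper behaves as before and returns every image in the dataset.
all_infos = api_utils.images_get_list(api, dataset_id)

# With image_ids the same request is restricted to the selected items via an "in" filter.
selected_infos = api_utils.images_get_list(api, dataset_id, image_ids=[111, 222, 333])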


@@ -106,10 +109,18 @@ def create_dataset(
data[ApiField.UPDATED_AT] = updated_at
if created_by is not None:
data[ApiField.CREATED_BY_ID[0][0]] = created_by
response = api.post(
"datasets.add",
data,
)
try:
response = api.post(
"datasets.add",
data,
)
except Exception as e:
if "Some users not found in team" in str(e):
raise ValueError(
"Unable to create a dataset. Dataset creator is not a member of the destination team."
) from e
else:
raise e
return api.dataset._convert_json_info(response.json())
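
With the new guard, callers of create_dataset can tell a team-membership failure apart from other errors. A rough sketch of such a caller, reusing the api handle and placeholder ids from the sketch above; the positional arguments are assumptions, since the full create_dataset signature is not shown here.

try:
    dataset = api_utils.create_dataset(api, dst_project_id, "copied dataset", created_by=creator_id)
except ValueError as e:
    # Raised when the backend reports "Some users not found in team".
    sly.logger.warning(f"Dataset was not created: {e}")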


@@ -136,15 +147,18 @@ def images_bulk_add(
img_json[ApiField.HASH] = img_info.hash
img_data.append(img_json)

response = api.post(
"images.bulk.add",
{
ApiField.DATASET_ID: dataset_id,
ApiField.IMAGES: img_data,
ApiField.FORCE_METADATA_FOR_LINKS: False,
ApiField.SKIP_VALIDATION: True,
},
)
try:
response = api.post(
"images.bulk.add",
{
ApiField.DATASET_ID: dataset_id,
ApiField.IMAGES: img_data,
ApiField.FORCE_METADATA_FOR_LINKS: False,
ApiField.SKIP_VALIDATION: True,
},
)
except Exception as e:
raise e
results = []
for info_json in response.json():
info_json_copy = info_json.copy()
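Together, these helpers cover the image side of the copy flow named in the commit message: fetch infos (including the creator field) from the source dataset, then re-add them to the destination. The sketch below reuses the placeholders from the earlier sketches and assumes images_bulk_add accepts (api, dataset_id, img_infos); only the beginning of its signature is visible in this diff.

# Hypothetical copy step; all ids are placeholders.
src_infos = api_utils.images_get_list(api, src_dataset_id, image_ids=selected_ids)
new_infos = api_utils.images_bulk_add(api, dst_dataset_id, src_infos)  # assumed signature
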
2 changes: 1 addition & 1 deletion src/main.py
@@ -1239,7 +1239,7 @@ def move_datasets_tree(
def get_item_infos(dataset_id: int, item_ids: List[int], project_type: str):
filters = [{"field": "id", "operator": "in", "value": item_ids}]
if project_type == str(sly.ProjectType.IMAGES):
return api.image.get_info_by_id_batch(item_ids, force_metadata_for_links=False)
return api_utils.images_get_list(api, dataset_id, item_ids)
if project_type == str(sly.ProjectType.VIDEOS):
return api.video.get_info_by_id_batch(item_ids)
if project_type == str(sly.ProjectType.VOLUMES):
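
On the main.py side, the image branch of get_item_infos now goes through api_utils.images_get_list, which requests the creator field and sets force_metadata_for_links=False, instead of api.image.get_info_by_id_batch. A hypothetical call with the placeholder ids used above:

item_infos = get_item_infos(
    dataset_id=src_dataset_id,
    item_ids=selected_ids,
    project_type=str(sly.ProjectType.IMAGES),
)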
