Add Workflow and upgrade SDK to v6.73.157 #6

Merged · 7 commits · Aug 15, 2024
8 changes: 4 additions & 4 deletions config.json
@@ -1,13 +1,14 @@
 {
   "name": "Export to DOTA",
   "type": "app",
   "version": "2.0.0",
+  "headless": true,
   "description": "Export images in DOTA format and prepares downloadable archive",
   "categories": [
     "images",
     "export"
   ],
-  "docker_image": "supervisely/import-export:0.0.5",
+  "docker_image": "supervisely/import-export:6.73.157",
   "icon": "https://user-images.githubusercontent.com/115161827/203991592-de444a0d-3d7c-4c26-b575-6fa4ac020635.jpg",
   "icon_cover": true,
   "poster": "https://user-images.githubusercontent.com/115161827/203993840-5a170216-d7a2-4e45-a74a-4b4c856e5b2f.jpg",
@@ -19,7 +20,6 @@
     ]
   },
   "min_agent_version": "6.7.4",
-  "min_instance_version": "6.5.51",
-  "entrypoint": "python -m uvicorn src.main:app --host 0.0.0.0 --port 8000",
-  "port": 8000
+  "min_instance_version": "6.10.0",
+  "main_script": "src/main.py"
 }
2 changes: 1 addition & 1 deletion dev_requirements.txt
@@ -1,2 +1,2 @@
 black==22.6.0
-supervisely==6.72.55
+supervisely==6.73.157
3 changes: 2 additions & 1 deletion src/functions.py
@@ -87,7 +87,7 @@ def label_to_ro_bbox(label: sly.Label, project_meta: sly.ProjectMeta):
     return ann_line


-def upload_project_to_tf(api, project):
+def upload_project_to_tf(api: sly.Api, project: sly.ProjectInfo) -> sly.api.file_api.FileInfo:
     full_archive_name = f"{str(project.id)}_{project.name}.tar"
     result_archive = os.path.join(STORAGE_DIR, full_archive_name)
     sly.fs.archive_directory(PROJECT_DIR, result_archive)
@@ -118,3 +118,4 @@ def _print_progress(monitor, upload_progress):
     api.task.set_output_archive(
         TASK_ID, file_info.id, full_archive_name, file_url=file_info.storage_path
     )
+    return file_info
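
The signature change is the point of this file's diff: `upload_project_to_tf` now returns the uploaded archive's `FileInfo` instead of discarding it, so the caller can hand it straight to the new workflow helpers. A minimal sketch of the resulting call chain, assuming `import src.workflow as w` as in `src/main.py` below:

```python
# Sketch: the archive's FileInfo flows from the upload into the workflow output card.
file_info = upload_project_to_tf(api, project)  # -> sly.api.file_api.FileInfo
w.workflow_output(api, file_info)               # registers the archive as the app's output
```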
4 changes: 1 addition & 3 deletions src/globals.py
@@ -24,8 +24,6 @@
     sly.Rectangle,
 ]

-app = sly.Application()
-
 STORAGE_DIR = sly.app.get_data_dir()
 PROJECT_DIR = os.path.join(STORAGE_DIR, "dota")
-mkdir(PROJECT_DIR, True)
+mkdir(PROJECT_DIR, True)
97 changes: 52 additions & 45 deletions src/main.py
@@ -9,52 +9,59 @@
     get_anns_list,
     upload_project_to_tf,
 )
-from src.globals import DATASET_ID, PROJECT_DIR, PROJECT_ID, api, app
+from src.globals import DATASET_ID, PROJECT_DIR, PROJECT_ID, api
+import src.workflow as w

-project = api.project.get_info_by_id(id=PROJECT_ID)
-project_meta = sly.ProjectMeta.from_json(data=api.project.get_meta(id=PROJECT_ID))
-project_meta = convert_obj_classes_to_poly(project_meta=project_meta)
+@sly.handle_exceptions(has_ui=False)
+def main():
+    project = api.project.get_info_by_id(id=PROJECT_ID)
+    project_meta = sly.ProjectMeta.from_json(data=api.project.get_meta(id=PROJECT_ID))
+    project_meta = convert_obj_classes_to_poly(project_meta=project_meta)

-if DATASET_ID is not None:
-    datasets = [api.dataset.get_info_by_id(id=DATASET_ID)]
-else:
-    datasets = api.dataset.get_list(project_id=PROJECT_ID)
-progress_ds = sly.Progress(message="Exporting datasets", total_cnt=len(datasets))
-for dataset in datasets:
-    dataset_dir = os.path.join(PROJECT_DIR, dataset.name)
-    images_dir = os.path.join(dataset_dir, "images")
-    ann_dir = os.path.join(dataset_dir, "labelTxt")
-    mkdir(images_dir)
-    mkdir(ann_dir)
-    images_infos = api.image.get_list(dataset_id=dataset.id)
-    images_ids = [img_info.id for img_info in images_infos]
-    images_names = [img_info.name for img_info in images_infos]
-    images_paths = [
-        os.path.join(images_dir, img_info.name) for img_info in images_infos
-    ]
-    anns = get_anns_list(api=api, ds_id=dataset.id, project_meta=project_meta)
-    anns_paths = [
-        os.path.join(ann_dir, f"{get_file_name(img_info.name)}.txt")
-        for img_info in images_infos
-    ]
+    if DATASET_ID is not None:
+        datasets = [api.dataset.get_info_by_id(id=DATASET_ID)]
+        w.workflow_input(api, DATASET_ID, type="dataset")
+    else:
+        datasets = api.dataset.get_list(project_id=PROJECT_ID)
+        w.workflow_input(api, PROJECT_ID, type="project")
+    progress_ds = sly.Progress(message="Exporting datasets", total_cnt=len(datasets))
+    for dataset in datasets:
+        dataset_dir = os.path.join(PROJECT_DIR, dataset.name)
+        images_dir = os.path.join(dataset_dir, "images")
+        ann_dir = os.path.join(dataset_dir, "labelTxt")
+        mkdir(images_dir)
+        mkdir(ann_dir)
+        images_infos = api.image.get_list(dataset_id=dataset.id)
+        images_ids = [img_info.id for img_info in images_infos]
+        images_paths = [
+            os.path.join(images_dir, img_info.name) for img_info in images_infos
+        ]
+        anns = get_anns_list(api=api, ds_id=dataset.id, project_meta=project_meta)
+        anns_paths = [
+            os.path.join(ann_dir, f"{get_file_name(img_info.name)}.txt")
+            for img_info in images_infos
+        ]

-    progress_img = sly.Progress(message="Processing images", total_cnt=len(images_ids))
-    for batch_img_ids, batch_img_paths, batch_anns, batch_anns_paths in zip(
-        sly.batched(images_ids),
-        sly.batched(images_paths),
-        sly.batched(anns),
-        sly.batched(anns_paths),
-    ):
-        convert_sly_to_dota(
-            anns_paths=batch_anns_paths,
-            anns=batch_anns,
-            project_meta=project_meta,
-        )
-        api.image.download_paths(
-            dataset_id=dataset.id, ids=batch_img_ids, paths=batch_img_paths
-        )
-        progress_img.iters_done_report(len(batch_img_ids))
-    progress_ds.iter_done_report()
+        progress_img = sly.Progress(message="Processing images", total_cnt=len(images_ids))
+        for batch_img_ids, batch_img_paths, batch_anns, batch_anns_paths in zip(
+            sly.batched(images_ids),
+            sly.batched(images_paths),
+            sly.batched(anns),
+            sly.batched(anns_paths),
+        ):
+            convert_sly_to_dota(
+                anns_paths=batch_anns_paths,
+                anns=batch_anns,
+                project_meta=project_meta,
+            )
+            api.image.download_paths(
+                dataset_id=dataset.id, ids=batch_img_ids, paths=batch_img_paths
+            )
+            progress_img.iters_done_report(len(batch_img_ids))
+        progress_ds.iter_done_report()

-upload_project_to_tf(api, project)
-app.shutdown()
+    file_info = upload_project_to_tf(api, project)
+    w.workflow_output(api, file_info)
+
+if __name__ == "__main__":
+    sly.main_wrapper("main", main)
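
Structurally, `src/main.py` moves from module-level code plus a served `sly.Application()` to the one-shot headless pattern: `main()` runs once under `sly.main_wrapper`, and `@sly.handle_exceptions(has_ui=False)` reports failures without a UI session, so the explicit `app.shutdown()` is no longer needed. A stripped-down sketch of that skeleton, using only the pieces visible in this diff:

```python
import supervisely as sly

@sly.handle_exceptions(has_ui=False)  # report errors without a UI session
def main():
    ...  # one-shot export logic; the process exits when main() returns

if __name__ == "__main__":
    sly.main_wrapper("main", main)  # wraps main() with logging and exception handling
```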
14 changes: 0 additions & 14 deletions src/modal.html

This file was deleted.

30 changes: 30 additions & 0 deletions src/workflow.py
@@ -0,0 +1,30 @@
+# This module contains the functions that are used to configure the input and output of the workflow for the current app.
+
+import supervisely as sly
+from typing import Union, Literal
+
+def workflow_input(api: sly.Api, id: Union[int, str], type: Literal["project", "dataset"]):
+    if type == "project":
+        api.app.workflow.add_input_project(int(id))
+        sly.logger.debug(f"Workflow: Input project - {id}")
+    elif type == "dataset":
+        api.app.workflow.add_input_dataset(int(id))
+        sly.logger.debug(f"Workflow: Input dataset - {id}")
+
+def workflow_output(api: sly.Api, file: Union[int, sly.api.file_api.FileInfo]):
+    try:
+        if isinstance(file, int):
+            file = api.file.get_info_by_id(file)
+        relation_settings = sly.WorkflowSettings(
+            title=file.name,
+            icon="archive",
+            icon_color="#33c94c",
+            icon_bg_color="#d9f7e4",
+            url=f"/files/{file.id}/true/?teamId={file.team_id}",
+            url_title="Download",
+        )
+        meta = sly.WorkflowMeta(relation_settings=relation_settings)
+        api.app.workflow.add_output_file(file, meta=meta)
+        sly.logger.debug(f"Workflow: Output file - {file}")
+    except Exception as e:
+        sly.logger.debug(f"Failed to add output to the workflow: {repr(e)}")