Skip to content

Commit

Permalink
merge changes in the test branch (#18)
Browse files Browse the repository at this point in the history
  • Loading branch information
almazgimaev authored May 15, 2024
1 parent f79e9d5 commit fbeadc8
Show file tree
Hide file tree
Showing 9 changed files with 110 additions and 266 deletions.
3 changes: 3 additions & 0 deletions .flake8
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
[flake8]
max-line-length = 100
ignore = E203, E501, W503, E722, W605
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -5,3 +5,6 @@ venv
dataset_tools
supervisely
debug
__pycache__
data/
.DS_Store
4 changes: 2 additions & 2 deletions .vscode/settings.json
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
"**/__pycache__": true,
"build": true,
"supervisely.egg-info": true,
".venv": true
// ".venv": true
},
"python.defaultInterpreterPath": ".venv/bin/python",
"editor.formatOnSave": true,
Expand All @@ -20,7 +20,7 @@
"editor.defaultFormatter": "ms-python.black-formatter",
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.organizeImports": true
"source.organizeImports": "explicit"
}
},
"isort.args": ["--profile", "black"],
Expand Down
57 changes: 29 additions & 28 deletions config.json
Original file line number Diff line number Diff line change
@@ -1,30 +1,31 @@
{
"name": "Export to Supervisely format",
"type": "app",
"categories": [
"images",
"export"
"name": "Export to Supervisely format",
"type": "app",
"version": "2.0.0",
"categories": [
"images",
"export"
],
"description": "images and JSON annotations",
"docker_image": "supervisely/import-export:6.73.60",
"instance_version": "6.9.4",
"main_script": "src/main.py",
"modal_template": "src/modal.html",
"modal_template_state": {
"download": "all",
"fixExtension": false
},
"task_location": "workspace_tasks",
"isolate": true,
"headless": true,
"icon": "https://i.imgur.com/1hqGMyg.png",
"icon_background": "#FFFFFF",
"context_menu": {
"target": [
"images_project",
"images_dataset"
],
"description": "images and JSON annotations",
"docker_image": "supervisely/import-export:6.72.205",
"instance_version": "6.8.48",
"main_script": "src/main.py",
"modal_template": "src/modal.html",
"modal_template_state": {
"download": "all",
"fixExtension": false
},
"task_location": "workspace_tasks",
"isolate": true,
"headless": true,
"icon": "https://i.imgur.com/1hqGMyg.png",
"icon_background": "#FFFFFF",
"context_menu": {
"target": [
"images_project",
"images_dataset"
],
"context_root": "Download as"
},
"poster": "https://user-images.githubusercontent.com/106374579/186665737-ec3da9cc-193f-43ee-85db-a6f802b2dfe4.png"
}
"context_root": "Download as"
},
"poster": "https://user-images.githubusercontent.com/106374579/186665737-ec3da9cc-193f-43ee-85db-a6f802b2dfe4.png"
}
22 changes: 22 additions & 0 deletions create_venv.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
#!/bin/bash

# Create a fresh Python virtual environment (.venv) and install the
# development requirements into it, then smoke-test the supervisely import.
#
# learn more in documentation
# Official python docs: https://docs.python.org/3/library/venv.html
# Supervisely developer portal: https://developer.supervise.ly/getting-started/installation#venv

# Start from a clean slate: drop any previously created environment.
if [ -d ".venv" ]; then
    echo "VENV already exists, will be removed"
    rm -rf .venv
fi

# Each step is chained with && so the script stops at the first failure.
echo "VENV will be created" && \
python3 -m venv .venv && \
source .venv/bin/activate && \

echo "Install requirements..." && \
pip3 install -r dev_requirements.txt && \
echo "Requirements have been successfully installed" && \
echo "Testing imports, please wait a minute ..." && \
python3 -c "import supervisely as sly" && \
echo "Success!" && \
deactivate
2 changes: 1 addition & 1 deletion dev_requirements.txt
Original file line number Diff line number Diff line change
@@ -1 +1 @@
supervisely==6.72.206
supervisely==6.73.60
15 changes: 4 additions & 11 deletions local.env
Original file line number Diff line number Diff line change
@@ -1,13 +1,6 @@
PYTHONUNBUFFERED=1
TASK_ID=50046

context.teamId=537
context.workspaceId=1029
modal.state.slyProjectId=32318
TEAM_ID = 448
WORKSPACE_ID = 690
PROJECT_ID = 35637
modal.state.download=all
modal.state.fixExtension=true

DEBUG_APP_DIR=debug/app_debug_data
DEBUG_CACHE_DIR=debug/app_debug_cache

LOG_LEVEL="debug"
# DATASET_ID =
183 changes: 46 additions & 137 deletions src/main.py
Original file line number Diff line number Diff line change
@@ -1,31 +1,33 @@
import os
from distutils import util

from dotenv import load_dotenv

import sly_functions as f
import supervisely as sly
from dotenv import load_dotenv
from supervisely.api.module_api import ApiField
from supervisely.app.v1.app_service import AppService
from supervisely.io.fs import get_file_ext

if sly.is_development():
load_dotenv("local.env")
load_dotenv(os.path.expanduser("~/supervisely.env"))

api = sly.Api.from_env()
my_app = AppService()

TEAM_ID = int(os.environ["context.teamId"])
WORKSPACE_ID = int(os.environ["context.workspaceId"])
PROJECT_ID = int(os.environ["modal.state.slyProjectId"])
DATASET_ID = os.environ.get("modal.state.slyDatasetId", None)
if DATASET_ID is not None:
DATASET_ID = int(DATASET_ID)
task_id = int(os.environ["TASK_ID"])
mode = os.environ["modal.state.download"]
replace_method = bool(util.strtobool(os.environ["modal.state.fixExtension"]))

# region constants
# Paths are resolved relative to this file so the app works from any CWD.
parent_dir = os.path.dirname(os.path.abspath(__file__))
data_dir = os.path.join(parent_dir, "data")
batch_size = 10  # number of images handled per request batch
# endregion
# region envvars
team_id = sly.env.team_id()
project_id = sly.env.project_id()
# Optional: when no dataset id is set, the whole project is exported.
dataset_id = sly.env.dataset_id(raise_not_found=False)
# "all" exports images together with annotations; any other value skips images.
mode = os.environ.get("modal.state.download", "all")
# strtobool parses "true"/"false"/"1"/"0" etc.; extension fixing is off by default.
replace_method = bool(util.strtobool(os.environ.get("modal.state.fixExtension", "false")))
# endregion
sly.logger.info(
    f"Team: {team_id}, Project: {project_id}, Dataset: {dataset_id}, Mode: {mode}, "
    f"Fix extension: {replace_method}"
)


def ours_convert_json_info(self, info: dict, skip_missing=True):
Expand Down Expand Up @@ -58,145 +60,52 @@ def ours_convert_json_info(self, info: dict, skip_missing=True):
return self.InfoType(*field_values)


def init(data, state):
    """Seed the modal UI *state* with the current export options.

    ``data`` is accepted for the app-template callback signature but is
    not modified here.
    """
    # Mirror the module-level settings into the app state dict
    # ("download" first, then "fixExtension" — same insertion order).
    state.update({"download": mode, "fixExtension": replace_method})


if replace_method:
sly.logger.debug("change SDK method")
sly.logger.debug("Fix image extension method is enabled")
sly.api.image_api.ImageApi._convert_json_info = ours_convert_json_info


@my_app.callback("download_as_sly")
@sly.timeit
def download_as_sly(api: sly.Api, task_id, context, state, app_logger):
global TEAM_ID, PROJECT_ID, DATASET_ID, mode
project = api.project.get_info_by_id(PROJECT_ID)
if project is None:
raise Exception(f"Project with ID {PROJECT_ID} not found in your account")
if DATASET_ID is not None:
dataset_ids = [DATASET_ID]
dataset = api.dataset.get_info_by_id(DATASET_ID)
if dataset is None:
raise Exception(f"Dataset with ID {DATASET_ID} not found in your account")
def download(project: sly.Project) -> str:
"""Downloads the project and returns the path to the downloaded directory.
:param project: The project to download
:type project: sly.Project
:return: The path to the downloaded directory
:rtype: str
"""
download_dir = os.path.join(data_dir, f"{project.id}_{project.name}")
sly.fs.mkdir(download_dir, remove_content_if_exists=True)

if dataset_id is not None:
dataset_ids = [dataset_id]
else:
try:
datasets = api.dataset.get_list(project.id)
except Exception as e:
raise Exception(f"Failed to get list of datasets from project ID:{project.id}. {e}")
datasets = api.dataset.get_list(project.id, recursive=True)
dataset_ids = [dataset.id for dataset in datasets]

if mode == "all":
download_json_plus_images(api, project, dataset_ids)
save_images = True
else:
download_only_json(api, project, dataset_ids)

download_dir = os.path.join(my_app.data_dir, f"{project.id}_{project.name}")
full_archive_name = str(project.id) + "_" + project.name + ".tar"
result_archive = os.path.join(my_app.data_dir, full_archive_name)
sly.fs.archive_directory(download_dir, result_archive)
app_logger.info("Result directory is archived")
upload_progress = []
remote_archive_path = os.path.join(
sly.team_files.RECOMMENDED_EXPORT_PATH,
"export-to-supervisely-format/{}_{}".format(task_id, full_archive_name),
)

def _print_progress(monitor, upload_progress):
if len(upload_progress) == 0:
upload_progress.append(
sly.Progress(
message="Upload {!r}".format(full_archive_name),
total_cnt=monitor.len,
ext_logger=app_logger,
is_size=True,
)
)
upload_progress[0].set_current_value(monitor.bytes_read)

file_info = api.file.upload(
TEAM_ID, result_archive, remote_archive_path, lambda m: _print_progress(m, upload_progress)
)
app_logger.info("Uploaded to Team-Files: {!r}".format(file_info.path))
api.task.set_output_archive(
task_id, file_info.id, full_archive_name, file_url=file_info.storage_path
)
my_app.stop()
save_images = False

sly.logger.info(f"Starting download of project {project.name} to {download_dir}...")

def download_json_plus_images(api, project, dataset_ids):
sly.logger.info("DOWNLOAD_PROJECT", extra={"title": project.name})
download_dir = os.path.join(my_app.data_dir, f"{project.id}_{project.name}")
if os.path.exists(download_dir):
sly.fs.clean_dir(download_dir)
f.download_project(
sly.Project.download(
api,
project.id,
download_dir,
project_id,
dest_dir=download_dir,
dataset_ids=dataset_ids,
log_progress=True,
batch_size=batch_size,
save_image_meta=True,
)
sly.logger.info("Project {!r} has been successfully downloaded.".format(project.name))


def download_only_json(api, project, dataset_ids):
    """Export only the project meta and per-image JSON annotations (no images).

    :param api: Supervisely API client used for all server requests.
    :param project: info object of the project being exported (id/name are read).
    :param dataset_ids: list of dataset ids to export, or ``None`` to export
        every dataset of the project.
    """
    sly.logger.info("DOWNLOAD_PROJECT", extra={"title": project.name})
    download_dir = os.path.join(my_app.data_dir, f"{project.id}_{project.name}")
    sly.fs.mkdir(download_dir)
    # Project meta (classes/tags) is written once at the export root.
    meta_json = api.project.get_meta(project.id)
    sly.io.json.dump_json_file(meta_json, os.path.join(download_dir, "meta.json"))

    total_images = 0
    # Resolve dataset infos from the explicit id list, or list the whole project.
    dataset_info = (
        [api.dataset.get_info_by_id(ds_id) for ds_id in dataset_ids]
        if (dataset_ids is not None)
        else api.dataset.get_list(project.id)
    )

    for dataset in dataset_info:
        # One "<dataset>/ann" directory per dataset inside the export root.
        ann_dir = os.path.join(download_dir, dataset.name, "ann")
        sly.fs.mkdir(ann_dir)

        images = api.image.get_list(dataset.id)
        ds_progress = sly.Progress(
            "Downloading annotations for: {!r}/{!r}".format(project.name, dataset.name),
            total_cnt=len(images),
        )
        # Fetch annotations in small batches to keep each request bounded.
        for batch in sly.batched(images, batch_size=10):
            image_ids = [image_info.id for image_info in batch]
            image_names = [image_info.name for image_info in batch]

            # download annotations in json format
            ann_infos = api.annotation.download_batch(dataset.id, image_ids)

            # Write one "<image name>.json" file per image.
            for image_name, ann_info in zip(image_names, ann_infos):
                sly.io.json.dump_json_file(
                    ann_info.annotation, os.path.join(ann_dir, image_name + ".json")
                )
            ds_progress.iters_done_report(len(batch))
            total_images += len(batch)

    sly.logger.info("Project {!r} has been successfully downloaded".format(project.name))
    sly.logger.info("Total number of images: {!r}".format(total_images))


def main():
sly.logger.info(
"Script arguments",
extra={
"TEAM_ID": TEAM_ID,
"WORKSPACE_ID": WORKSPACE_ID,
"PROJECT_ID": PROJECT_ID,
},
save_images=save_images,
)

data = {}
state = {}
init(data, state)
my_app.run(initial_events=[{"command": "download_as_sly"}])
sly.logger.info("Project downloaded...")
return download_dir


if __name__ == "__main__":
sly.main_wrapper("main", main)
project = api.project.get_info_by_id(project_id)
download_dir = download(project)
sly.output.set_download(download_dir)
sly.logger.info("Archive uploaded and ready for download.")
Loading

0 comments on commit fbeadc8

Please sign in to comment.