diff --git a/dev_requirements.txt b/dev_requirements.txt index 3134f30..e99e686 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -1,4 +1,4 @@ -supervisely==6.73.226 +supervisely==6.73.229 openmim ffmpeg-python==0.2.0 diff --git a/docker/Dockerfile b/docker/Dockerfile index b031c59..1f77abb 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -16,7 +16,6 @@ RUN pip3 install yapf==0.40.1 # COPY dev_requirements.txt dev_requirements.txt # RUN pip3 install -r dev_requirements.txt -RUN pip3 install supervisely==6.73.226 RUN pip3 install setuptools==69.5.1 RUN pip3 install openmim @@ -24,7 +23,8 @@ RUN pip3 install ffmpeg-python==0.2.0 RUN pip3 install pyyaml==6.0 RUN mkdir -p /tmp/mmseg \ - && wget https://github.com/open-mmlab/mmsegmentation/archive/refs/tags/v0.23.0.tar.gz -P /tmp/mmseg \ - && tar -xvf /tmp/mmseg/v0.23.0.tar.gz -C /tmp/mmseg +&& wget https://github.com/open-mmlab/mmsegmentation/archive/refs/tags/v0.23.0.tar.gz -P /tmp/mmseg \ +&& tar -xvf /tmp/mmseg/v0.23.0.tar.gz -C /tmp/mmseg -LABEL python_sdk_version=6.73.226 +RUN pip3 install supervisely==6.73.229 +LABEL python_sdk_version=6.73.229 diff --git a/serve/config.json b/serve/config.json index f10cf6f..cb701e6 100644 --- a/serve/config.json +++ b/serve/config.json @@ -11,8 +11,8 @@ "serve" ], "description": "Deploy model as REST API service", - "docker_image": "supervisely/mmseg:1.3.16", - "min_instance_version": "6.12.4", + "docker_image": "supervisely/mmseg:1.3.17", + "min_instance_version": "6.12.5", "entrypoint": "python -m uvicorn main:m.app --app-dir ./serve/src --host 0.0.0.0 --port 8000 --ws websockets", "port": 8000, "task_location": "application_sessions", diff --git a/serve/dev_requirements.txt b/serve/dev_requirements.txt index e424240..a46035d 100644 --- a/serve/dev_requirements.txt +++ b/serve/dev_requirements.txt @@ -1,6 +1,6 @@ # git+https://github.com/supervisely/supervisely.git@some-test-branch -supervisely==6.73.226 +supervisely==6.73.229 openmim 
ffmpeg-python==0.2.0 diff --git a/train/config.json b/train/config.json index cd2302c..4f7d343 100644 --- a/train/config.json +++ b/train/config.json @@ -10,8 +10,8 @@ "train" ], "description": "Dashboard to configure, start and monitor training", - "docker_image": "supervisely/mmseg:1.3.16", - "min_instance_version": "6.12.4", + "docker_image": "supervisely/mmseg:1.3.17", + "min_instance_version": "6.12.5", "main_script": "train/src/main.py", "gui_template": "train/src/gui.html", "task_location": "workspace_tasks", diff --git a/train/dev_requirements.txt b/train/dev_requirements.txt index e424240..a46035d 100644 --- a/train/dev_requirements.txt +++ b/train/dev_requirements.txt @@ -1,6 +1,6 @@ # git+https://github.com/supervisely/supervisely.git@some-test-branch -supervisely==6.73.226 +supervisely==6.73.229 openmim ffmpeg-python==0.2.0 diff --git a/train/src/sly_project_cached.py b/train/src/sly_project_cached.py index 6ad0cdd..41a6ee8 100644 --- a/train/src/sly_project_cached.py +++ b/train/src/sly_project_cached.py @@ -4,8 +4,8 @@ from supervisely.project.download import ( download_to_cache, copy_from_cache, - is_cached, get_cache_size, + download_async ) from sly_train_progress import get_progress_cb import sly_globals as g @@ -13,15 +13,27 @@ def _no_cache_download(api: sly.Api, project_info: sly.ProjectInfo, project_dir: str, progress_index: int): total = project_info.items_count - download_progress = get_progress_cb(progress_index, "Downloading input data...", total * 2) - sly.download( - api=api, - project_id=project_info.id, - dest_dir=project_dir, - dataset_ids=None, - log_progress=True, - progress_cb=download_progress, - ) + try: + download_progress = get_progress_cb(progress_index, "Downloading input data...", total) + download_async( + api, + project_info.id, + project_dir, + progress_cb=download_progress + ) + except Exception as e: + api.logger.warning( + "Failed to download project using async download. Trying sync download..." 
+ ) + download_progress = get_progress_cb(progress_index, "Downloading input data...", total) + sly.download( + api=api, + project_id=project_info.id, + dest_dir=project_dir, + dataset_ids=None, + log_progress=True, + progress_cb=download_progress, + ) def download_project( api: sly.Api, @@ -37,47 +49,23 @@ def download_project( return try: - # get datasets to download and cached - dataset_infos = api.dataset.get_list(project_info.id) - to_download = [info for info in dataset_infos if not is_cached(project_info.id, info.name)] - cached = [info for info in dataset_infos if is_cached(project_info.id, info.name)] - if len(cached) == 0: - log_msg = "No cached datasets found" - else: - log_msg = "Using cached datasets: " + ", ".join( - f"{ds_info.name} ({ds_info.id})" for ds_info in cached - ) - sly.logger.info(log_msg) - if len(to_download) == 0: - log_msg = "All datasets are cached. No datasets to download" - else: - log_msg = "Downloading datasets: " + ", ".join( - f"{ds_info.name} ({ds_info.id})" for ds_info in to_download - ) - sly.logger.info(log_msg) - # get images count - total = sum([ds_info.images_count for ds_info in dataset_infos]) + total = project_info.items_count # download - if total > 0: - download_progress = get_progress_cb(progress_index, "Downloading input data...", total * 2) - download_to_cache( - api=api, - project_id=project_info.id, - dataset_infos=dataset_infos, - log_progress=True, - progress_cb=download_progress, - ) + download_progress = get_progress_cb(progress_index, "Downloading input data...", total) + download_to_cache( + api=api, + project_id=project_info.id, + log_progress=True, + progress_cb=download_progress, + ) # copy datasets from cache - total = sum([get_cache_size(project_info.id, ds.name) for ds in dataset_infos]) - dataset_names = [ds_info.name for ds_info in dataset_infos] - if total > 0: - download_progress = get_progress_cb(progress_index, "Retreiving data from cache...", total, is_size=True) - copy_from_cache( - 
project_id=project_info.id, - dest_dir=project_dir, - dataset_names=dataset_names, - progress_cb=download_progress, - ) + total = get_cache_size(project_info.id) + download_progress = get_progress_cb(progress_index, "Retrieving data from cache...", total, is_size=True) + copy_from_cache( + project_id=project_info.id, + dest_dir=project_dir, + progress_cb=download_progress, + ) except Exception: sly.logger.warning(f"Failed to retreive project from cache. Downloading it...", exc_info=True) if os.path.exists(project_dir):