Skip to content

Commit

Permalink
temp debug logs
Browse files Browse the repository at this point in the history
  • Loading branch information
NikolaiPetukhov committed Feb 28, 2024
1 parent 69a471e commit 439c681
Showing 1 changed file with 6 additions and 2 deletions.
8 changes: 6 additions & 2 deletions train/src/dataset_cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,9 +48,9 @@ def download_project(api: sly.Api, project_info: sly.ProjectInfo, dataset_infos:
# clean project dir
if os.path.exists(g.project_dir):
sly.fs.clean_dir(g.project_dir)

# TODO: check if `to_download` is empty before starting the download

# download
with progress(message="Downloading input data...", total=total) as pbar:
sly.download(
Expand All @@ -66,8 +66,12 @@ def download_project(api: sly.Api, project_info: sly.ProjectInfo, dataset_infos:
dataset_name = dataset_infos_dict[dataset_id].name
dataset_dir = os.path.join(g.project_dir, project_info.name, dataset_name)
cache_dataset_dir = os.path.join(g.cache_dir, str(project_info.id), str(dataset_id))
sly.logger.info(f"Saving dataset '{dataset_name}' to cache...", extra={"dataset_id": dataset_id})

sly.logger.info(f"dataset_dir: {dataset_dir}, cache_dataset_dir: {cache_dataset_dir}", extra={"is_dataset_dir_exist": os.path.exists(dataset_dir)})
with progress(message="Saving data to cache...") as pbar:
sly.fs.copy_dir_recursively(dataset_dir, cache_dataset_dir, progress_cb=pbar.update)
sly.logger.info(f"dataset_dir: {dataset_dir}, cache_dataset_dir: {cache_dataset_dir}", extra={"is_cache_dataset_dir_exist": os.path.exists(cache_dataset_dir)})
# copy cached datasets
for dataset_id in cached:
dataset_name = dataset_infos_dict[dataset_id].name
Expand Down

0 comments on commit 439c681

Please sign in to comment.