From 982a3e485dd65231f8cacd296844a7d0bc9ff556 Mon Sep 17 00:00:00 2001
From: aimspot
Date: Tue, 25 Jun 2024 15:40:02 +0300
Subject: [PATCH] update readme and tests

---
 README.md                       |   35 +-
 examples/example_ODRS.ipynb     | 1062 +++++++++++++++++++++++++++++++
 src/DL/config/train_config.yaml |   13 +-
 src/ML/config/ml_config.yaml    |   10 +-
 4 files changed, 1094 insertions(+), 26 deletions(-)
 create mode 100644 examples/example_ODRS.ipynb

diff --git a/README.md b/README.md
index a66324c..9ee322b 100755
--- a/README.md
+++ b/README.md
@@ -1,7 +1,6 @@
 # ODRS
-[![PythonVersion](https://img.shields.io/badge/python-3.8%20%7C%203.9%20%7C%203.10-blue)](https://pypi.org/project/scikit-learn/)
-
+[![PythonVersion](https://img.shields.io/badge/python-3.8-blue)](https://pypi.org/project/scikit-learn/)
 [![Documentation Status](https://readthedocs.org/projects/odrs-test/badge/?version=latest)](https://odrs-test.readthedocs.io/en/latest/?badge=latest)
 [![wiki](https://img.shields.io/badge/wiki-latest-blue)](http://www.wiki.odrs.space)
@@ -152,7 +151,10 @@ After you have placed your dataset in the folder ***user_datasets/*** and create
 ```
 ## Detectors Training
-1. Go to the directory containing ***custom_config.yaml*** in which the training parameters are specified.
+1. Go to the directory containing ***train_config.yaml***, in which the training parameters are specified:
+    ```markdown
+    cd ODRS/src/DL/config
+    ```
 2. Setting up training parameters:
 ```markdown
 # Name of the *.txt file with class names
@@ -172,7 +174,8 @@ After you have placed your dataset in the folder ***user_datasets/*** and create
 # "yolov7x", "yolov7", "yolov7-tiny", #"yolov8x6", "yolov8x",
 # "yolov8s", "yolov8n", "yolov8m", "faster-rcnn", "ssd"]
-# **NOTE**: For successful training of the ssd model, the size of your images should not exceed 512x512
+# **NOTE**: For successful training of the ssd model,
+# the size of your images should not exceed 512x512
 MODEL: ssd
@@ -190,11 +193,11 @@ After you have placed your dataset in the folder ***user_datasets/*** and create
 SPLIT_VAL_VALUE: 0.35
 ```
3. Starting training:
-**NOTE**: If, for example, you specified in ***custom_config.yaml***, the path to the yolov5 model, and you want to start yolov8, training will not start.
+**NOTE**: If, for example, you specified the path to a yolov5 model in ***train_config.yaml*** but want to start yolov8, training will not start.
 ```markdown
-  cd ODRS/ODRS/train_utils/train_model
-  python custom_train_all.py
+  cd ..
+  python train_detectors.py
 ```
4. After the training, you will see in the root directory ***ODRS*** a new directory ***runs***; all the results of experiments will be saved in it. For convenience, the result of each experiment is saved in a separate folder in the following form:
 ```markdown
@@ -207,16 +210,14 @@ After you have placed your dataset in the folder ***user_datasets/*** and create
To use the project in your code, you can use the built-in API. You can see full examples of using the API here: [Example API](https://github.com/saaresearch/ODRS/blob/master/examples/api_example.ipynb).
1. Initializing a task:
```python
-from ODRS.ODRS.api.ODRS import ODRS
+from ODRS.src.api.ODRS import ODRS
# init an ODRS object with training parameters
-odrs = ODRS(job="object_detection", data_path = 'full_data_path', classes = "classes.txt",
-            img_size = "512", batch_size = "25", epochs = "300",
-            model = 'yolov8x6', gpu_count = 1, select_gpu = "0", config_path = "dataset.yaml",
-            split_train_value = 0.6, split_val_value = 0.35)
+odrs = ODRS(job="object_detection", data_path='full_data_path', classes="classes.txt", img_size=300,
+            batch_size=20, epochs=1, model='yolov8n', split_train_value=0.85, split_val_value=0.1,
+            gpu_count=1, select_gpu=0)
```
2. Starting training:
```python
-from ODRS.ODRS.api.ODRS import ODRS
 odrs.fit()
```
3. Getting results:
@@ -236,7 +237,11 @@ This project is actively used in testing new models and datasets in Insystem for
-## Contacts
-- [Telegram](https://t.me/dedinside4ever)
+## Contact us
+
+ + Telegram + +
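The API section above ends at `odrs.fit()`. As a complementary, hypothetical sketch (not part of the ODRS API), the snippet below shows one way to run inference with the weights training leaves under ***runs/***, loading them directly with the `ultralytics` package pinned in `requirements.txt`. This only applies to the YOLOv8 family (e.g. `model='yolov8n'` as above); the run directory and image path are placeholders.

```python
from ultralytics import YOLO

# Placeholder path: substitute the experiment folder created under runs/
weights = "runs/<date>_<time>_yolov8n/exp/weights/best.pt"
model = YOLO(weights)

# imgsz should match the IMG_SIZE used for training (300 in the example above)
results = model.predict(source="path/to/image.jpg", imgsz=300, conf=0.25)
for result in results:
    print(result.boxes.xyxy, result.boxes.cls)  # predicted boxes and class ids
```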
diff --git a/examples/example_ODRS.ipynb b/examples/example_ODRS.ipynb new file mode 100644 index 0000000..67f5787 --- /dev/null +++ b/examples/example_ODRS.ipynb @@ -0,0 +1,1062 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "wH89jCAK-yP2" + }, + "source": [ + "# Cloning the repository" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "GKBxLmBuec7z", + "outputId": "efa5800b-5ae6-4ea1-8ded-b391c54654e7" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Cloning into 'ODRS'...\n", + "remote: Enumerating objects: 2812, done.\u001b[K\n", + "remote: Counting objects: 100% (907/907), done.\u001b[K\n", + "remote: Compressing objects: 100% (479/479), done.\u001b[K\n", + "remote: Total 2812 (delta 382), reused 818 (delta 355), pack-reused 1905\u001b[K\n", + "Receiving objects: 100% (2812/2812), 198.32 MiB | 36.01 MiB/s, done.\n", + "Resolving deltas: 100% (1251/1251), done.\n", + "/content/ODRS\n" + ] + } + ], + "source": [ + "!git clone -b develop https://github.com/saaresearch/ODRS.git\n", + "%cd ODRS/" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "cs0fBMiam8r-" + }, + "source": [ + "# Installing dependencies" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "id": "Tb3p1UsFfHYb", + "outputId": "0112e205-eb65-4513-e67d-84b27f57b723" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Collecting Pillow==9.5.0 (from -r requirements.txt (line 1))\n", + " Downloading Pillow-9.5.0-cp310-cp310-manylinux_2_28_x86_64.whl (3.4 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.4/3.4 MB\u001b[0m \u001b[31m31.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting pandas==1.4.2 (from -r requirements.txt (line 2))\n", + " Downloading pandas-1.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (11.7 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m11.7/11.7 MB\u001b[0m \u001b[31m81.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting GitPython==3.1.32 (from -r requirements.txt (line 3))\n", + " Downloading GitPython-3.1.32-py3-none-any.whl (188 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m188.5/188.5 kB\u001b[0m \u001b[31m27.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting matplotlib==3.7.0 (from -r requirements.txt (line 4))\n", + " Downloading matplotlib-3.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (11.6 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m11.6/11.6 MB\u001b[0m \u001b[31m71.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting numpy==1.23.5 (from -r requirements.txt (line 5))\n", + " Downloading numpy-1.23.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (17.1 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m17.1/17.1 MB\u001b[0m \u001b[31m27.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting opencv-python==4.7.0.72 (from -r requirements.txt (line 6))\n", + " Downloading opencv_python-4.7.0.72-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (61.8 MB)\n", + "\u001b[2K 
\u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m61.8/61.8 MB\u001b[0m \u001b[31m9.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting psutil==5.9.4 (from -r requirements.txt (line 7))\n", + " Downloading psutil-5.9.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (280 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m280.2/280.2 kB\u001b[0m \u001b[31m2.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting PyYAML==6.0 (from -r requirements.txt (line 8))\n", + " Downloading PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl (682 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m682.2/682.2 kB\u001b[0m \u001b[31m33.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting requests==2.28.2 (from -r requirements.txt (line 9))\n", + " Downloading requests-2.28.2-py3-none-any.whl (62 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m62.8/62.8 kB\u001b[0m \u001b[31m8.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting scipy==1.9.1 (from -r requirements.txt (line 10))\n", + " Downloading scipy-1.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (43.9 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m43.9/43.9 MB\u001b[0m \u001b[31m10.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting thop==0.1.1.post2209072238 (from -r requirements.txt (line 11))\n", + " Downloading thop-0.1.1.post2209072238-py3-none-any.whl (15 kB)\n", + "Collecting torch==1.13.1 (from -r requirements.txt (line 12))\n", + " Downloading torch-1.13.1-cp310-cp310-manylinux1_x86_64.whl (887.5 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m887.5/887.5 MB\u001b[0m \u001b[31m1.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting torchvision==0.14.1 (from -r requirements.txt (line 13))\n", + " Downloading torchvision-0.14.1-cp310-cp310-manylinux1_x86_64.whl (24.2 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m24.2/24.2 MB\u001b[0m \u001b[31m67.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting tqdm==4.64.1 (from -r requirements.txt (line 14))\n", + " Downloading tqdm-4.64.1-py2.py3-none-any.whl (78 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m78.5/78.5 kB\u001b[0m \u001b[31m11.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting ultralytics==8.0.149 (from -r requirements.txt (line 15))\n", + " Downloading ultralytics-8.0.149-py3-none-any.whl (614 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m614.4/614.4 kB\u001b[0m \u001b[31m61.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hRequirement already satisfied: scikit-learn==1.2.2 in /usr/local/lib/python3.10/dist-packages (from -r requirements.txt (line 16)) (1.2.2)\n", + "Collecting loguru==0.6.0 (from -r requirements.txt (line 17))\n", + " Downloading loguru-0.6.0-py3-none-any.whl (58 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m58.3/58.3 kB\u001b[0m \u001b[31m9.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting tensorboard==2.11.2 (from -r 
requirements.txt (line 18))\n", + " Downloading tensorboard-2.11.2-py3-none-any.whl (6.0 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m6.0/6.0 MB\u001b[0m \u001b[31m107.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting wandb==0.15.8 (from -r requirements.txt (line 19))\n", + " Downloading wandb-0.15.8-py3-none-any.whl (2.1 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.1/2.1 MB\u001b[0m \u001b[31m95.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting pycocotools==2.0.6 (from -r requirements.txt (line 20))\n", + " Downloading pycocotools-2.0.6.tar.gz (24 kB)\n", + " Installing build dependencies ... \u001b[?25l\u001b[?25hdone\n", + " Getting requirements to build wheel ... \u001b[?25l\u001b[?25hdone\n", + " Preparing metadata (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n", + "Requirement already satisfied: albumentations==1.3.1 in /usr/local/lib/python3.10/dist-packages (from -r requirements.txt (line 21)) (1.3.1)\n", + "Collecting vision-transformers==0.1.1.0 (from -r requirements.txt (line 22))\n", + " Downloading vision_transformers-0.1.1.0.tar.gz (38 kB)\n", + " Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + "Collecting torchinfo==1.8.0 (from -r requirements.txt (line 23))\n", + " Downloading torchinfo-1.8.0-py3-none-any.whl (23 kB)\n", + "Collecting catboost (from -r requirements.txt (line 24))\n", + " Downloading catboost-1.2.5-cp310-cp310-manylinux2014_x86_64.whl (98.2 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m98.2/98.2 MB\u001b[0m \u001b[31m9.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting umap-learn (from -r requirements.txt (line 25))\n", + " Downloading umap_learn-0.5.6-py3-none-any.whl (85 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m85.7/85.7 kB\u001b[0m \u001b[31m11.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting yacs (from -r requirements.txt (line 26))\n", + " Downloading yacs-0.1.8-py3-none-any.whl (14 kB)\n", + "Collecting wget (from -r requirements.txt (line 27))\n", + " Downloading wget-3.2.zip (10 kB)\n", + " Preparing metadata (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", + "Requirement already satisfied: python-dateutil>=2.8.1 in /usr/local/lib/python3.10/dist-packages (from pandas==1.4.2->-r requirements.txt (line 2)) (2.8.2)\n", + "Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas==1.4.2->-r requirements.txt (line 2)) (2023.4)\n", + "Collecting gitdb<5,>=4.0.1 (from GitPython==3.1.32->-r requirements.txt (line 3))\n", + " Downloading gitdb-4.0.11-py3-none-any.whl (62 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m62.7/62.7 kB\u001b[0m \u001b[31m7.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hRequirement already satisfied: contourpy>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib==3.7.0->-r requirements.txt (line 4)) (1.2.1)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.10/dist-packages (from matplotlib==3.7.0->-r requirements.txt (line 4)) (0.12.1)\n", + "Requirement already satisfied: fonttools>=4.22.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib==3.7.0->-r requirements.txt (line 4)) (4.53.0)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib==3.7.0->-r requirements.txt (line 4)) (1.4.5)\n", + "Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib==3.7.0->-r requirements.txt (line 4)) (24.1)\n", + "Requirement already satisfied: pyparsing>=2.3.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib==3.7.0->-r requirements.txt (line 4)) (3.1.2)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests==2.28.2->-r requirements.txt (line 9)) (3.3.2)\n", + "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests==2.28.2->-r requirements.txt (line 9)) (3.7)\n", + "Collecting urllib3<1.27,>=1.21.1 (from requests==2.28.2->-r requirements.txt (line 9))\n", + " Downloading urllib3-1.26.19-py2.py3-none-any.whl (143 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m143.9/143.9 kB\u001b[0m \u001b[31m21.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hRequirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests==2.28.2->-r requirements.txt (line 9)) (2024.6.2)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.10/dist-packages (from torch==1.13.1->-r requirements.txt (line 12)) (4.12.2)\n", + "Collecting nvidia-cuda-runtime-cu11==11.7.99 (from torch==1.13.1->-r requirements.txt (line 12))\n", + " Downloading nvidia_cuda_runtime_cu11-11.7.99-py3-none-manylinux1_x86_64.whl (849 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m849.3/849.3 kB\u001b[0m \u001b[31m65.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting nvidia-cudnn-cu11==8.5.0.96 (from torch==1.13.1->-r requirements.txt (line 12))\n", + " Downloading nvidia_cudnn_cu11-8.5.0.96-2-py3-none-manylinux1_x86_64.whl (557.1 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m557.1/557.1 MB\u001b[0m \u001b[31m2.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting nvidia-cublas-cu11==11.10.3.66 (from torch==1.13.1->-r requirements.txt (line 12))\n", + " Downloading nvidia_cublas_cu11-11.10.3.66-py3-none-manylinux1_x86_64.whl 
(317.1 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m317.1/317.1 MB\u001b[0m \u001b[31m2.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting nvidia-cuda-nvrtc-cu11==11.7.99 (from torch==1.13.1->-r requirements.txt (line 12))\n", + " Downloading nvidia_cuda_nvrtc_cu11-11.7.99-2-py3-none-manylinux1_x86_64.whl (21.0 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m21.0/21.0 MB\u001b[0m \u001b[31m71.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hRequirement already satisfied: seaborn>=0.11.0 in /usr/local/lib/python3.10/dist-packages (from ultralytics==8.0.149->-r requirements.txt (line 15)) (0.13.1)\n", + "Requirement already satisfied: py-cpuinfo in /usr/local/lib/python3.10/dist-packages (from ultralytics==8.0.149->-r requirements.txt (line 15)) (9.0.0)\n", + "Requirement already satisfied: joblib>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from scikit-learn==1.2.2->-r requirements.txt (line 16)) (1.4.2)\n", + "Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from scikit-learn==1.2.2->-r requirements.txt (line 16)) (3.5.0)\n", + "Requirement already satisfied: absl-py>=0.4 in /usr/local/lib/python3.10/dist-packages (from tensorboard==2.11.2->-r requirements.txt (line 18)) (1.4.0)\n", + "Requirement already satisfied: grpcio>=1.24.3 in /usr/local/lib/python3.10/dist-packages (from tensorboard==2.11.2->-r requirements.txt (line 18)) (1.64.1)\n", + "Requirement already satisfied: google-auth<3,>=1.6.3 in /usr/local/lib/python3.10/dist-packages (from tensorboard==2.11.2->-r requirements.txt (line 18)) (2.27.0)\n", + "Collecting google-auth-oauthlib<0.5,>=0.4.1 (from tensorboard==2.11.2->-r requirements.txt (line 18))\n", + " Downloading google_auth_oauthlib-0.4.6-py2.py3-none-any.whl (18 kB)\n", + "Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.10/dist-packages (from tensorboard==2.11.2->-r requirements.txt (line 18)) (3.6)\n", + "Requirement already satisfied: protobuf<4,>=3.9.2 in /usr/local/lib/python3.10/dist-packages (from tensorboard==2.11.2->-r requirements.txt (line 18)) (3.20.3)\n", + "Requirement already satisfied: setuptools>=41.0.0 in /usr/local/lib/python3.10/dist-packages (from tensorboard==2.11.2->-r requirements.txt (line 18)) (67.7.2)\n", + "Collecting tensorboard-data-server<0.7.0,>=0.6.0 (from tensorboard==2.11.2->-r requirements.txt (line 18))\n", + " Downloading tensorboard_data_server-0.6.1-py3-none-manylinux2010_x86_64.whl (4.9 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m4.9/4.9 MB\u001b[0m \u001b[31m104.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting tensorboard-plugin-wit>=1.6.0 (from tensorboard==2.11.2->-r requirements.txt (line 18))\n", + " Downloading tensorboard_plugin_wit-1.8.1-py3-none-any.whl (781 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m781.3/781.3 kB\u001b[0m \u001b[31m62.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hRequirement already satisfied: werkzeug>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from tensorboard==2.11.2->-r requirements.txt (line 18)) (3.0.3)\n", + "Requirement already satisfied: wheel>=0.26 in /usr/local/lib/python3.10/dist-packages (from tensorboard==2.11.2->-r requirements.txt (line 18)) (0.43.0)\n", + "Requirement already satisfied: Click!=8.0.0,>=7.1 in 
/usr/local/lib/python3.10/dist-packages (from wandb==0.15.8->-r requirements.txt (line 19)) (8.1.7)\n", + "Collecting sentry-sdk>=1.0.0 (from wandb==0.15.8->-r requirements.txt (line 19))\n", + " Downloading sentry_sdk-2.6.0-py2.py3-none-any.whl (296 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m296.1/296.1 kB\u001b[0m \u001b[31m34.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting docker-pycreds>=0.4.0 (from wandb==0.15.8->-r requirements.txt (line 19))\n", + " Downloading docker_pycreds-0.4.0-py2.py3-none-any.whl (9.0 kB)\n", + "Collecting pathtools (from wandb==0.15.8->-r requirements.txt (line 19))\n", + " Downloading pathtools-0.1.2.tar.gz (11 kB)\n", + " Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + "Collecting setproctitle (from wandb==0.15.8->-r requirements.txt (line 19))\n", + " Downloading setproctitle-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (30 kB)\n", + "Collecting appdirs>=1.4.3 (from wandb==0.15.8->-r requirements.txt (line 19))\n", + " Downloading appdirs-1.4.4-py2.py3-none-any.whl (9.6 kB)\n", + "Requirement already satisfied: scikit-image>=0.16.1 in /usr/local/lib/python3.10/dist-packages (from albumentations==1.3.1->-r requirements.txt (line 21)) (0.19.3)\n", + "Requirement already satisfied: qudida>=0.0.4 in /usr/local/lib/python3.10/dist-packages (from albumentations==1.3.1->-r requirements.txt (line 21)) (0.0.4)\n", + "Requirement already satisfied: opencv-python-headless>=4.1.1 in /usr/local/lib/python3.10/dist-packages (from albumentations==1.3.1->-r requirements.txt (line 21)) (4.10.0.84)\n", + "Requirement already satisfied: graphviz in /usr/local/lib/python3.10/dist-packages (from catboost->-r requirements.txt (line 24)) (0.20.3)\n", + "Requirement already satisfied: plotly in /usr/local/lib/python3.10/dist-packages (from catboost->-r requirements.txt (line 24)) (5.15.0)\n", + "Requirement already satisfied: six in /usr/local/lib/python3.10/dist-packages (from catboost->-r requirements.txt (line 24)) (1.16.0)\n", + "Requirement already satisfied: numba>=0.51.2 in /usr/local/lib/python3.10/dist-packages (from umap-learn->-r requirements.txt (line 25)) (0.58.1)\n", + "Collecting pynndescent>=0.5 (from umap-learn->-r requirements.txt (line 25))\n", + " Downloading pynndescent-0.5.13-py3-none-any.whl (56 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m56.9/56.9 kB\u001b[0m \u001b[31m7.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting smmap<6,>=3.0.1 (from gitdb<5,>=4.0.1->GitPython==3.1.32->-r requirements.txt (line 3))\n", + " Downloading smmap-5.0.1-py3-none-any.whl (24 kB)\n", + "Requirement already satisfied: cachetools<6.0,>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard==2.11.2->-r requirements.txt (line 18)) (5.3.3)\n", + "Requirement already satisfied: pyasn1-modules>=0.2.1 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard==2.11.2->-r requirements.txt (line 18)) (0.4.0)\n", + "Requirement already satisfied: rsa<5,>=3.1.4 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard==2.11.2->-r requirements.txt (line 18)) (4.9)\n", + "Requirement already satisfied: requests-oauthlib>=0.7.0 in /usr/local/lib/python3.10/dist-packages (from google-auth-oauthlib<0.5,>=0.4.1->tensorboard==2.11.2->-r requirements.txt (line 18)) (1.3.1)\n", + 
"Requirement already satisfied: llvmlite<0.42,>=0.41.0dev0 in /usr/local/lib/python3.10/dist-packages (from numba>=0.51.2->umap-learn->-r requirements.txt (line 25)) (0.41.1)\n", + "Requirement already satisfied: networkx>=2.2 in /usr/local/lib/python3.10/dist-packages (from scikit-image>=0.16.1->albumentations==1.3.1->-r requirements.txt (line 21)) (3.3)\n", + "Requirement already satisfied: imageio>=2.4.1 in /usr/local/lib/python3.10/dist-packages (from scikit-image>=0.16.1->albumentations==1.3.1->-r requirements.txt (line 21)) (2.31.6)\n", + "Requirement already satisfied: tifffile>=2019.7.26 in /usr/local/lib/python3.10/dist-packages (from scikit-image>=0.16.1->albumentations==1.3.1->-r requirements.txt (line 21)) (2024.6.18)\n", + "Requirement already satisfied: PyWavelets>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from scikit-image>=0.16.1->albumentations==1.3.1->-r requirements.txt (line 21)) (1.6.0)\n", + "Requirement already satisfied: MarkupSafe>=2.1.1 in /usr/local/lib/python3.10/dist-packages (from werkzeug>=1.0.1->tensorboard==2.11.2->-r requirements.txt (line 18)) (2.1.5)\n", + "Requirement already satisfied: tenacity>=6.2.0 in /usr/local/lib/python3.10/dist-packages (from plotly->catboost->-r requirements.txt (line 24)) (8.4.1)\n", + "Requirement already satisfied: pyasn1<0.7.0,>=0.4.6 in /usr/local/lib/python3.10/dist-packages (from pyasn1-modules>=0.2.1->google-auth<3,>=1.6.3->tensorboard==2.11.2->-r requirements.txt (line 18)) (0.6.0)\n", + "Requirement already satisfied: oauthlib>=3.0.0 in /usr/local/lib/python3.10/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard==2.11.2->-r requirements.txt (line 18)) (3.2.2)\n", + "Building wheels for collected packages: pycocotools, vision-transformers, wget, pathtools\n", + " Building wheel for pycocotools (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for pycocotools: filename=pycocotools-2.0.6-cp310-cp310-linux_x86_64.whl size=377170 sha256=2fbcc0899918976cc8414c1da06d086f5242c07baf9cf1db84e9bd4dbeeaeb07\n", + " Stored in directory: /root/.cache/pip/wheels/58/e6/f9/f87c8f8be098b51b616871315318329cae12cdb618f4caac93\n", + " Building wheel for vision-transformers (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for vision-transformers: filename=vision_transformers-0.1.1.0-py3-none-any.whl size=48412 sha256=05e052df2250029f1b1e1ae2f7d89777e7120f309321ed1032699ead7ea8b385\n", + " Stored in directory: /root/.cache/pip/wheels/02/f4/94/0a5c8d2a4fcb6aa4c590906ffd3d52dc8edbe94262ecaa7dae\n", + " Building wheel for wget (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for wget: filename=wget-3.2-py3-none-any.whl size=9656 sha256=a264244a71c22327ba3136c7f723cfaa5052bb48074363470d2b45f3df3834d3\n", + " Stored in directory: /root/.cache/pip/wheels/8b/f1/7f/5c94f0a7a505ca1c81cd1d9208ae2064675d97582078e6c769\n", + " Building wheel for pathtools (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", + " Created wheel for pathtools: filename=pathtools-0.1.2-py3-none-any.whl size=8791 sha256=54ec10407443c31a038c0a14ed204da50ca2c5941051adb65e121b8dd3b07eb7\n", + " Stored in directory: /root/.cache/pip/wheels/e7/f3/22/152153d6eb222ee7a56ff8617d80ee5207207a8c00a7aab794\n", + "Successfully built pycocotools vision-transformers wget pathtools\n", + "Installing collected packages: wget, tensorboard-plugin-wit, pathtools, appdirs, urllib3, tqdm, torchinfo, tensorboard-data-server, smmap, setproctitle, PyYAML, psutil, Pillow, nvidia-cuda-runtime-cu11, nvidia-cuda-nvrtc-cu11, nvidia-cublas-cu11, numpy, loguru, docker-pycreds, yacs, sentry-sdk, scipy, requests, pandas, opencv-python, nvidia-cudnn-cu11, gitdb, torch, matplotlib, GitPython, wandb, torchvision, thop, pynndescent, pycocotools, google-auth-oauthlib, catboost, vision-transformers, umap-learn, ultralytics, tensorboard\n", + " Attempting uninstall: urllib3\n", + " Found existing installation: urllib3 2.0.7\n", + " Uninstalling urllib3-2.0.7:\n", + " Successfully uninstalled urllib3-2.0.7\n", + " Attempting uninstall: tqdm\n", + " Found existing installation: tqdm 4.66.4\n", + " Uninstalling tqdm-4.66.4:\n", + " Successfully uninstalled tqdm-4.66.4\n", + " Attempting uninstall: tensorboard-data-server\n", + " Found existing installation: tensorboard-data-server 0.7.2\n", + " Uninstalling tensorboard-data-server-0.7.2:\n", + " Successfully uninstalled tensorboard-data-server-0.7.2\n", + " Attempting uninstall: PyYAML\n", + " Found existing installation: PyYAML 6.0.1\n", + " Uninstalling PyYAML-6.0.1:\n", + " Successfully uninstalled PyYAML-6.0.1\n", + " Attempting uninstall: psutil\n", + " Found existing installation: psutil 5.9.5\n", + " Uninstalling psutil-5.9.5:\n", + " Successfully uninstalled psutil-5.9.5\n", + " Attempting uninstall: Pillow\n", + " Found existing installation: Pillow 9.4.0\n", + " Uninstalling Pillow-9.4.0:\n", + " Successfully uninstalled Pillow-9.4.0\n", + " Attempting uninstall: numpy\n", + " Found existing installation: numpy 1.25.2\n", + " Uninstalling numpy-1.25.2:\n", + " Successfully uninstalled numpy-1.25.2\n", + " Attempting uninstall: scipy\n", + " Found existing installation: scipy 1.11.4\n", + " Uninstalling scipy-1.11.4:\n", + " Successfully uninstalled scipy-1.11.4\n", + " Attempting uninstall: requests\n", + " Found existing installation: requests 2.31.0\n", + " Uninstalling requests-2.31.0:\n", + " Successfully uninstalled requests-2.31.0\n", + " Attempting uninstall: pandas\n", + " Found existing installation: pandas 2.0.3\n", + " Uninstalling pandas-2.0.3:\n", + " Successfully uninstalled pandas-2.0.3\n", + " Attempting uninstall: opencv-python\n", + " Found existing installation: opencv-python 4.8.0.76\n", + " Uninstalling opencv-python-4.8.0.76:\n", + " Successfully uninstalled opencv-python-4.8.0.76\n", + " Attempting uninstall: torch\n", + " Found existing installation: torch 2.3.0+cu121\n", + " Uninstalling torch-2.3.0+cu121:\n", + " Successfully uninstalled torch-2.3.0+cu121\n", + " Attempting uninstall: matplotlib\n", + " Found existing installation: matplotlib 3.7.1\n", + " Uninstalling matplotlib-3.7.1:\n", + " Successfully uninstalled matplotlib-3.7.1\n", + " Attempting uninstall: torchvision\n", + " Found existing installation: torchvision 0.18.0+cu121\n", + " Uninstalling torchvision-0.18.0+cu121:\n", + " Successfully uninstalled torchvision-0.18.0+cu121\n", + " Attempting uninstall: pycocotools\n", + " Found existing installation: pycocotools 2.0.8\n", + 
" Uninstalling pycocotools-2.0.8:\n", + " Successfully uninstalled pycocotools-2.0.8\n", + " Attempting uninstall: google-auth-oauthlib\n", + " Found existing installation: google-auth-oauthlib 1.2.0\n", + " Uninstalling google-auth-oauthlib-1.2.0:\n", + " Successfully uninstalled google-auth-oauthlib-1.2.0\n", + " Attempting uninstall: tensorboard\n", + " Found existing installation: tensorboard 2.15.2\n", + " Uninstalling tensorboard-2.15.2:\n", + " Successfully uninstalled tensorboard-2.15.2\n", + "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n", + "bigframes 1.9.0 requires matplotlib>=3.7.1, but you have matplotlib 3.7.0 which is incompatible.\n", + "bigframes 1.9.0 requires pandas>=1.5.0, but you have pandas 1.4.2 which is incompatible.\n", + "chex 0.1.86 requires numpy>=1.24.1, but you have numpy 1.23.5 which is incompatible.\n", + "cudf-cu12 24.4.1 requires pandas<2.2.2dev0,>=2.0, but you have pandas 1.4.2 which is incompatible.\n", + "google-colab 1.0.0 requires pandas==2.0.3, but you have pandas 1.4.2 which is incompatible.\n", + "google-colab 1.0.0 requires requests==2.31.0, but you have requests 2.28.2 which is incompatible.\n", + "pandas-gbq 0.19.2 requires google-auth-oauthlib>=0.7.0, but you have google-auth-oauthlib 0.4.6 which is incompatible.\n", + "pandas-stubs 2.0.3.230814 requires numpy>=1.25.0; python_version >= \"3.9\", but you have numpy 1.23.5 which is incompatible.\n", + "plotnine 0.12.4 requires pandas>=1.5.0, but you have pandas 1.4.2 which is incompatible.\n", + "tensorflow 2.15.0 requires tensorboard<2.16,>=2.15, but you have tensorboard 2.11.2 which is incompatible.\n", + "torchaudio 2.3.0+cu121 requires torch==2.3.0, but you have torch 1.13.1 which is incompatible.\n", + "torchtext 0.18.0 requires torch>=2.3.0, but you have torch 1.13.1 which is incompatible.\n", + "yfinance 0.2.40 requires requests>=2.31, but you have requests 2.28.2 which is incompatible.\u001b[0m\u001b[31m\n", + "\u001b[0mSuccessfully installed GitPython-3.1.32 Pillow-9.5.0 PyYAML-6.0 appdirs-1.4.4 catboost-1.2.5 docker-pycreds-0.4.0 gitdb-4.0.11 google-auth-oauthlib-0.4.6 loguru-0.6.0 matplotlib-3.7.0 numpy-1.23.5 nvidia-cublas-cu11-11.10.3.66 nvidia-cuda-nvrtc-cu11-11.7.99 nvidia-cuda-runtime-cu11-11.7.99 nvidia-cudnn-cu11-8.5.0.96 opencv-python-4.7.0.72 pandas-1.4.2 pathtools-0.1.2 psutil-5.9.4 pycocotools-2.0.6 pynndescent-0.5.13 requests-2.28.2 scipy-1.9.1 sentry-sdk-2.6.0 setproctitle-1.3.3 smmap-5.0.1 tensorboard-2.11.2 tensorboard-data-server-0.6.1 tensorboard-plugin-wit-1.8.1 thop-0.1.1.post2209072238 torch-1.13.1 torchinfo-1.8.0 torchvision-0.14.1 tqdm-4.64.1 ultralytics-8.0.149 umap-learn-0.5.6 urllib3-1.26.19 vision-transformers-0.1.1.0 wandb-0.15.8 wget-3.2 yacs-0.1.8\n" + ] + }, + { + "output_type": "display_data", + "data": { + "application/vnd.colab-display-data+json": { + "pip_warning": { + "packages": [ + "PIL", + "matplotlib", + "mpl_toolkits", + "numpy", + "psutil" + ] + }, + "id": "8186175209264b48affa214e6e067702" + } + }, + "metadata": {} + } + ], + "source": [ + "!pip install -r requirements.txt" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "Hgx6nQzrfNpo", + "outputId": "a9cad5d0-ae37-424d-e6ce-faab7e03ee3a" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "/content\n" + ] + } + ], + "source": [ + 
"%cd ..\n", + "# %cd /content/" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "Mce4luDenCXW" + }, + "source": [ + "# Download dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "yReAG1OUnDYT" + }, + "source": [ + "[Link to data and code on Kaggle](https://www.kaggle.com/datasets/parohod/warp-waste-recycling-plant-dataset?select=Warp-D)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "mRvnbmwOfjvA", + "outputId": "9d2a6c07-286a-4360-9bea-67f4be6206c2" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Cloning into 'WaRP'...\n", + "remote: Enumerating objects: 16721, done.\u001b[K\n", + "remote: Counting objects: 2% (1/45)\u001b[K\rremote: Counting objects: 4% (2/45)\u001b[K\rremote: Counting objects: 6% (3/45)\u001b[K\rremote: Counting objects: 8% (4/45)\u001b[K\rremote: Counting objects: 11% (5/45)\u001b[K\rremote: Counting objects: 13% (6/45)\u001b[K\rremote: Counting objects: 15% (7/45)\u001b[K\rremote: Counting objects: 17% (8/45)\u001b[K\rremote: Counting objects: 20% (9/45)\u001b[K\rremote: Counting objects: 22% (10/45)\u001b[K\rremote: Counting objects: 24% (11/45)\u001b[K\rremote: Counting objects: 26% (12/45)\u001b[K\rremote: Counting objects: 28% (13/45)\u001b[K\rremote: Counting objects: 31% (14/45)\u001b[K\rremote: Counting objects: 33% (15/45)\u001b[K\rremote: Counting objects: 35% (16/45)\u001b[K\rremote: Counting objects: 37% (17/45)\u001b[K\rremote: Counting objects: 40% (18/45)\u001b[K\rremote: Counting objects: 42% (19/45)\u001b[K\rremote: Counting objects: 44% (20/45)\u001b[K\rremote: Counting objects: 46% (21/45)\u001b[K\rremote: Counting objects: 48% (22/45)\u001b[K\rremote: Counting objects: 51% (23/45)\u001b[K\rremote: Counting objects: 53% (24/45)\u001b[K\rremote: Counting objects: 55% (25/45)\u001b[K\rremote: Counting objects: 57% (26/45)\u001b[K\rremote: Counting objects: 60% (27/45)\u001b[K\rremote: Counting objects: 62% (28/45)\u001b[K\rremote: Counting objects: 64% (29/45)\u001b[K\rremote: Counting objects: 66% (30/45)\u001b[K\rremote: Counting objects: 68% (31/45)\u001b[K\rremote: Counting objects: 71% (32/45)\u001b[K\rremote: Counting objects: 73% (33/45)\u001b[K\rremote: Counting objects: 75% (34/45)\u001b[K\rremote: Counting objects: 77% (35/45)\u001b[K\rremote: Counting objects: 80% (36/45)\u001b[K\rremote: Counting objects: 82% (37/45)\u001b[K\rremote: Counting objects: 84% (38/45)\u001b[K\rremote: Counting objects: 86% (39/45)\u001b[K\rremote: Counting objects: 88% (40/45)\u001b[K\rremote: Counting objects: 91% (41/45)\u001b[K\rremote: Counting objects: 93% (42/45)\u001b[K\rremote: Counting objects: 95% (43/45)\u001b[K\rremote: Counting objects: 97% (44/45)\u001b[K\rremote: Counting objects: 100% (45/45)\u001b[K\rremote: Counting objects: 100% (45/45), done.\u001b[K\n", + "remote: Compressing objects: 100% (45/45), done.\u001b[K\n", + "remote: Total 16721 (delta 28), reused 0 (delta 0), pack-reused 16676\u001b[K\n", + "Receiving objects: 100% (16721/16721), 794.77 MiB | 43.73 MiB/s, done.\n", + "Resolving deltas: 100% (110/110), done.\n", + "Updating files: 100% (16898/16898), done.\n" + ] + } + ], + "source": [ + "!git clone https://github.com/AIRI-Institute/WaRP" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "BejVnLT7nJ0-" + }, + "source": [ + "## Image Example" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "QyNYYnRb-yP8" + }, + "source": [ + 
"![WaRP-Categories.png](attachment:WaRP-Categories.png)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "3Esy_ert-yP8" + }, + "source": [ + "## Structure" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "CmBW9iRq-yP8" + }, + "source": [ + "![WaRP-Dataset.png](attachment:WaRP-Dataset.png)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "DKx9HHco-yP8" + }, + "source": [ + "To use the recommendation system or train the desired detector, put your dataset in yolo format in the ***user_datasets*** directory. The set can have the following structures:\n", + "```markdown\n", + "user_datasets\n", + "|_ _ \n", + " |_ _train\n", + " |_ _images\n", + " |_ .jpg\n", + " |_ ...\n", + " |_ .jpg\n", + " |_ _labels\n", + " |_ .txt\n", + " |_ ...\n", + " |_ .txt\n", + " |_ _valid\n", + " |_ _images\n", + " |_ .jpg\n", + " |_ ...\n", + " |_ .jpg\n", + " |_ _labels\n", + " |_ .txt\n", + " |_ ...\n", + " |_ .txt\n", + " |_ _test\n", + " |_ _images\n", + " |_ .jpg\n", + " |_ ...\n", + " |_ .jpg\n", + " |_ _labels\n", + " |_ .txt\n", + " |_ ...\n", + " |_ .txt\n", + "\n", + "```\n", + "***or you can use the following structure, then your set will be automatically divided into samples:***\n", + "\n", + "```markdown\n", + "user_datasets\n", + "|_ _ \n", + " |_ .jpg\n", + " |_ ...\n", + " |_ .jpg\n", + " |_ ...\n", + " |_ .txt\n", + " |_ ...\n", + " |_ .txt\n", + "\n", + "```\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "id": "OPgn9jHxnbbP" + }, + "outputs": [], + "source": [ + "!cp WaRP/Warp-D/classes.txt ./ODRS/" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "id": "9dPerVnCfqY3" + }, + "outputs": [], + "source": [ + "!mkdir WaRP/Warp-D/valid\n", + "!cp -r WaRP/Warp-D/test/* WaRP/Warp-D/valid/" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "uFnnPpINnU7B" + }, + "source": [ + "# ML Recommendation System" + ] + }, + { + "cell_type": "code", + "source": [ + "%cd ODRS/src/ML/config/" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "Pn0vZ9nj4iyF", + "outputId": "6ff0d7c9-0c71-49ec-dec4-fb461d300570" + }, + "execution_count": 2, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "/content/ODRS/src/ML/config\n" + ] + } + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 272 + }, + "id": "zjfFE-dSgSzd", + "outputId": "767306fd-a886-449b-acb2-625b31cf0439" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Before:\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "{'GPU': True,\n", + " 'accuracy': False,\n", + " 'balance': False,\n", + " 'classes_path': 'classes.txt',\n", + " 'dataset_path': '/content/WaRP/Warp-D',\n", + " 'speed': True}" + ] + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "\n", + "After:\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "{'GPU': True,\n", + " 'accuracy': False,\n", + " 'balance': False,\n", + " 'classes_path': 'classes.txt',\n", + " 'dataset_path': '/content/WaRP/Warp-D',\n", + " 'speed': True}" + ] + }, + "metadata": {} + } + ], + "source": [ + "import yaml\n", + "from IPython.display import display\n", + "\n", + "print(\"Before:\")\n", + "\n", + "with open('ml_config.yaml', 'r') as yaml_file:\n", + " yaml_content = yaml.safe_load(yaml_file)\n", + " 
display(yaml_content)\n", + "\n", + "# Speed changing\n", + "yaml_content['speed'] = True\n", + "yaml_content['dataset_path'] = '/content/WaRP/Warp-D'\n", + "with open('ml_config.yaml', 'w') as yaml_file:\n", + " yaml.dump(yaml_content, yaml_file)\n", + "\n", + "print(\"\\nAfter:\")\n", + "\n", + "with open('ml_config.yaml', 'r') as yaml_file:\n", + " yaml_content = yaml.safe_load(yaml_file)\n", + " display(yaml_content)" + ] + }, + { + "cell_type": "code", + "source": [ + "%cd .." + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "loyj0fcAJQJu", + "outputId": "011d1ff8-20c6-439e-ebd1-b9c5aced205e" + }, + "execution_count": 5, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "/content/ODRS/src/ML\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "!python run_recommender.py" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "oQGW30MdJSfE", + "outputId": "dadda32a-06d1-49c6-f146-29027a41f7f7" + }, + "execution_count": 6, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "2024-06-25 11:26:30.480732: I tensorflow/core/tpu/tpu_api_dlsym_initializer.cc:95] Opening library: /usr/local/lib/python3.10/dist-packages/tensorflow/python/platform/../../libtensorflow_cc.so.2\n", + "2024-06-25 11:26:30.480980: I tensorflow/core/tpu/tpu_api_dlsym_initializer.cc:119] Libtpu path is: libtpu.so\n", + "2024-06-25 11:26:30.532253: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n", + "To enable the following instructions: AVX2 AVX512F FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\n", + "\u001b[32m2024-06-25 11:26:32.518\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36msrc.data_processing.data_utils.utils\u001b[0m:\u001b[36mget_data_path\u001b[0m:\u001b[36m74\u001b[0m - \u001b[1mCopying a set of images to /content/ODRS/user_datasets\u001b[0m\n", + "\u001b[32m2024-06-25 11:26:33.978\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36msrc.data_processing.data_utils.split_dataset\u001b[0m:\u001b[36msplit_data\u001b[0m:\u001b[36m35\u001b[0m - \u001b[1mDataset is ready\u001b[0m\n", + "Image analyze: 100% 3496/3496 [17:57<00:00, 3.24it/s]\n", + "Annotation analyze: 100% 3496/3496 [00:00<00:00, 23098.08it/s]\n", + "/usr/local/lib/python3.10/dist-packages/numba/np/ufunc/parallel.py:371: NumbaWarning: The TBB threading layer requires TBB version 2021 update 6 or later i.e., TBB_INTERFACE_VERSION >= 12060. Found TBB_INTERFACE_VERSION = 12050. 
+      "  warnings.warn(problem)\n",
+      "\u001b[32m2024-06-25 11:44:45.793\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36mpredict\u001b[0m:\u001b[36m64\u001b[0m - \u001b[1mTop models for training:\u001b[0m\n",
+      "\u001b[32m2024-06-25 11:44:45.793\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36mpredict\u001b[0m:\u001b[36m66\u001b[0m - \u001b[1m1) yolov5n\u001b[0m\n",
+      "\u001b[32m2024-06-25 11:44:45.793\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36mpredict\u001b[0m:\u001b[36m66\u001b[0m - \u001b[1m2) yolov5s\u001b[0m\n",
+      "\u001b[32m2024-06-25 11:44:45.793\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36m__main__\u001b[0m:\u001b[36mpredict\u001b[0m:\u001b[36m66\u001b[0m - \u001b[1m3) yolov8n\u001b[0m\n"
+     ]
+    }
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "id": "084biqObpVEI"
+   },
+   "source": [
+    "# Model training"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "source": [
+    "%cd ODRS/src/DL/config/"
+   ],
+   "metadata": {
+    "colab": {
+     "base_uri": "https://localhost:8080/"
+    },
+    "id": "oxv4_eiXMwER",
+    "outputId": "89409d9a-cb7f-460c-d11d-0724d5ca2147"
+   },
+   "execution_count": 11,
+   "outputs": [
+    {
+     "output_type": "stream",
+     "name": "stdout",
+     "text": [
+      "/content/ODRS/src/DL/config\n"
+     ]
+    }
+   ]
+  },
+  {
+   "cell_type": "code",
+   "source": [
+    "import yaml\n",
+    "from IPython.display import display\n",
+    "\n",
+    "print(\"Before\")\n",
+    "\n",
+    "with open('train_config.yaml', 'r') as yaml_file:\n",
+    "    yaml_content = yaml.safe_load(yaml_file)\n",
+    "    display(yaml_content)\n",
+    "\n",
+    "# Select the GPU, model and dataset path for this run\n",
+    "yaml_content['GPU_COUNT'] = 1\n",
+    "yaml_content['SELECT_GPU'] = '0'\n",
+    "yaml_content['MODEL'] = 'yolov7-tiny'\n",
+    "yaml_content['DATA_PATH'] = '/content/WaRP/Warp-D'\n",
+    "with open('train_config.yaml', 'w') as yaml_file:\n",
+    "    yaml.dump(yaml_content, yaml_file)\n",
+    "\n",
+    "print(\"After\")\n",
+    "\n",
+    "with open('train_config.yaml', 'r') as yaml_file:\n",
+    "    yaml_content = yaml.safe_load(yaml_file)\n",
+    "    display(yaml_content)"
+   ],
+   "metadata": {
+    "colab": {
+     "base_uri": "https://localhost:8080/",
+     "height": 391
+    },
+    "id": "CM0F8rbsMv67",
+    "outputId": "65ba87aa-4fd1-44c8-807c-f7cce2dcc393"
+   },
+   "execution_count": 12,
+   "outputs": [
+    {
+     "output_type": "stream",
+     "name": "stdout",
+     "text": [
+      "Before\n"
+     ]
+    },
+    {
+     "output_type": "display_data",
+     "data": {
+      "text/plain": [
+       "{'BATCH_SIZE': 20,\n",
+       " 'CLASSES': 'classes.txt',\n",
+       " 'DATA_PATH': '/content/WaRP/Warp-D',\n",
+       " 'EPOCHS': 2,\n",
+       " 'GPU_COUNT': 1,\n",
+       " 'IMG_SIZE': 300,\n",
+       " 'MODEL': 'yolov7-tiny',\n",
+       " 'SELECT_GPU': '0',\n",
+       " 'SPLIT_TRAIN_VALUE': 0.85,\n",
+       " 'SPLIT_VAL_VALUE': 0.1}"
+      ]
+     },
+     "metadata": {}
+    },
+    {
+     "output_type": "stream",
+     "name": "stdout",
+     "text": [
+      "After\n"
+     ]
+    },
+    {
+     "output_type": "display_data",
+     "data": {
+      "text/plain": [
+       "{'BATCH_SIZE': 20,\n",
+       " 'CLASSES': 'classes.txt',\n",
+       " 'DATA_PATH': '/content/WaRP/Warp-D',\n",
+       " 'EPOCHS': 2,\n",
+       " 'GPU_COUNT': 1,\n",
+       " 'IMG_SIZE': 300,\n",
+       " 'MODEL': 'yolov7-tiny',\n",
+       " 'SELECT_GPU': '0',\n",
+       " 'SPLIT_TRAIN_VALUE': 0.85,\n",
+       " 'SPLIT_VAL_VALUE': 0.1}"
+      ]
+     },
+     "metadata": {}
+    }
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 13,
+   "metadata": {
+    "id": "ggGkXaKU-yP9",
+    "outputId": "91df932d-1e5e-47f2-85c9-7ae5e39a6779",
+    "colab": {
+     "base_uri":
"https://localhost:8080/" + } + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "/content/ODRS/src/DL\n" + ] + } + ], + "source": [ + "%cd ..\n" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "id": "KUWdpHSapxLh", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "34599612-e096-4870-d295-3da22b08a63f" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "\u001b[32m2024-06-25 11:54:42.169\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36msrc.data_processing.data_utils.utils\u001b[0m:\u001b[36mget_data_path\u001b[0m:\u001b[36m74\u001b[0m - \u001b[1mCopying a set of images to /content/ODRS/user_datasets\u001b[0m\n", + "\u001b[32m2024-06-25 11:54:48.542\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36msrc.data_processing.data_utils.split_dataset\u001b[0m:\u001b[36msplit_data\u001b[0m:\u001b[36m35\u001b[0m - \u001b[1mDataset is ready\u001b[0m\n", + "\u001b[32m2024-06-25 11:54:48.549\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36msrc.data_processing.train_processing.prepare_train\u001b[0m:\u001b[36mcreate_config_data\u001b[0m:\u001b[36m153\u001b[0m - \u001b[1mCreate config file\u001b[0m\n", + "YOLOR 🚀 84c11c9 torch 1.13.1+cu117 CUDA:0 (Tesla T4, 15102.0625MB)\n", + "\n", + "Namespace(weights='', cfg='/content/ODRS/src/DL/train_models/models/yolov7/cfg/training/yolov7-tiny.yaml', data='/content/ODRS/runs/2024-06-25_11-54-48_yolov7-tiny/dataset.yaml', hyp='./train_models/models/yolov7/data/hyp.scratch.p5.yaml', epochs=2, batch_size=20, img_size=[300, 300], rect=False, resume=False, nosave=False, notest=False, noautoanchor=False, evolve=False, bucket='', cache_images=False, image_weights=False, device='0', multi_scale=False, single_cls=False, adam=False, sync_bn=False, local_rank=-1, workers=8, project='/content/ODRS/runs/2024-06-25_11-54-48_yolov7-tiny', entity=None, name='exp', exist_ok=False, quad=False, linear_lr=False, label_smoothing=0.0, upload_dataset=False, bbox_interval=-1, save_period=-1, artifact_alias='latest', freeze=[0], v5_metric=False, world_size=1, global_rank=-1, save_dir='/content/ODRS/runs/2024-06-25_11-54-48_yolov7-tiny/exp', total_batch_size=20)\n", + "\u001b[34m\u001b[1mtensorboard: \u001b[0mStart with 'tensorboard --logdir /content/ODRS/runs/2024-06-25_11-54-48_yolov7-tiny', view at http://localhost:6006/\n", + "2024-06-25 11:54:55.876720: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n", + "2024-06-25 11:54:55.876796: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n", + "2024-06-25 11:54:56.013932: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n", + "2024-06-25 11:54:56.281940: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n", + "To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\n", + "2024-06-25 11:54:58.482337: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n", + "\u001b[34m\u001b[1mhyperparameters: \u001b[0mlr0=0.01, 
lrf=0.1, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=0.05, cls=0.3, cls_pw=1.0, obj=0.7, obj_pw=1.0, iou_t=0.2, anchor_t=4.0, fl_gamma=0.0, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.2, scale=0.9, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.15, copy_paste=0.0, paste_in=0.15, loss_ota=1\n", + "Overriding model.yaml nc=80 with nc=28\n", + "\n", + "from n params module arguments\n", + "0 -1 1 928 models.common.Conv [3, 32, 3, 2, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "1 -1 1 18560 models.common.Conv [32, 64, 3, 2, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "2 -1 1 2112 models.common.Conv [64, 32, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "3 -2 1 2112 models.common.Conv [64, 32, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "4 -1 1 9280 models.common.Conv [32, 32, 3, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "5 -1 1 9280 models.common.Conv [32, 32, 3, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "6 [-1, -2, -3, -4] 1 0 models.common.Concat [1]\n", + "7 -1 1 8320 models.common.Conv [128, 64, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "8 -1 1 0 models.common.MP []\n", + "9 -1 1 4224 models.common.Conv [64, 64, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "10 -2 1 4224 models.common.Conv [64, 64, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "11 -1 1 36992 models.common.Conv [64, 64, 3, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "12 -1 1 36992 models.common.Conv [64, 64, 3, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "13 [-1, -2, -3, -4] 1 0 models.common.Concat [1]\n", + "14 -1 1 33024 models.common.Conv [256, 128, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "15 -1 1 0 models.common.MP []\n", + "16 -1 1 16640 models.common.Conv [128, 128, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "17 -2 1 16640 models.common.Conv [128, 128, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "18 -1 1 147712 models.common.Conv [128, 128, 3, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "19 -1 1 147712 models.common.Conv [128, 128, 3, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "20 [-1, -2, -3, -4] 1 0 models.common.Concat [1]\n", + "21 -1 1 131584 models.common.Conv [512, 256, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "22 -1 1 0 models.common.MP []\n", + "23 -1 1 66048 models.common.Conv [256, 256, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "24 -2 1 66048 models.common.Conv [256, 256, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "25 -1 1 590336 models.common.Conv [256, 256, 3, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "26 -1 1 590336 models.common.Conv [256, 256, 3, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "27 [-1, -2, -3, -4] 1 0 models.common.Concat [1]\n", + "28 -1 1 525312 models.common.Conv [1024, 512, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "29 -1 1 131584 models.common.Conv [512, 256, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "30 -2 1 131584 models.common.Conv [512, 256, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "31 -1 1 0 models.common.SP [5]\n", + "32 -2 1 0 models.common.SP [9]\n", + "33 -3 1 0 models.common.SP [13]\n", + "34 [-1, -2, -3, -4] 1 0 models.common.Concat [1]\n", + "35 -1 1 262656 models.common.Conv [1024, 256, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "36 [-1, -7] 1 0 models.common.Concat [1]\n", + "37 -1 1 131584 models.common.Conv [512, 256, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "38 -1 1 33024 
models.common.Conv [256, 128, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "39 -1 1 0 torch.nn.modules.upsampling.Upsample [None, 2, 'nearest']\n", + "40 21 1 33024 models.common.Conv [256, 128, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "41 [-1, -2] 1 0 models.common.Concat [1]\n", + "42 -1 1 16512 models.common.Conv [256, 64, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "43 -2 1 16512 models.common.Conv [256, 64, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "44 -1 1 36992 models.common.Conv [64, 64, 3, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "45 -1 1 36992 models.common.Conv [64, 64, 3, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "46 [-1, -2, -3, -4] 1 0 models.common.Concat [1]\n", + "47 -1 1 33024 models.common.Conv [256, 128, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "48 -1 1 8320 models.common.Conv [128, 64, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "49 -1 1 0 torch.nn.modules.upsampling.Upsample [None, 2, 'nearest']\n", + "50 14 1 8320 models.common.Conv [128, 64, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "51 [-1, -2] 1 0 models.common.Concat [1]\n", + "52 -1 1 4160 models.common.Conv [128, 32, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "53 -2 1 4160 models.common.Conv [128, 32, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "54 -1 1 9280 models.common.Conv [32, 32, 3, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "55 -1 1 9280 models.common.Conv [32, 32, 3, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "56 [-1, -2, -3, -4] 1 0 models.common.Concat [1]\n", + "57 -1 1 8320 models.common.Conv [128, 64, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "58 -1 1 73984 models.common.Conv [64, 128, 3, 2, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "59 [-1, 47] 1 0 models.common.Concat [1]\n", + "60 -1 1 16512 models.common.Conv [256, 64, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "61 -2 1 16512 models.common.Conv [256, 64, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "62 -1 1 36992 models.common.Conv [64, 64, 3, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "63 -1 1 36992 models.common.Conv [64, 64, 3, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "64 [-1, -2, -3, -4] 1 0 models.common.Concat [1]\n", + "65 -1 1 33024 models.common.Conv [256, 128, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "66 -1 1 295424 models.common.Conv [128, 256, 3, 2, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "67 [-1, 37] 1 0 models.common.Concat [1]\n", + "68 -1 1 65792 models.common.Conv [512, 128, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "69 -2 1 65792 models.common.Conv [512, 128, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "70 -1 1 147712 models.common.Conv [128, 128, 3, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "71 -1 1 147712 models.common.Conv [128, 128, 3, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "72 [-1, -2, -3, -4] 1 0 models.common.Concat [1]\n", + "73 -1 1 131584 models.common.Conv [512, 256, 1, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "74 57 1 73984 models.common.Conv [64, 128, 3, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "75 65 1 295424 models.common.Conv [128, 256, 3, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "76 73 1 1180672 models.common.Conv [256, 512, 3, 1, None, 1, LeakyReLU(negative_slope=0.1)]\n", + "77 [74, 75, 76] 1 90194 models.yolo.IDetect [28, [[10, 13, 16, 30, 33, 23], [30, 61, 62, 45, 59, 119], [116, 90, 156, 198, 373, 326]], [128, 256, 512]]\n", + 
"/usr/local/lib/python3.10/dist-packages/torch/functional.py:504: UserWarning: torch.meshgrid: in an upcoming release, it will be required to pass the indexing argument. (Triggered internally at ../aten/src/ATen/native/TensorShape.cpp:3190.)\n", + "return _VF.meshgrid(tensors, **kwargs) # type: ignore[attr-defined]\n", + "Model Summary: 263 layers, 6088050 parameters, 6088050 gradients, 13.4 GFLOPS\n", + "\n", + "Scaled weight_decay = 0.00046875\n", + "Optimizer groups: 58 .bias, 58 conv.weight, 61 other\n", + "WARNING: --img-size 300 must be multiple of max stride 32, updating to 320\n", + "WARNING: --img-size 300 must be multiple of max stride 32, updating to 320\n", + "\u001b[34m\u001b[1mtrain: \u001b[0mScanning '/content/ODRS/user_datasets/Warp-D/train/labels' images and labels... 2452 found, 0 missing, 0 empty, 0 corrupted: 100%|██████████| 2452/2452 [00:01<00:00, 2085.52it/s]\n", + "\u001b[34m\u001b[1mtrain: \u001b[0mNew cache created: /content/ODRS/user_datasets/Warp-D/train/labels.cache\n", + "/usr/lib/python3.10/multiprocessing/popen_fork.py:66: RuntimeWarning: os.fork() was called. os.fork() is incompatible with multithreaded code, and JAX is multithreaded, so this will likely lead to a deadlock.\n", + "self.pid = os.fork()\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning '/content/ODRS/user_datasets/Warp-D/valid/labels' images and labels... 522 found, 0 missing, 0 empty, 0 corrupted: 100%|██████████| 522/522 [00:00<00:00, 970.54it/s]\n", + "\u001b[34m\u001b[1mval: \u001b[0mNew cache created: /content/ODRS/user_datasets/Warp-D/valid/labels.cache\n", + "Image sizes 320 train, 320 test\n", + "Using 2 dataloader workers\n", + "Logging results to /content/ODRS/runs/2024-06-25_11-54-48_yolov7-tiny/exp\n", + "Starting training for 2 epochs...\n", + "\n", + "Epoch gpu_mem box obj cls total labels img_size\n", + "\n", + "\u001b[34m\u001b[1mautoanchor: \u001b[0mAnalyzing anchors... 
anchors/target = 5.87, Best Possible Recall (BPR) = 1.0000\n", + " 0/1 0.566G 0.0783 0.01306 0.04912 0.1405 90 320: 100%|██████████| 123/123 [05:25<00:00, 2.65s/it]\n", + " Class Images Labels P R mAP@.5 mAP@.5:.95: 100%|██████████| 14/14 [00:18<00:00, 1.29s/it]\n", + "\n", + "Epoch gpu_mem box obj cls total labels img_size\n", + "all 522 1551 4.67e-05 0.000153 7.83e-07 7.83e-08\n", + " 1/1 1.2G 0.07599 0.0135 0.04808 0.1376 112 320: 100%|██████████| 123/123 [05:10<00:00, 2.53s/it]\n", + " Class Images Labels P R mAP@.5 mAP@.5:.95: 100%|██████████| 14/14 [00:20<00:00, 1.43s/it]\n", + "2 epochs completed in 0.190 hours.\n", + "\n", + "all 522 1551 0.000316 0.00657 2.53e-05 4.05e-06\n", + "bottle-blue 522 104 0.000931 0.0192 1.94e-05 3.46e-06\n", + "bottle-green 522 74 0 0 0 0\n", + "bottle-dark 522 95 0 0 6.82e-06 6.82e-07\n", + "bottle-milk 522 57 0 0 1.69e-05 1.69e-06\n", + "bottle-transp 522 234 0.00122 0.0171 0.000157 2.27e-05\n", + "bottle-multicolor 522 28 0 0 0 0\n", + "bottle-yogurt 522 42 0 0 0 0\n", + "bottle-oil 522 48 0 0 0 0\n", + "cans 522 98 0 0 0 0\n", + "juice-cardboard 522 68 0 0 0 0\n", + "milk-cardboard 522 94 0.00184 0.0213 6.69e-05 8.55e-06\n", + "detergent-color 522 43 0 0 0 0\n", + "detergent-transparent 522 41 0.00199 0.0244 9.73e-05 1.62e-05\n", + "detergent-box 522 17 0 0 0 0\n", + "canister 522 30 0 0 1.95e-05 3.89e-06\n", + "bottle-blue-full 522 43 0 0 7.08e-05 1.31e-05\n", + "bottle-transp-full 522 92 0 0 1.05e-05 5.23e-06\n", + "bottle-dark-full 522 34 0 0 0 0\n", + "bottle-green-full 522 34 0 0 0 0\n", + "bottle-multicolorv-full 522 21 0 0 0 0\n", + "bottle-milk-full 522 21 0 0 0 0\n", + "bottle-oil-full 522 8 0 0 0 0\n", + "detergent-white 522 43 0.00173 0.0465 0.000144 2.16e-05\n", + "bottle-blue5l 522 72 0.000254 0.0139 2.45e-05 4.46e-06\n", + "bottle-blue5l-full 522 24 0.000887 0.0417 7.45e-05 1.17e-05\n", + "glass-transp 522 36 0 0 0 0\n", + "glass-dark 522 25 0 0 0 0\n", + "glass-green 522 25 0 0 0 0\n", + "Optimizer stripped from /content/ODRS/runs/2024-06-25_11-54-48_yolov7-tiny/exp/weights/last.pt, 12.4MB\n", + "Optimizer stripped from /content/ODRS/runs/2024-06-25_11-54-48_yolov7-tiny/exp/weights/best.pt, 12.4MB\n" + ] + } + ], + "source": [ + "!python3 train_detectors.py" + ] + } + ], + "metadata": { + "accelerator": "GPU", + "colab": { + "gpuType": "T4", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/src/DL/config/train_config.yaml b/src/DL/config/train_config.yaml index 6ea57ea..effda01 100644 --- a/src/DL/config/train_config.yaml +++ b/src/DL/config/train_config.yaml @@ -1,10 +1,11 @@ BATCH_SIZE: 20 CLASSES: classes.txt -DATA_PATH: /media/space/ssd_1_tb_evo_sumsung1/exp-train/ODRS-update/pad_1 -EPOCHS: 2 -GPU_COUNT: 2 +DATA_PATH: /home/runner/work/ODRS/ODRS/user_datasets/WaRP/Warp-D +EPOCHS: 1 +GPU_COUNT: 0 IMG_SIZE: 300 -MODEL: yolov7-tiny -SELECT_GPU: 0,1 +MODEL: yolov8s +SELECT_GPU: cpu + SPLIT_TRAIN_VALUE: 0.85 -SPLIT_VAL_VALUE: 0.1 +SPLIT_VAL_VALUE: 0.10 diff --git a/src/ML/config/ml_config.yaml b/src/ML/config/ml_config.yaml index 02a4a70..3198808 100644 --- a/src/ML/config/ml_config.yaml +++ b/src/ML/config/ml_config.yaml @@ -1,6 +1,6 @@ -GPU: true 
-accuracy: false
-balance: false
 classes_path: classes.txt
-dataset_path: /media/space/ssd_1_tb_evo_sumsung1/exp-train/pad_1
-speed: true
+dataset_path: /home/runner/work/ODRS/ODRS/user_datasets/WaRP/Warp-D
+GPU: False
+accuracy: False
+speed: False
+balance: True
\ No newline at end of file
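For reference, the updated ***ml_config.yaml*** above can also be edited programmatically, mirroring the PyYAML pattern used in the example notebook. A minimal sketch, assuming it runs from the repository root; the dataset path is a placeholder, and the convention that only one of `accuracy`/`speed`/`balance` is enabled at a time is inferred from the two configs shown in this patch.

```python
import yaml

CONFIG = "src/ML/config/ml_config.yaml"  # path relative to the repository root

with open(CONFIG) as f:
    cfg = yaml.safe_load(f)

# Ask the recommender for the balanced trade-off on CPU, as in the diff above;
# both configs in this patch enable exactly one of accuracy/speed/balance.
cfg.update({"GPU": False, "accuracy": False, "speed": False, "balance": True})
cfg["dataset_path"] = "/path/to/your/dataset"  # placeholder

with open(CONFIG, "w") as f:
    yaml.dump(cfg, f)
```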