Add YOLOv10 checkpoints and implement app launch from json preset
cxnt committed Aug 23, 2024
1 parent 6a06e24 commit 9570447
Showing 10 changed files with 162 additions and 41 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -90,7 +90,7 @@ This application is a versatile tool designed for data transformation tasks (lik
| **Neural Networks** | | | |
| [Apply NN Inference](https://github.com/supervisely-ecosystem/data-nodes/blob/master/src/ui/dtl/actions/neural_networks/apply_nn_inference/README.md#apply-nn-inference) | Apply neural network inference. | + | - |
| [Deploy YOLOv5](https://github.com/supervisely-ecosystem/data-nodes/blob/master/src/ui/dtl/actions/neural_networks/deploy/yolov5.md) | Deploy pretrained or custom YOLOv5 model. | + | - |
| [Deploy YOLO (v8, v9)](https://github.com/supervisely-ecosystem/data-nodes/blob/master/src/ui/dtl/actions/neural_networks/deploy/yolov8.md) | Deploy pretrained or custom YOLO (v8, v9) model. | + | - |
| [Deploy YOLO (v8, v9, v10)](https://github.com/supervisely-ecosystem/data-nodes/blob/master/src/ui/dtl/actions/neural_networks/deploy/yolov8.md) | Deploy pretrained or custom YOLO (v8, v9, v10) model. | + | - |
| [Deploy MMDetection](https://github.com/supervisely-ecosystem/data-nodes/blob/master/src/ui/dtl/actions/neural_networks/deploy/mmdetection.md) | Deploy pretrained or custom MMDetection model. | + | - |
| [Deploy MMSegmentation](https://github.com/supervisely-ecosystem/data-nodes/blob/master/src/ui/dtl/actions/neural_networks/deploy/mmsegmentation.md) | Deploy pretrained or custom MMSegmentation model. | + | - |
| **Other** | | | |
3 changes: 2 additions & 1 deletion config.json
@@ -19,7 +19,8 @@
"images_project",
"images_dataset",
"videos_project",
"videos_dataset"
"videos_dataset",
"files_file"
]
},
"instance_version": "6.11.8"
2 changes: 1 addition & 1 deletion dev_requirements.txt
@@ -1,6 +1,6 @@
git+https://github.com/supervisely/supervisely.git@optimize-index

# supervisely==6.73.161
# supervisely==6.73.165
jsonschema==4.19.2
networkx==3.1
scikit-image==0.21.0
2 changes: 1 addition & 1 deletion src/globals.py
@@ -38,7 +38,7 @@
TEAM_FILES_PATH = "data-nodes"
PROJECT_ID = sly.env.project_id(raise_not_found=False)
DATASET_ID = sly.env.dataset_id(raise_not_found=False)
# FILE = sly.env.team_files_file(raise_not_found=False)
FILE = sly.env.team_files_file(raise_not_found=False)
SUPPORTED_MODALITIES = ["images", "videos"]

SUPPORTED_MODALITIES_MAP = {
11 changes: 7 additions & 4 deletions src/main.py
@@ -121,7 +121,10 @@ def generate_preview_for_project(layer: Layer):


layer = None
if g.PIPELINE_TEMPLATE is not None:

if g.FILE is not None:
load_json(g.FILE)
elif g.PIPELINE_TEMPLATE is not None:
template = templates[g.MODALITY_TYPE].get(g.PIPELINE_TEMPLATE, None)
if template is not None:
load_template(template)
@@ -146,8 +149,10 @@ def generate_preview_for_project(layer: Layer):

if pr.type == "images":
layer = create_new_layer(ImagesProjectAction.name)
layer.init_widgets()
elif pr.type == "videos":
layer = create_new_layer(VideosProjectAction.name)
layer.init_widgets()
else:
raise NotImplementedError(f"Project type {pr.type} is not supported")
layer.from_json({"src": src, "settings": {"classes_mapping": "default"}})
@@ -158,15 +163,13 @@ def generate_preview_for_project(layer: Layer):
pr: ProjectInfo = g.api.project.get_info_by_id(g.PROJECT_ID)
src = [f"{pr.name}/*"]
layer = create_new_layer(FilteredProjectAction.name)
layer.init_widgets()
layer.from_json({"src": src, "settings": {"classes_mapping": "default"}})
node = layer.create_node()
nodes_flow.add_node(node)

update_loop.start()

# if g.FILE:
# g.updater("load_json")

app.call_before_shutdown(u.on_app_shutdown)
if layer is not None:
if g.MODALITY_TYPE == "images":
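With this change the app can be launched directly from a `.json` preset stored in Team Files: `globals.py` now reads the `team_files_file` environment variable into `g.FILE`, and `main.py` calls `load_json(g.FILE)` before falling back to a pipeline template. A minimal sketch of preparing such a preset is shown below; the per-layer fields are assumptions inferred from the `layer.from_json({"src": ..., "settings": ...})` calls in this diff, not a documented schema, and the team id and paths are placeholders.

```python
# Hypothetical sketch: build a data-nodes preset and upload it to Team Files so the
# app can be launched from the file's context menu (enabled by the new "files_file"
# target in config.json). The preset layout below is an assumption, not the official schema.
import json
import supervisely as sly

api = sly.Api.from_env()
TEAM_ID = 8  # placeholder: your team id

preset = [
    {
        "action": "images_project",                  # source layer (assumed action name)
        "src": ["my_project/*"],                     # project/dataset selector
        "settings": {"classes_mapping": "default"},
    },
    # ... more layers (filters, Apply NN Inference, export, etc.)
]

local_path = "/tmp/my_preset.json"
with open(local_path, "w") as f:
    json.dump(preset, f, indent=2)

# Upload to Team Files; launching the app from this file's context menu sets the
# team_files_file env var that globals.py now reads into g.FILE.
api.file.upload(TEAM_ID, local_path, "/data-nodes/presets/my_preset.json")
```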
4 changes: 2 additions & 2 deletions src/ui/dtl/actions/neural_networks/deploy/deploy.py
@@ -342,8 +342,8 @@ class DeployYOLOV5Action(DeployBaseAction):

class DeployYOLOV8Action(DeployBaseAction):
name = "deploy_yolo_v8"
title = "Deploy YOLO (v8, v9)"
description = "Deploy YOLO (v8, v9) models."
title = "YOLO v8 | v9 | v10"
description = "Deploy YOLO v8 | v9 | v10 models."
md_description = DeployBaseAction.read_md_file(dirname(realpath(__file__)) + "/yolov8.md")

# Framework settings
154 changes: 132 additions & 22 deletions src/ui/dtl/actions/neural_networks/deploy/layout/pretrained_models.py
@@ -113,58 +113,113 @@

yolov8 = [
{
"Model": "YOLOv8n-det",
"Model": "YOLOv8n-det (COCO)",
"Size (pixels)": "640",
"mAP": "37.3",
"params (M)": "3.2",
"FLOPs (B)": "8.7",
"meta": {
"task_type": "object detection",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v0.0.0/YOLOv8n.pt",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/YOLOv8n.pt",
},
},
{
"Model": "YOLOv8s-det",
"Model": "YOLOv8n-det (Open Images V7)",
"Size (pixels)": "640",
"mAP": "18.4",
"params (M)": "3.5",
"FLOPs (B)": "10.5",
"meta": {
"task_type": "object detection",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8n-oiv7.pt",
},
},
{
"Model": "YOLOv8s-det (COCO)",
"Size (pixels)": "640",
"mAP": "44.9",
"params (M)": "11.2",
"FLOPs (B)": "28.6",
"meta": {
"task_type": "object detection",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v0.0.0/YOLOv8s.pt",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/YOLOv8s.pt",
},
},
{
"Model": "YOLOv8s-det (Open Images V7)",
"Size (pixels)": "640",
"mAP": "27.7",
"params (M)": "11.4",
"FLOPs (B)": "29.7",
"meta": {
"task_type": "object detection",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8s-oiv7.pt",
},
},
{
"Model": "YOLOv8m-det",
"Model": "YOLOv8m-det (COCO)",
"Size (pixels)": "640",
"mAP": "50.2",
"params (M)": "25.9",
"FLOPs (B)": "78.9",
"meta": {
"task_type": "object detection",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v0.0.0/YOLOv8m.pt",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/YOLOv8m.pt",
},
},
{
"Model": "YOLOv8l-det",
"Model": "YOLOv8m-det (Open Images V7)",
"Size (pixels)": "640",
"mAP": "33.6",
"params (M)": "26.2",
"FLOPs (B)": "80.6",
"meta": {
"task_type": "object detection",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8m-oiv7.pt",
},
},
{
"Model": "YOLOv8l-det (COCO)",
"Size (pixels)": "640",
"mAP": "52.9",
"params (M)": "43.7",
"FLOPs (B)": "165.2",
"meta": {
"task_type": "object detection",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v0.0.0/YOLOv8l.pt",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/YOLOv8l.pt",
},
},
{
"Model": "YOLOv8l-det (Open Images V7)",
"Size (pixels)": "640",
"mAP": "34.9",
"params (M)": "44.1",
"FLOPs (B)": "167.4",
"meta": {
"task_type": "object detection",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8l-oiv7.pt",
},
},
{
"Model": "YOLOv8x-det",
"Model": "YOLOv8x-det (COCO)",
"Size (pixels)": "640",
"mAP": "53.9",
"params (M)": "68.2",
"FLOPs (B)": "257.8",
"meta": {
"task_type": "object detection",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v0.0.0/YOLOv8x.pt",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/YOLOv8x.pt",
},
},
{
"Model": "YOLOv8x-det (Open Images V7)",
"Size (pixels)": "640",
"mAP": "36.3",
"params (M)": "68.7",
"FLOPs (B)": "260.6",
"meta": {
"task_type": "object detection",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8x-oiv7.pt",
},
},
{
@@ -176,7 +231,7 @@
"FLOPs (B)": "12.6",
"meta": {
"task_type": "instance segmentation",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v0.0.0/YOLOv8n-seg.pt",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/YOLOv8n-seg.pt",
},
},
{
@@ -188,7 +243,7 @@
"FLOPs (B)": "42.6",
"meta": {
"task_type": "instance segmentation",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v0.0.0/YOLOv8s-seg.pt",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/YOLOv8s-seg.pt",
},
},
{
@@ -200,7 +255,7 @@
"FLOPs (B)": "110.2",
"meta": {
"task_type": "instance segmentation",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v0.0.0/YOLOv8m-seg.pt",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/YOLOv8m-seg.pt",
},
},
{
@@ -212,7 +267,7 @@
"FLOPs (B)": "220.5",
"meta": {
"task_type": "instance segmentation",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v0.0.0/YOLOv8l-seg.pt",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/YOLOv8l-seg.pt",
},
},
{
@@ -224,7 +279,7 @@
"FLOPs (B)": "344.1",
"meta": {
"task_type": "instance segmentation",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v0.0.0/YOLOv8x-seg.pt",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/YOLOv8x-seg.pt",
},
},
{
@@ -235,7 +290,7 @@
"FLOPs (B)": "9.2",
"meta": {
"task_type": "pose estimation",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v0.0.0/YOLOv8n-pose.pt",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/YOLOv8n-pose.pt",
},
},
{
@@ -246,7 +301,7 @@
"FLOPs (B)": "30.2",
"meta": {
"task_type": "pose estimation",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v0.0.0/YOLOv8s-pose.pt",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/YOLOv8s-pose.pt",
},
},
{
@@ -257,7 +312,7 @@
"FLOPs (B)": "81.0",
"meta": {
"task_type": "pose estimation",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v0.0.0/YOLOv8m-pose.pt",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/YOLOv8m-pose.pt",
},
},
{
@@ -268,7 +323,7 @@
"FLOPs (B)": "168.6",
"meta": {
"task_type": "pose estimation",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v0.0.0/YOLOv8l-pose.pt",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/YOLOv8l-pose.pt",
},
},
{
@@ -279,7 +334,7 @@
"FLOPs (B)": "263.2",
"meta": {
"task_type": "pose estimation",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v0.0.0/YOLOv8x-pose.pt",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/YOLOv8x-pose.pt",
},
},
{
@@ -290,7 +345,7 @@
"FLOPs (B)": "1066.4",
"meta": {
"task_type": "pose estimation",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v0.0.0/YOLOv8x-pose-p6.pt",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/YOLOv8x-pose-p6.pt",
},
},
{
@@ -338,7 +393,62 @@
"task_type": "instance segmentation",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov9e-seg.pt",
},
}
},
{
"Model": "YOLOv10n-det",
"Size (pixels)": "640",
"mAP": "39.5",
"params (M)": "2.3",
"FLOPs (B)": "6.7",
"meta": {
"task_type": "object detection",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov10n.pt",
},
},
{
"Model": "YOLOv10s-det",
"Size (pixels)": "640",
"mAP": "46.8",
"params (M)": "7.2",
"FLOPs (B)": "21.6",
"meta": {
"task_type": "object detection",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov10s.pt",
},
},
{
"Model": "YOLOv10m-det",
"Size (pixels)": "640",
"mAP": "51.3",
"params (M)": "15.4",
"FLOPs (B)": "59.1",
"meta": {
"task_type": "object detection",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov10m.pt",
},
},
{
"Model": "YOLOv10l-det",
"Size (pixels)": "640",
"mAP": "53.4",
"params (M)": "24.4",
"FLOPs (B)": "120.3",
"meta": {
"task_type": "object detection",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov10l.pt",
},
},
{
"Model": "YOLOv10x-det",
"Size (pixels)": "640",
"mAP": "54.4",
"params (M)": "29.5",
"FLOPs (B)": "160.4",
"meta": {
"task_type": "object detection",
"weights_url": "https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov10x.pt",
},
},
]

mmdetection3 = [
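The new checkpoint entries switch `weights_url` from the stale `v0.0.0` Ultralytics assets tag to `v8.2.0` and add YOLOv10 and Open Images V7 variants. A quick standalone sanity check of one of the new URLs might look like the sketch below; inside data-nodes the URL is handed to the Serve YOLO app rather than loaded like this, and the test image URL is only an example.

```python
# Illustrative only: verify that a newly added checkpoint URL resolves and that a
# recent ultralytics release (with YOLOv10 support) can load it.
from pathlib import Path
import urllib.request

from ultralytics import YOLO

url = "https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov10n.pt"
dst = Path("yolov10n.pt")
if not dst.exists():
    urllib.request.urlretrieve(url, dst)

model = YOLO(str(dst))
results = model.predict("https://ultralytics.com/images/bus.jpg", conf=0.25)
print(results[0].boxes.xyxy)  # detected boxes in xyxy format
```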
6 changes: 3 additions & 3 deletions src/ui/dtl/actions/neural_networks/deploy/yolov8.md
@@ -1,14 +1,14 @@
# Deploy YOLO (v8, v9)
# YOLO v8 | v9 | v10

`Deploy YOLO (v8, v9)` using [Serve YOLO (v8, v9)](https://ecosystem.supervisely.com/apps/yolov8/serve) app to serve models and can be used to deploy custom and pretrained models that you can use via `Apply NN` layer. Custom models will appear in the custom tab of the table only if you have any trained YOLO (v8, v9) models in your Team Files. You can train your own model using [Train YOLO (v8, v9)](https://ecosystem.supervisely.com/apps/yolov8/train) app. If you want to use pretrained models, simply select "Pretrained public models" tab in model selector.
`Deploy YOLO v8 | v9 | v10` uses the [Serve YOLO v8 | v9 | v10](https://ecosystem.supervisely.com/apps/yolov8/serve) app to serve models and can deploy both custom and pretrained models, which you can then use via the `Apply NN` layer. Custom models appear in the custom tab of the table only if you have trained YOLO v8 | v9 | v10 models in your Team Files. You can train your own model with the [Train YOLO v8 | v9 | v10](https://ecosystem.supervisely.com/apps/yolov8/train) app. If you want to use pretrained models, simply select the "Pretrained public models" tab in the model selector.

![image](https://github.com/supervisely-ecosystem/data-nodes/assets/48913536/791a50bf-51b5-4bdb-8bda-0f4ed2a3d06b)

### Settings:

### How to use:

1. Add `Deploy YOLO (v8, v9)` layer
1. Add `Deploy YOLO v8 | v9 | v10` layer
2. Open agent settings and select agent and device
3. Open models selector and select one of the available models
4. Press `SERVE`