diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 2895f601baf8..9b348b557da0 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -1,36 +1,38 @@
-### Background
-
### Changes 🏗️
-
-### Testing 🔍
-> [!NOTE]
-Only for the new autogpt platform, currently in autogpt_platform/
-
-
-
-- Create from scratch and execute an agent with at least 3 blocks
-- Import an agent from file upload, and confirm it executes correctly
-- Upload agent to marketplace
-- Import an agent from marketplace and confirm it executes correctly
-- Edit an agent from monitor, and confirm it executes correctly
-
-### Configuration Changes 📝
-> [!NOTE]
-Only for the new autogpt platform, currently in autogpt_platform/
-
-If you're making configuration or infrastructure changes, please remember to check you've updated the related infrastructure code in the autogpt_platform/infra folder.
-
-Examples of such changes might include:
-
-- Changing ports
-- Adding new services that need to communicate with each other
-- Secrets or environment variable changes
-- New or infrastructure changes such as databases
+### Checklist 📋
+
+#### For code changes:
+- [ ] I have clearly listed my changes in the PR description
+- [ ] I have made a test plan
+- [ ] I have tested my changes according to the test plan:
+
+ - [ ] ...
+
+
+<details>
+  <summary>Example test plan</summary>
+
+ - [ ] Create from scratch and execute an agent with at least 3 blocks
+ - [ ] Import an agent from file upload, and confirm it executes correctly
+ - [ ] Upload agent to marketplace
+ - [ ] Import an agent from marketplace and confirm it executes correctly
+ - [ ] Edit an agent from monitor, and confirm it executes correctly
+</details>
+
+#### For configuration changes:
+- [ ] `.env.example` is updated or already compatible with my changes
+- [ ] `docker-compose.yml` is updated or already compatible with my changes
+- [ ] I have included a list of my configuration changes in the PR description (under **Changes**)
+
+
+<details>
+  <summary>Examples of configuration changes</summary>
+
+ - Changing ports
+ - Adding new services that need to communicate with each other
+ - Secrets or environment variable changes
+  - New or changed infrastructure, such as databases
+</details>
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index d67aba9edc93..ed86498fd910 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -9,7 +9,7 @@ repos:
- id: check-merge-conflict
- id: check-symlinks
- id: debug-statements
-
+
- repo: https://github.com/Yelp/detect-secrets
rev: v1.5.0
hooks:
@@ -19,26 +19,116 @@ repos:
files: ^autogpt_platform/
stages: [push]
+ - repo: local
+ # For proper type checking, all dependencies need to be up-to-date.
+ # It's also a good idea to check that poetry.lock is consistent with pyproject.toml.
+ hooks:
+ - id: poetry-install
+ name: Check & Install dependencies - AutoGPT Platform - Backend
+ alias: poetry-install-platform-backend
+ entry: poetry -C autogpt_platform/backend install
+ # include autogpt_libs source (since it's a path dependency)
+ files: ^autogpt_platform/(backend|autogpt_libs)/poetry\.lock$
+ types: [file]
+ language: system
+ pass_filenames: false
+
+ - id: poetry-install
+ name: Check & Install dependencies - AutoGPT Platform - Libs
+ alias: poetry-install-platform-libs
+ entry: poetry -C autogpt_platform/autogpt_libs install
+ files: ^autogpt_platform/autogpt_libs/poetry\.lock$
+ types: [file]
+ language: system
+ pass_filenames: false
+
+ - id: poetry-install
+ name: Check & Install dependencies - Classic - AutoGPT
+ alias: poetry-install-classic-autogpt
+ entry: poetry -C classic/original_autogpt install
+ # include forge source (since it's a path dependency)
+ files: ^classic/(original_autogpt|forge)/poetry\.lock$
+ types: [file]
+ language: system
+ pass_filenames: false
+
+ - id: poetry-install
+ name: Check & Install dependencies - Classic - Forge
+ alias: poetry-install-classic-forge
+ entry: poetry -C classic/forge install
+ files: ^classic/forge/poetry\.lock$
+ types: [file]
+ language: system
+ pass_filenames: false
+
+ - id: poetry-install
+ name: Check & Install dependencies - Classic - Benchmark
+ alias: poetry-install-classic-benchmark
+ entry: poetry -C classic/benchmark install
+ files: ^classic/benchmark/poetry\.lock$
+ types: [file]
+ language: system
+ pass_filenames: false
+
+ - repo: local
+ # For proper type checking, Prisma client must be up-to-date.
+ hooks:
+ - id: prisma-generate
+ name: Prisma Generate - AutoGPT Platform - Backend
+ alias: prisma-generate-platform-backend
+ entry: bash -c 'cd autogpt_platform/backend && poetry run prisma generate'
+ # include everything that triggers poetry install + the prisma schema
+ files: ^autogpt_platform/((backend|autogpt_libs)/poetry\.lock|backend/schema.prisma)$
+ types: [file]
+ language: system
+ pass_filenames: false
+
+ - repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.7.2
+ hooks:
+ - id: ruff
+ name: Lint (Ruff) - AutoGPT Platform - Backend
+ alias: ruff-lint-platform-backend
+ files: ^autogpt_platform/backend/
+ args: [--fix]
+
+ - id: ruff
+ name: Lint (Ruff) - AutoGPT Platform - Libs
+ alias: ruff-lint-platform-libs
+ files: ^autogpt_platform/autogpt_libs/
+ args: [--fix]
+
- repo: local
# isort needs the context of which packages are installed to function, so we
# can't use a vendored isort pre-commit hook (which runs in its own isolated venv).
hooks:
- - id: isort-autogpt
- name: Lint (isort) - AutoGPT
+ - id: isort
+ name: Lint (isort) - AutoGPT Platform - Backend
+ alias: isort-platform-backend
+ entry: poetry -C autogpt_platform/backend run isort
+ files: ^autogpt_platform/backend/
+ types: [file, python]
+ language: system
+
+ - id: isort
+ name: Lint (isort) - Classic - AutoGPT
+ alias: isort-classic-autogpt
entry: poetry -C classic/original_autogpt run isort
files: ^classic/original_autogpt/
types: [file, python]
language: system
- - id: isort-forge
- name: Lint (isort) - Forge
+ - id: isort
+ name: Lint (isort) - Classic - Forge
+ alias: isort-classic-forge
entry: poetry -C classic/forge run isort
files: ^classic/forge/
types: [file, python]
language: system
- - id: isort-benchmark
- name: Lint (isort) - Benchmark
+ - id: isort
+ name: Lint (isort) - Classic - Benchmark
+ alias: isort-classic-benchmark
entry: poetry -C classic/benchmark run isort
files: ^classic/benchmark/
types: [file, python]
@@ -51,7 +141,6 @@ repos:
hooks:
- id: black
name: Lint (Black)
- language_version: python3.12
- repo: https://github.com/PyCQA/flake8
rev: 7.0.0
@@ -59,20 +148,20 @@ repos:
# them separately.
hooks:
- id: flake8
- name: Lint (Flake8) - AutoGPT
- alias: flake8-autogpt
+ name: Lint (Flake8) - Classic - AutoGPT
+ alias: flake8-classic-autogpt
files: ^classic/original_autogpt/(autogpt|scripts|tests)/
args: [--config=classic/original_autogpt/.flake8]
- id: flake8
- name: Lint (Flake8) - Forge
- alias: flake8-forge
+ name: Lint (Flake8) - Classic - Forge
+ alias: flake8-classic-forge
files: ^classic/forge/(forge|tests)/
args: [--config=classic/forge/.flake8]
- id: flake8
- name: Lint (Flake8) - Benchmark
- alias: flake8-benchmark
+ name: Lint (Flake8) - Classic - Benchmark
+ alias: flake8-classic-benchmark
files: ^classic/benchmark/(agbenchmark|tests)/((?!reports).)*[/.]
args: [--config=classic/benchmark/.flake8]
@@ -81,31 +170,52 @@ repos:
# project. To trigger on poetry.lock we also reset the file `types` filter.
hooks:
- id: pyright
- name: Typecheck - AutoGPT
- alias: pyright-autogpt
+ name: Typecheck - AutoGPT Platform - Backend
+ alias: pyright-platform-backend
+ entry: poetry -C autogpt_platform/backend run pyright
+ args: [-p, autogpt_platform/backend, autogpt_platform/backend]
+      # include autogpt_libs source (since it's a path dependency) but exclude *_test.py files:
+      files: ^autogpt_platform/(backend/((backend|test)/|(\w+\.py|poetry\.lock)$)|autogpt_libs/(autogpt_libs/.*(?<!_test)\.py|poetry\.lock)$)
+      types: [file]
+      language: system
+      pass_filenames: false
diff --git a/autogpt_platform/autogpt_libs/poetry.lock b/autogpt_platform/autogpt_libs/poetry.lock
--- a/autogpt_platform/autogpt_libs/poetry.lock
+++ b/autogpt_platform/autogpt_libs/poetry.lock
+[[package]]
+name = "ruff"
+version = "0.7.2"
+description = "An extremely fast Python linter and code formatter, written in Rust."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "ruff-0.7.2-py3-none-linux_armv6l.whl", hash = "sha256:b73f873b5f52092e63ed540adefc3c36f1f803790ecf2590e1df8bf0a9f72cb8"},
+ {file = "ruff-0.7.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5b813ef26db1015953daf476202585512afd6a6862a02cde63f3bafb53d0b2d4"},
+ {file = "ruff-0.7.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:853277dbd9675810c6826dad7a428d52a11760744508340e66bf46f8be9701d9"},
+ {file = "ruff-0.7.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21aae53ab1490a52bf4e3bf520c10ce120987b047c494cacf4edad0ba0888da2"},
+ {file = "ruff-0.7.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ccc7e0fc6e0cb3168443eeadb6445285abaae75142ee22b2b72c27d790ab60ba"},
+ {file = "ruff-0.7.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd77877a4e43b3a98e5ef4715ba3862105e299af0c48942cc6d51ba3d97dc859"},
+ {file = "ruff-0.7.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e00163fb897d35523c70d71a46fbaa43bf7bf9af0f4534c53ea5b96b2e03397b"},
+ {file = "ruff-0.7.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3c54b538633482dc342e9b634d91168fe8cc56b30a4b4f99287f4e339103e88"},
+ {file = "ruff-0.7.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b792468e9804a204be221b14257566669d1db5c00d6bb335996e5cd7004ba80"},
+ {file = "ruff-0.7.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dba53ed84ac19ae4bfb4ea4bf0172550a2285fa27fbb13e3746f04c80f7fa088"},
+ {file = "ruff-0.7.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b19fafe261bf741bca2764c14cbb4ee1819b67adb63ebc2db6401dcd652e3748"},
+ {file = "ruff-0.7.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:28bd8220f4d8f79d590db9e2f6a0674f75ddbc3847277dd44ac1f8d30684b828"},
+ {file = "ruff-0.7.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9fd67094e77efbea932e62b5d2483006154794040abb3a5072e659096415ae1e"},
+ {file = "ruff-0.7.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:576305393998b7bd6c46018f8104ea3a9cb3fa7908c21d8580e3274a3b04b691"},
+ {file = "ruff-0.7.2-py3-none-win32.whl", hash = "sha256:fa993cfc9f0ff11187e82de874dfc3611df80852540331bc85c75809c93253a8"},
+ {file = "ruff-0.7.2-py3-none-win_amd64.whl", hash = "sha256:dd8800cbe0254e06b8fec585e97554047fb82c894973f7ff18558eee33d1cb88"},
+ {file = "ruff-0.7.2-py3-none-win_arm64.whl", hash = "sha256:bb8368cd45bba3f57bb29cbb8d64b4a33f8415d0149d2655c5c8539452ce7760"},
+ {file = "ruff-0.7.2.tar.gz", hash = "sha256:2b14e77293380e475b4e3a7a368e14549288ed2931fce259a6f99978669e844f"},
+]
+
[[package]]
name = "six"
version = "1.16.0"
@@ -1724,4 +1751,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<4.0"
-content-hash = "f80654aae542b1f2f3a44a01f197f87ffbaea52f474dd2cc2b72b8d56b155563"
+content-hash = "55475acb18a4fd5dc74bc64d89a24fff1f41e8cd61304c15ec3df2503bbeba56"
diff --git a/autogpt_platform/autogpt_libs/pyproject.toml b/autogpt_platform/autogpt_libs/pyproject.toml
index 5089b7d6810c..873cd6dae0f6 100644
--- a/autogpt_platform/autogpt_libs/pyproject.toml
+++ b/autogpt_platform/autogpt_libs/pyproject.toml
@@ -19,7 +19,14 @@ supabase = "^2.9.1"
[tool.poetry.group.dev.dependencies]
redis = "^5.2.0"
+ruff = "^0.7.2"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
+
+[tool.ruff]
+line-length = 88
+
+[tool.ruff.lint]
+extend-select = ["I"] # sort imports (isort rules)
diff --git a/autogpt_platform/backend/backend/blocks/pinecone.py b/autogpt_platform/backend/backend/blocks/pinecone.py
index 91364fce92f0..5ef8e639920f 100644
--- a/autogpt_platform/backend/backend/blocks/pinecone.py
+++ b/autogpt_platform/backend/backend/blocks/pinecone.py
@@ -1,4 +1,5 @@
-from typing import Literal
+import uuid
+from typing import Any, Literal
from autogpt_libs.supabase_integration_credentials_store import APIKeyCredentials
from pinecone import Pinecone, ServerlessSpec
@@ -98,10 +99,14 @@ class Input(BlockSchema):
include_metadata: bool = SchemaField(
description="Whether to include metadata in the response", default=True
)
- host: str = SchemaField(description="Host for pinecone")
+ host: str = SchemaField(description="Host for pinecone", default="")
+ idx_name: str = SchemaField(description="Index name for pinecone")
class Output(BlockSchema):
- results: dict = SchemaField(description="Query results from Pinecone")
+ results: Any = SchemaField(description="Query results from Pinecone")
+ combined_results: Any = SchemaField(
+ description="Combined results from Pinecone"
+ )
def __init__(self):
super().__init__(
@@ -119,13 +124,105 @@ def run(
credentials: APIKeyCredentials,
**kwargs,
) -> BlockOutput:
- pc = Pinecone(api_key=credentials.api_key.get_secret_value())
- idx = pc.Index(host=input_data.host)
- results = idx.query(
- namespace=input_data.namespace,
- vector=input_data.query_vector,
- top_k=input_data.top_k,
- include_values=input_data.include_values,
- include_metadata=input_data.include_metadata,
- )
- yield "results", results
+ try:
+ # Create a new client instance
+ pc = Pinecone(api_key=credentials.api_key.get_secret_value())
+
+ # Get the index
+ idx = pc.Index(input_data.idx_name)
+
+            # Ensure query_vector is a flat list; if a nested list like [[...]] is passed, use its first row
+ query_vector = input_data.query_vector
+ if isinstance(query_vector, list) and len(query_vector) > 0:
+ if isinstance(query_vector[0], list):
+ query_vector = query_vector[0]
+
+ results = idx.query(
+ namespace=input_data.namespace,
+ vector=query_vector,
+ top_k=input_data.top_k,
+ include_values=input_data.include_values,
+ include_metadata=input_data.include_metadata,
+ ).to_dict()
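+            # Combine the "text" metadata of all matching vectors into one string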
+ combined_text = ""
+ if results["matches"]:
+ texts = [
+ match["metadata"]["text"]
+ for match in results["matches"]
+ if match.get("metadata", {}).get("text")
+ ]
+ combined_text = "\n\n".join(texts)
+
+ # Return both the raw matches and combined text
+ yield "results", {
+ "matches": results["matches"],
+ "combined_text": combined_text,
+ }
+ yield "combined_results", combined_text
+
+ except Exception as e:
+ error_msg = f"Error querying Pinecone: {str(e)}"
+ raise RuntimeError(error_msg) from e
+
+
+class PineconeInsertBlock(Block):
+ class Input(BlockSchema):
+ credentials: PineconeCredentialsInput = PineconeCredentialsField()
+ index: str = SchemaField(description="Initialized Pinecone index")
+ chunks: list = SchemaField(description="List of text chunks to ingest")
+ embeddings: list = SchemaField(
+ description="List of embeddings corresponding to the chunks"
+ )
+ namespace: str = SchemaField(
+ description="Namespace to use in Pinecone", default=""
+ )
+ metadata: dict = SchemaField(
+ description="Additional metadata to store with each vector", default={}
+ )
+
+ class Output(BlockSchema):
+ upsert_response: str = SchemaField(
+ description="Response from Pinecone upsert operation"
+ )
+
+ def __init__(self):
+ super().__init__(
+ id="477f2168-cd91-475a-8146-9499a5982434",
+ description="Upload data to a Pinecone index",
+ categories={BlockCategory.LOGIC},
+ input_schema=PineconeInsertBlock.Input,
+ output_schema=PineconeInsertBlock.Output,
+ )
+
+ def run(
+ self,
+ input_data: Input,
+ *,
+ credentials: APIKeyCredentials,
+ **kwargs,
+ ) -> BlockOutput:
+ try:
+ # Create a new client instance
+ pc = Pinecone(api_key=credentials.api_key.get_secret_value())
+
+ # Get the index
+ idx = pc.Index(input_data.index)
+
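+            # Build one vector per chunk; each gets a random UUID id and the chunk text stored as "text" metadata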
+ vectors = []
+ for chunk, embedding in zip(input_data.chunks, input_data.embeddings):
+ vector_metadata = input_data.metadata.copy()
+ vector_metadata["text"] = chunk
+ vectors.append(
+ {
+ "id": str(uuid.uuid4()),
+ "values": embedding,
+ "metadata": vector_metadata,
+ }
+ )
+ idx.upsert(vectors=vectors, namespace=input_data.namespace)
+
+ yield "upsert_response", "successfully upserted"
+
+ except Exception as e:
+ error_msg = f"Error uploading to Pinecone: {str(e)}"
+ raise RuntimeError(error_msg) from e
diff --git a/autogpt_platform/backend/migrations/20241108170448_add_api_key_support/migration.sql b/autogpt_platform/backend/migrations/20241108170448_add_api_key_support/migration.sql
new file mode 100644
index 000000000000..6c4bbfd4ca7c
--- /dev/null
+++ b/autogpt_platform/backend/migrations/20241108170448_add_api_key_support/migration.sql
@@ -0,0 +1,44 @@
+-- CreateEnum
+CREATE TYPE "APIKeyPermission" AS ENUM ('EXECUTE_GRAPH', 'READ_GRAPH', 'EXECUTE_BLOCK', 'READ_BLOCK');
+
+-- CreateEnum
+CREATE TYPE "APIKeyStatus" AS ENUM ('ACTIVE', 'REVOKED', 'SUSPENDED');
+
+-- CreateTable
+CREATE TABLE "APIKey" (
+ "id" TEXT NOT NULL,
+ "name" TEXT NOT NULL,
+ "prefix" TEXT NOT NULL,
+ "postfix" TEXT NOT NULL,
+ "key" TEXT NOT NULL,
+ "status" "APIKeyStatus" NOT NULL DEFAULT 'ACTIVE',
+ "permissions" "APIKeyPermission"[],
+ "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ "lastUsedAt" TIMESTAMP(3),
+ "revokedAt" TIMESTAMP(3),
+ "description" TEXT,
+ "userId" TEXT NOT NULL,
+
+ CONSTRAINT "APIKey_pkey" PRIMARY KEY ("id")
+);
+
+-- CreateIndex
+CREATE UNIQUE INDEX "APIKey_key_key" ON "APIKey"("key");
+
+-- CreateIndex
+CREATE INDEX "APIKey_key_idx" ON "APIKey"("key");
+
+-- CreateIndex
+CREATE INDEX "APIKey_prefix_idx" ON "APIKey"("prefix");
+
+-- CreateIndex
+CREATE INDEX "APIKey_userId_idx" ON "APIKey"("userId");
+
+-- CreateIndex
+CREATE INDEX "APIKey_status_idx" ON "APIKey"("status");
+
+-- CreateIndex
+CREATE INDEX "APIKey_userId_status_idx" ON "APIKey"("userId", "status");
+
+-- AddForeignKey
+ALTER TABLE "APIKey" ADD CONSTRAINT "APIKey_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
diff --git a/autogpt_platform/backend/schema.prisma b/autogpt_platform/backend/schema.prisma
index 3fedcb37171c..b8df6b1ed7ae 100644
--- a/autogpt_platform/backend/schema.prisma
+++ b/autogpt_platform/backend/schema.prisma
@@ -27,6 +27,7 @@ model User {
AnalyticsDetails AnalyticsDetails[]
AnalyticsMetrics AnalyticsMetrics[]
UserBlockCredit UserBlockCredit[]
+ APIKeys APIKey[]
@@index([id])
@@index([email])
@@ -277,3 +278,42 @@ model UserBlockCredit {
@@id(name: "creditTransactionIdentifier", [transactionKey, userId])
}
+
+enum APIKeyPermission {
+ EXECUTE_GRAPH // Can execute agent graphs
+ READ_GRAPH // Can get graph versions and details
+ EXECUTE_BLOCK // Can execute individual blocks
+ READ_BLOCK // Can get block information
+}
+
+model APIKey {
+ id String @id @default(uuid())
+ name String
+ prefix String // First 8 chars for identification
+ postfix String
+ key String @unique // Hashed key
+ status APIKeyStatus @default(ACTIVE)
+ permissions APIKeyPermission[]
+
+ createdAt DateTime @default(now())
+ lastUsedAt DateTime?
+ revokedAt DateTime?
+
+ description String?
+
+ // Relation to user
+ userId String
+ user User @relation(fields: [userId], references: [id], onDelete: Cascade)
+
+ @@index([key])
+ @@index([prefix])
+ @@index([userId])
+ @@index([status])
+ @@index([userId, status])
+}
+
+enum APIKeyStatus {
+ ACTIVE
+ REVOKED
+ SUSPENDED
+}