From f8b0a6c3b08be02bed4ac0ff0e1cbe32e5c4cb37 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Tue, 10 Oct 2023 13:13:51 -0400 Subject: [PATCH 01/57] fix format --- .github/workflows/python-package.yml | 19 +++++++++++++------ .gitignore | 4 +++- Makefile | 2 ++ docker-compose.yml | 7 ++++--- runact.sh | 18 ++++++++++++++++++ vendor/Auto-GPT | 2 +- 6 files changed, 41 insertions(+), 11 deletions(-) create mode 100644 Makefile create mode 100644 runact.sh diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 30d8876..e71d932 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -8,22 +8,29 @@ on: pull_request: branches: [ "runautogpt-main" ] jobs: +<<<<<<< HEAD run-autogpt-in-githubaction: +======= + python-package-build: +>>>>>>> f1e604a (starting to work) runs-on: ubuntu-latest strategy: fail-fast: false matrix: - python-version: ["3.10"] + python-version: [ + #"3.10", + "3.x"] steps: - uses: actions/checkout@v3 - name: Checkout repository uses: actions/checkout@master with: submodules: 'true' - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v3 - with: - python-version: ${{ matrix.python-version }} + #- name: Set up Python ${{ matrix.python-version }} + #3 uses: actions/setup-python@v3 + # with: + # architecture: x64 + # python-version: ${{ matrix.python-version }} - name: Install dependencies run: | python -m pip install --upgrade pip @@ -34,7 +41,7 @@ jobs: # flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics # flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - name: Docker Compose Action - uses: isbang/compose-action@v1.5.1 + uses: meta-introspector/compose-action@main env: GITHUB_PAT: ${{ secrets.PAT }} GITHUB_REPO: "jmikedupont2/ai-ticket" diff --git a/.gitignore b/.gitignore index 311200f..d54b7d5 100644 --- a/.gitignore +++ b/.gitignore @@ -160,4 +160,6 @@ cython_debug/ # option (not recommended) you can uncomment the following to ignore the entire idea folder. #.idea/ -*~ \ No newline at end of file +*~ +/dckr_pat_.txt +/github_pat.txt diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..53433c5 --- /dev/null +++ b/Makefile @@ -0,0 +1,2 @@ +test: + bash ./runact.sh diff --git a/docker-compose.yml b/docker-compose.yml index ef6e8e8..7f12900 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -2,9 +2,9 @@ version: '3' services: ai_ticket: - image: ai_ticket - build: - context: . + #image: ai_ticket + build: . + # context: . #entrypoint: /bin/bash #stdin_open: true # docker run -i #tty: true # docker run -t @@ -20,6 +20,7 @@ services: build: context: vendor/Auto-GPT/ depends_on: + - ai_ticket - mockopenai mockopenai: diff --git a/runact.sh b/runact.sh new file mode 100644 index 0000000..997e85a --- /dev/null +++ b/runact.sh @@ -0,0 +1,18 @@ + +#prerequists +# git clone https://github.com/nektos/act -> https://github.com/meta-introspector/actx +# make build + + +# git clone https://github.com/moovweb/gvm +# cd gvm/ &&./autogen.sh && make install + +act \ + -s GITHUB_TOKEN=`cat github_pat.txt` \ + -s GITHUB_PAT=`cat github_pat.txt` \ + -s DOCKER=`cat dckr_pat_.txt` \ + -P ubuntu-latest=localhost/my_local_act \ + --verbose \ + --job python-package-build + +# --pull=false \ diff --git a/vendor/Auto-GPT b/vendor/Auto-GPT index 2717205..2c9f2ec 160000 --- a/vendor/Auto-GPT +++ b/vendor/Auto-GPT @@ -1 +1 @@ -Subproject commit 271720501967226a1c5fb67fd5eba4a19a058e6a +Subproject commit 2c9f2ec8badcf331c74ed241a2df56abe4408cac From 3e994822ed687ab55d0c1f3bf2d204e7c26412e4 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Sat, 30 Sep 2023 21:29:06 -0400 Subject: [PATCH 02/57] working better now just need to add a job --- .github/workflows/debug.yml | 20 ++++++++++++++++++++ .github/workflows/python-package.yml | 24 ++++++++++++++++++++++-- .secrets.example.txt | 4 ++++ runact.sh | 10 +++------- 4 files changed, 49 insertions(+), 9 deletions(-) create mode 100644 .github/workflows/debug.yml create mode 100644 .secrets.example.txt diff --git a/.github/workflows/debug.yml b/.github/workflows/debug.yml new file mode 100644 index 0000000..5d63fd2 --- /dev/null +++ b/.github/workflows/debug.yml @@ -0,0 +1,20 @@ +name: Python package +on: + push: + branches: [ "main" ] + pull_request: + branches: [ "main" ] +jobs: + debug: + runs-on: ubuntu-latest + steps: + - name: test + env: + GITHUB_PAT: ${{ secrets.PAT }} + GITHUB_1: ${{ secrets.PAT }} + DOCKERTEST: ${{ secrets.DOCKER }} + run: echo pat ${{ secrets.PAT }} docker ${{ secrets.DOCKER }} and test1 ${DOCKERTEST} and $GITHUB_1 + - name: test2 + env: + GITHUB_PAT: ${{ secrets.PAT }} + run: docker-compose -e GIHUB_PAT="${{ secrets.PAT }}" up diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index e71d932..9f97959 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -21,6 +21,8 @@ jobs: #"3.10", "3.x"] steps: + + - uses: actions/checkout@v3 - name: Checkout repository uses: actions/checkout@master @@ -36,12 +38,30 @@ jobs: python -m pip install --upgrade pip #python -m pip install flake8 pytest if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + pip install podman-compose #- name: Lint with flake8 # run: | # flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics # flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - - name: Docker Compose Action - uses: meta-introspector/compose-action@main + - name: Docker Compose Action build + #uses: meta-introspector/compose-action@main env: GITHUB_PAT: ${{ secrets.PAT }} GITHUB_REPO: "jmikedupont2/ai-ticket" + run: docker-compose build + + - name: Docker Compose Action echo + env: + GITHUB_PAT: ${{ secrets.PAT }} + GITHUB_REPO: "jmikedupont2/ai-ticket" + run: echo pat ${{ secrets.PAT }} +# run: echo docker-compose up -e GITHUB_PAT=${{ secrets.PAT }} -e GITHUB_REPO="jmikedupont2/ai-ticket" + + - name: Docker Compose Action do it + #uses: meta-introspector/compose-action@main + env: + GITHUB_PAT: ${{ secrets.PAT }} + GITHUB_REPO: "jmikedupont2/ai-ticket" + run: | + GITHUB_PAT=${{ secrets.PAT }} GITHUB_REPO="jmikedupont2/ai-ticket" docker-compose up + diff --git a/.secrets.example.txt b/.secrets.example.txt new file mode 100644 index 0000000..3c53809 --- /dev/null +++ b/.secrets.example.txt @@ -0,0 +1,4 @@ +GITHUB_TOKEN= +GITHUB_PAT= +PAT=this isused +DOCKER= diff --git a/runact.sh b/runact.sh index 997e85a..b9b418c 100644 --- a/runact.sh +++ b/runact.sh @@ -6,13 +6,9 @@ # git clone https://github.com/moovweb/gvm # cd gvm/ &&./autogen.sh && make install - -act \ - -s GITHUB_TOKEN=`cat github_pat.txt` \ - -s GITHUB_PAT=`cat github_pat.txt` \ - -s DOCKER=`cat dckr_pat_.txt` \ - -P ubuntu-latest=localhost/my_local_act \ - --verbose \ +# use .secrets PAT And DOCKER +act -P ubuntu-latest=localhost/my_local_act \ + --verbose \ --job python-package-build # --pull=false \ From 5bf77fc042afeacf4fd89300751062ddd7946fd1 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Sun, 1 Oct 2023 11:45:40 -0400 Subject: [PATCH 03/57] use images --- docker-compose.yml | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 7f12900..b0e11a2 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -2,8 +2,10 @@ version: '3' services: ai_ticket: - #image: ai_ticket - build: . + + image: h4ckermike/ai-ticket:ai_ticket + + #build: . # context: . #entrypoint: /bin/bash #stdin_open: true # docker run -i @@ -19,11 +21,23 @@ services: #tty: true # docker run -t build: context: vendor/Auto-GPT/ + environment: + - GITHUB_PAT="${GITHUB_PAT}" + - GITHUB_REPO="jmikedupont2/ai-ticket" + - OPENAI_API_KEY=your-openai-api-key + - OPENAI_API_BASE=http://mockopenai:5000/v1 + + image: h4ckermike/ai-ticket:autogpt + + #build: + # context: vendor/Auto-GPT/ + depends_on: - ai_ticket - mockopenai mockopenai: + image: h4ckermike/ai-ticket:mockopenai depends_on: - ai_ticket @@ -31,8 +45,8 @@ services: - GITHUB_PAT=${GITHUB_PAT} - GITHUB_REPO=${GITHUB_REPO} - build: - context: vendor/lollms/ + #build: + # context: vendor/lollms/ ports: - "5000:5000" From ba278e828c589ade5972527439803477dee1bd14 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Sun, 1 Oct 2023 16:20:15 -0400 Subject: [PATCH 04/57] update ports --- docker-compose.yml | 26 ++++++++------------------ 1 file changed, 8 insertions(+), 18 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index b0e11a2..baeef35 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -2,42 +2,32 @@ version: '3' services: ai_ticket: - - image: h4ckermike/ai-ticket:ai_ticket - - #build: . + #image: ai_ticket + build: . # context: . #entrypoint: /bin/bash #stdin_open: true # docker run -i #tty: true # docker run -t autogpt: - #entrypoint: bash -c "poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y" entrypoint: bash -c "poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y --ai-name 'meta-autogpt' --ai-role 'you will introspect autogpt and reveal its internals via reflection and comprehension' --ai-goal 'Observe your behaviour' --ai-goal 'Reflect over your outcomes' --ai-goal 'Orient yourself to your knowledge' --ai-goal 'Decide on your next step' --ai-goal 'Act on your chosen next experiment' " # uncomment thse next 3 lines for debugging #entrypoint: /bin/bash #stdin_open: true # docker run -i #tty: true # docker run -t - build: - context: vendor/Auto-GPT/ environment: - GITHUB_PAT="${GITHUB_PAT}" - GITHUB_REPO="jmikedupont2/ai-ticket" - OPENAI_API_KEY=your-openai-api-key - - OPENAI_API_BASE=http://mockopenai:5000/v1 - - image: h4ckermike/ai-ticket:autogpt - - #build: - # context: vendor/Auto-GPT/ - + - OPENAI_API_BASE=http://mockopenai:8080/v1 + build: + context: vendor/Auto-GPT/ depends_on: - ai_ticket - mockopenai mockopenai: - image: h4ckermike/ai-ticket:mockopenai depends_on: - ai_ticket @@ -45,8 +35,8 @@ services: - GITHUB_PAT=${GITHUB_PAT} - GITHUB_REPO=${GITHUB_REPO} - #build: - # context: vendor/lollms/ + build: + context: vendor/lollms/ ports: - - "5000:5000" + - "5000:8080" From e93a68586e77496d17d4585582bcb153b206713d Mon Sep 17 00:00:00 2001 From: mike dupont Date: Mon, 2 Oct 2023 09:48:16 -0400 Subject: [PATCH 05/57] work in progress still having python issues. --- .github/workflows/debug.yml | 20 ----- .github/workflows/pipeline.yml | 112 +++++++++++++++++++++++++++ .github/workflows/python-package.yml | 15 ++-- Dockerfile | 43 +++++++++- docker-compose.yml | 2 +- poetry.lock | 7 ++ pyproject.toml | 39 +++++++++- runact.sh | 4 +- 8 files changed, 209 insertions(+), 33 deletions(-) delete mode 100644 .github/workflows/debug.yml create mode 100644 .github/workflows/pipeline.yml create mode 100644 poetry.lock diff --git a/.github/workflows/debug.yml b/.github/workflows/debug.yml deleted file mode 100644 index 5d63fd2..0000000 --- a/.github/workflows/debug.yml +++ /dev/null @@ -1,20 +0,0 @@ -name: Python package -on: - push: - branches: [ "main" ] - pull_request: - branches: [ "main" ] -jobs: - debug: - runs-on: ubuntu-latest - steps: - - name: test - env: - GITHUB_PAT: ${{ secrets.PAT }} - GITHUB_1: ${{ secrets.PAT }} - DOCKERTEST: ${{ secrets.DOCKER }} - run: echo pat ${{ secrets.PAT }} docker ${{ secrets.DOCKER }} and test1 ${DOCKERTEST} and $GITHUB_1 - - name: test2 - env: - GITHUB_PAT: ${{ secrets.PAT }} - run: docker-compose -e GIHUB_PAT="${{ secrets.PAT }}" up diff --git a/.github/workflows/pipeline.yml b/.github/workflows/pipeline.yml new file mode 100644 index 0000000..a51aabc --- /dev/null +++ b/.github/workflows/pipeline.yml @@ -0,0 +1,112 @@ +name: Pipeline + +on: push + +jobs: + code-quality: + runs-on: ubuntu-latest + steps: + - name: Checkout Repository + uses: actions/checkout@v3 + # - name: Setup Python + # uses: actions/setup-python@v4 + # with: + # python-version: 3.10.12 + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + version: 1.4.1 + virtualenvs-in-project: true + - name: Load cached venv + id: cached-poetry-dependencies + uses: actions/cache@v3 + with: + path: .venv + key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} + - name: Install dependencies + if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' + run: | + poetry install --no-interaction --no-root + - name: pylint + run: | + source .venv/bin/activate + pylint build tests + - name: black + run: | + source .venv/bin/activate + black --check . + + run-tests: + needs: code-quality + runs-on: ubuntu-latest + steps: + - name: Checkout Repository + uses: actions/checkout@v3 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + # - name: Setup Python + # uses: actions/setup-python@v4 + # with: + # python-version: 3.10.12 + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + version: 1.4.1 + virtualenvs-in-project: true + - name: Load cached venv + id: cached-poetry-dependencies + uses: actions/cache@v3 + with: + path: .venv + key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} + - name: Install dependencies + if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' + run: | + poetry install --no-interaction --no-root + - name: Run all tests with pytest + run: | + source .venv/bin/activate + pytest --cov + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v3 + with: + fail_ci_if_error: true + token: ${{ secrets.CODECOV_TOKEN }} + + publish-all-images: + needs: run-tests + if: startsWith(github.ref, 'refs/tags/') + runs-on: ubuntu-latest + steps: + - name: Checkout Repository + uses: actions/checkout@v3 + - name: Get Git Commit Tag Name + uses: olegtarasov/get-tag@v2.1 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + # - name: Setup Python + # uses: actions/setup-python@v4 + # with: + # python-version: 3.10.12 + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + version: 1.4.1 + virtualenvs-in-project: true + - name: Load cached venv + id: cached-poetry-dependencies + uses: actions/cache@v3 + with: + path: .venv + key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} + - name: Install dependencies + if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' + run: | + poetry install --no-interaction --no-root + - name: Publish Image to Docker Hub + env: + DOCKER_HUB_USERNAME: ${{ secrets.DOCKER_HUB_USERNAME }} + DOCKER_HUB_PASSWORD: ${{ secrets.DOCKER_HUB_PASSWORD }} + run: | + source .venv/bin/activate + python -m build.publish diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 9f97959..75faa8f 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -50,18 +50,19 @@ jobs: GITHUB_REPO: "jmikedupont2/ai-ticket" run: docker-compose build - - name: Docker Compose Action echo + + - name: create openai env: GITHUB_PAT: ${{ secrets.PAT }} GITHUB_REPO: "jmikedupont2/ai-ticket" - run: echo pat ${{ secrets.PAT }} -# run: echo docker-compose up -e GITHUB_PAT=${{ secrets.PAT }} -e GITHUB_REPO="jmikedupont2/ai-ticket" - - - name: Docker Compose Action do it - #uses: meta-introspector/compose-action@main + run: | + GITHUB_PAT=${{ secrets.PAT }} GITHUB_REPO="jmikedupont2/ai-ticket" docker-compose up -d mockopenai + + - name: run autogpt env: GITHUB_PAT: ${{ secrets.PAT }} GITHUB_REPO: "jmikedupont2/ai-ticket" run: | - GITHUB_PAT=${{ secrets.PAT }} GITHUB_REPO="jmikedupont2/ai-ticket" docker-compose up + GITHUB_PAT=${{ secrets.PAT }} GITHUB_REPO="jmikedupont2/ai-ticket" docker-compose run autogpt + diff --git a/Dockerfile b/Dockerfile index ae68579..3517970 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,44 @@ -FROM python:3.10-slim +# The Poetry installation is provided through the base image. Please check the +# base image if you interested in the details. +# Base image: https://hub.docker.com/r/pfeiffermax/python-poetry +# Dockerfile: https://github.com/max-pfeiffer/python-poetry/blob/main/build/Dockerfile +ARG BASE_IMAGE +FROM ${BASE_IMAGE} +ARG APPLICATION_SERVER_PORT + +LABEL maintainer="Mike DuPont " + +ENV PYTHONUNBUFFERED=1 \ + # https://docs.python.org/3/using/cmdline.html#envvar-PYTHONDONTWRITEBYTECODE + PYTHONDONTWRITEBYTECODE=1 \ + PYTHONPATH=/application_root \ + # https://python-poetry.org/docs/configuration/#virtualenvsin-project + POETRY_VIRTUALENVS_IN_PROJECT=true \ + POETRY_CACHE_DIR="/application_root/.cache" \ + VIRTUAL_ENVIRONMENT_PATH="/application_root/.venv" \ + APPLICATION_SERVER_PORT=$APPLICATION_SERVER_PORT +# Adding the virtual environment to PATH in order to "activate" it. +# https://docs.python.org/3/library/venv.html#how-venvs-work +ENV PATH="$VIRTUAL_ENVIRONMENT_PATH/bin:$PATH" + +# Principle of least privilege: create a new user for running the application +RUN groupadd -g 1001 python_application && \ + useradd -r -u 1001 -g python_application python_application + +# Set the WORKDIR to the application root. +# https://www.uvicorn.org/settings/#development +# https://docs.docker.com/engine/reference/builder/#workdir +WORKDIR ${PYTHONPATH} +RUN chown python_application:python_application ${PYTHONPATH} + +# Create cache directory and set permissions because user 1001 has no home +# and poetry cache directory. +# https://python-poetry.org/docs/configuration/#cache-directory +RUN mkdir ${POETRY_CACHE_DIR} && chown python_application:python_application ${POETRY_CACHE_DIR} + +# Use the unpriveledged user to run the application +USER 1001 + WORKDIR /opt/ai-ticket COPY pyproject.toml /opt/ai-ticket/ COPY setup.cfg /opt/ai-ticket/ @@ -6,7 +46,6 @@ COPY requirements.txt /opt/ai-ticket/ COPY ./src/ /opt/ai-ticket/src/ RUN pip install /opt/ai-ticket/ - RUN apt update RUN apt install -y git RUN pip install --trusted-host pypi.python.org -r requirements.txt diff --git a/docker-compose.yml b/docker-compose.yml index baeef35..73d1454 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -10,7 +10,7 @@ services: #tty: true # docker run -t autogpt: - entrypoint: bash -c "poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y --ai-name 'meta-autogpt' --ai-role 'you will introspect autogpt and reveal its internals via reflection and comprehension' --ai-goal 'Observe your behaviour' --ai-goal 'Reflect over your outcomes' --ai-goal 'Orient yourself to your knowledge' --ai-goal 'Decide on your next step' --ai-goal 'Act on your chosen next experiment' " + entrypoint: bash -c "poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y --ai-name 'meta-autogpt' --ai-role 'you will introspect autogpt and reveal its internals via reflection and comprehension' --ai-goal 'Observe your behaviour' --ai-goal 'Reflect over your outcomes' --ai-goal 'Orient yourself to your knowledge' --ai-goal 'Decide on your next step' --ai-goal 'Act on your chosen next experiment' " # uncomment thse next 3 lines for debugging #entrypoint: /bin/bash diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..4e433ab --- /dev/null +++ b/poetry.lock @@ -0,0 +1,7 @@ +# This file is automatically @generated by Poetry and should not be changed by hand. +package = [] + +[metadata] +lock-version = "2.0" +python-versions = "*" +content-hash = "115cf985d932e9bf5f540555bbdd75decbb62cac81e399375fc19f6277f8c1d8" diff --git a/pyproject.toml b/pyproject.toml index cfc991c..94356c3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,9 +1,13 @@ [build-system] +python = "^3.7" requires = [ "setuptools>=42", - "wheel" + "wheel", + "poetry-core" ] -build-backend = "setuptools.build_meta" +#build-backend = "setuptools.build_meta" +build-backend = "poetry.core.masonry.api" + [tool.black] line-length = 120 @@ -16,3 +20,34 @@ combine_as_imports = true combine_star = true known_local_folder = ["tests", "cli"] known_first_party = ["test_utils"] + +[tool.poetry] +name = "ai-ticket" +version = "0.0.1" +authors = [ + "Mike Dupont ", +] +readme = "README.md" +description = "AI ticket" + +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", +] +#packages = . + +# [tool.poetry.scripts] +# [tool.poetry.dependencies] +[tool.poetry.group.dev.dependencies] +python = "^3.7" +pytest = "7.4.0" +pytest-cov = "4.1.0" +coverage = "7.3.1" +requests = "2.31.0" +black = "23.7.0" +pre-commit = "3.3.3" +semver = "3.0.1" +pylint = "2.17.5" +testcontainers = "3.7.1" + diff --git a/runact.sh b/runact.sh index b9b418c..b2302a3 100644 --- a/runact.sh +++ b/runact.sh @@ -9,6 +9,8 @@ # use .secrets PAT And DOCKER act -P ubuntu-latest=localhost/my_local_act \ --verbose \ - --job python-package-build + --job code-quality + +# --job python-package-build # --pull=false \ From 8b43cda56a8d9fb4107ad4253e3376ba659a8f2f Mon Sep 17 00:00:00 2001 From: mike dupont Date: Mon, 2 Oct 2023 09:53:09 -0400 Subject: [PATCH 06/57] lock --- poetry.lock | 1014 +++++++++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 5 + 2 files changed, 1016 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4e433ab..eb69fc3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,7 +1,1015 @@ # This file is automatically @generated by Poetry and should not be changed by hand. -package = [] + +[[package]] +name = "astroid" +version = "2.15.8" +description = "An abstract syntax tree for Python with inference support." +category = "dev" +optional = false +python-versions = ">=3.7.2" +files = [ + {file = "astroid-2.15.8-py3-none-any.whl", hash = "sha256:1aa149fc5c6589e3d0ece885b4491acd80af4f087baafa3fb5203b113e68cd3c"}, + {file = "astroid-2.15.8.tar.gz", hash = "sha256:6c107453dffee9055899705de3c9ead36e74119cee151e5a9aaf7f0b0e020a6a"}, +] + +[package.dependencies] +lazy-object-proxy = ">=1.4.0" +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} +wrapt = {version = ">=1.11,<2", markers = "python_version < \"3.11\""} + +[[package]] +name = "black" +version = "23.7.0" +description = "The uncompromising code formatter." +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, + {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, + {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, + {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, + {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, + {file = "black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, + {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, + {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, + {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, + {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, + {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "certifi" +version = "2023.7.22" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, + {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.3.1" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cd0f7429ecfd1ff597389907045ff209c8fdb5b013d38cfa7c60728cb484b6e3"}, + {file = "coverage-7.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:966f10df9b2b2115da87f50f6a248e313c72a668248be1b9060ce935c871f276"}, + {file = "coverage-7.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0575c37e207bb9b98b6cf72fdaaa18ac909fb3d153083400c2d48e2e6d28bd8e"}, + {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:245c5a99254e83875c7fed8b8b2536f040997a9b76ac4c1da5bff398c06e860f"}, + {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c96dd7798d83b960afc6c1feb9e5af537fc4908852ef025600374ff1a017392"}, + {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:de30c1aa80f30af0f6b2058a91505ea6e36d6535d437520067f525f7df123887"}, + {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:50dd1e2dd13dbbd856ffef69196781edff26c800a74f070d3b3e3389cab2600d"}, + {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9c0c19f70d30219113b18fe07e372b244fb2a773d4afde29d5a2f7930765136"}, + {file = "coverage-7.3.1-cp310-cp310-win32.whl", hash = "sha256:770f143980cc16eb601ccfd571846e89a5fe4c03b4193f2e485268f224ab602f"}, + {file = "coverage-7.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:cdd088c00c39a27cfa5329349cc763a48761fdc785879220d54eb785c8a38520"}, + {file = "coverage-7.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74bb470399dc1989b535cb41f5ca7ab2af561e40def22d7e188e0a445e7639e3"}, + {file = "coverage-7.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:025ded371f1ca280c035d91b43252adbb04d2aea4c7105252d3cbc227f03b375"}, + {file = "coverage-7.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6191b3a6ad3e09b6cfd75b45c6aeeffe7e3b0ad46b268345d159b8df8d835f9"}, + {file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb0b188f30e41ddd659a529e385470aa6782f3b412f860ce22b2491c89b8593"}, + {file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c8f0df9dfd8ff745bccff75867d63ef336e57cc22b2908ee725cc552689ec8"}, + {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7eb3cd48d54b9bd0e73026dedce44773214064be93611deab0b6a43158c3d5a0"}, + {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ac3c5b7e75acac31e490b7851595212ed951889918d398b7afa12736c85e13ce"}, + {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b4ee7080878077af0afa7238df1b967f00dc10763f6e1b66f5cced4abebb0a3"}, + {file = "coverage-7.3.1-cp311-cp311-win32.whl", hash = "sha256:229c0dd2ccf956bf5aeede7e3131ca48b65beacde2029f0361b54bf93d36f45a"}, + {file = "coverage-7.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:c6f55d38818ca9596dc9019eae19a47410d5322408140d9a0076001a3dcb938c"}, + {file = "coverage-7.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5289490dd1c3bb86de4730a92261ae66ea8d44b79ed3cc26464f4c2cde581fbc"}, + {file = "coverage-7.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca833941ec701fda15414be400c3259479bfde7ae6d806b69e63b3dc423b1832"}, + {file = "coverage-7.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd694e19c031733e446c8024dedd12a00cda87e1c10bd7b8539a87963685e969"}, + {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aab8e9464c00da5cb9c536150b7fbcd8850d376d1151741dd0d16dfe1ba4fd26"}, + {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87d38444efffd5b056fcc026c1e8d862191881143c3aa80bb11fcf9dca9ae204"}, + {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8a07b692129b8a14ad7a37941a3029c291254feb7a4237f245cfae2de78de037"}, + {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2829c65c8faaf55b868ed7af3c7477b76b1c6ebeee99a28f59a2cb5907a45760"}, + {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1f111a7d85658ea52ffad7084088277135ec5f368457275fc57f11cebb15607f"}, + {file = "coverage-7.3.1-cp312-cp312-win32.whl", hash = "sha256:c397c70cd20f6df7d2a52283857af622d5f23300c4ca8e5bd8c7a543825baa5a"}, + {file = "coverage-7.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:5ae4c6da8b3d123500f9525b50bf0168023313963e0e2e814badf9000dd6ef92"}, + {file = "coverage-7.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca70466ca3a17460e8fc9cea7123c8cbef5ada4be3140a1ef8f7b63f2f37108f"}, + {file = "coverage-7.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f2781fd3cabc28278dc982a352f50c81c09a1a500cc2086dc4249853ea96b981"}, + {file = "coverage-7.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6407424621f40205bbe6325686417e5e552f6b2dba3535dd1f90afc88a61d465"}, + {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:04312b036580ec505f2b77cbbdfb15137d5efdfade09156961f5277149f5e344"}, + {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9ad38204887349853d7c313f53a7b1c210ce138c73859e925bc4e5d8fc18e7"}, + {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:53669b79f3d599da95a0afbef039ac0fadbb236532feb042c534fbb81b1a4e40"}, + {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:614f1f98b84eb256e4f35e726bfe5ca82349f8dfa576faabf8a49ca09e630086"}, + {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f1a317fdf5c122ad642db8a97964733ab7c3cf6009e1a8ae8821089993f175ff"}, + {file = "coverage-7.3.1-cp38-cp38-win32.whl", hash = "sha256:defbbb51121189722420a208957e26e49809feafca6afeef325df66c39c4fdb3"}, + {file = "coverage-7.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:f4f456590eefb6e1b3c9ea6328c1e9fa0f1006e7481179d749b3376fc793478e"}, + {file = "coverage-7.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f12d8b11a54f32688b165fd1a788c408f927b0960984b899be7e4c190ae758f1"}, + {file = "coverage-7.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f09195dda68d94a53123883de75bb97b0e35f5f6f9f3aa5bf6e496da718f0cb6"}, + {file = "coverage-7.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6601a60318f9c3945be6ea0f2a80571f4299b6801716f8a6e4846892737ebe4"}, + {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07d156269718670d00a3b06db2288b48527fc5f36859425ff7cec07c6b367745"}, + {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:636a8ac0b044cfeccae76a36f3b18264edcc810a76a49884b96dd744613ec0b7"}, + {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5d991e13ad2ed3aced177f524e4d670f304c8233edad3210e02c465351f785a0"}, + {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:586649ada7cf139445da386ab6f8ef00e6172f11a939fc3b2b7e7c9082052fa0"}, + {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4aba512a15a3e1e4fdbfed2f5392ec221434a614cc68100ca99dcad7af29f3f8"}, + {file = "coverage-7.3.1-cp39-cp39-win32.whl", hash = "sha256:6bc6f3f4692d806831c136c5acad5ccedd0262aa44c087c46b7101c77e139140"}, + {file = "coverage-7.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:553d7094cb27db58ea91332e8b5681bac107e7242c23f7629ab1316ee73c4981"}, + {file = "coverage-7.3.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:220eb51f5fb38dfdb7e5d54284ca4d0cd70ddac047d750111a68ab1798945194"}, + {file = "coverage-7.3.1.tar.gz", hash = "sha256:6cb7fe1581deb67b782c153136541e20901aa312ceedaf1467dcb35255787952"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "deprecation" +version = "2.1.0" +description = "A library to handle automated deprecations" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, + {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, +] + +[package.dependencies] +packaging = "*" + +[[package]] +name = "dill" +version = "0.3.7" +description = "serialize all of Python" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, + {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] + +[[package]] +name = "distlib" +version = "0.3.7" +description = "Distribution utilities" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, + {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, +] + +[[package]] +name = "docker" +version = "6.1.3" +description = "A Python library for the Docker Engine API." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "docker-6.1.3-py3-none-any.whl", hash = "sha256:aecd2277b8bf8e506e484f6ab7aec39abe0038e29fa4a6d3ba86c3fe01844ed9"}, + {file = "docker-6.1.3.tar.gz", hash = "sha256:aa6d17830045ba5ef0168d5eaa34d37beeb113948c413affe1d5991fc11f9a20"}, +] + +[package.dependencies] +packaging = ">=14.0" +pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} +requests = ">=2.26.0" +urllib3 = ">=1.26.0" +websocket-client = ">=0.32.0" + +[package.extras] +ssh = ["paramiko (>=2.4.3)"] + +[[package]] +name = "exceptiongroup" +version = "1.1.3" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "filelock" +version = "3.12.4" +description = "A platform independent file lock." +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"}, + {file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"] +typing = ["typing-extensions (>=4.7.1)"] + +[[package]] +name = "identify" +version = "2.5.30" +description = "File identification library for Python" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.5.30-py2.py3-none-any.whl", hash = "sha256:afe67f26ae29bab007ec21b03d4114f41316ab9dd15aa8736a167481e108da54"}, + {file = "identify-2.5.30.tar.gz", hash = "sha256:f302a4256a15c849b91cfcdcec052a8ce914634b2f77ae87dad29cd749f2d88d"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isort" +version = "5.12.0" +description = "A Python utility / library to sort Python imports." +category = "dev" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, +] + +[package.extras] +colors = ["colorama (>=0.4.3)"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] +plugins = ["setuptools"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] + +[[package]] +name = "lazy-object-proxy" +version = "1.9.0" +description = "A fast and thorough lazy object proxy." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, +] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +category = "dev" +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nodeenv" +version = "1.8.0" +description = "Node.js virtual environment builder" +category = "dev" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, + {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, +] + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pathspec" +version = "0.11.2" +description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, +] + +[[package]] +name = "platformdirs" +version = "3.10.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, + {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + +[[package]] +name = "pluggy" +version = "1.3.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "3.3.3" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pre_commit-3.3.3-py2.py3-none-any.whl", hash = "sha256:10badb65d6a38caff29703362271d7dca483d01da88f9d7e05d0b97171c136cb"}, + {file = "pre_commit-3.3.3.tar.gz", hash = "sha256:a2256f489cd913d575c145132ae196fe335da32d91a8294b7afe6622335dd023"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "pylint" +version = "2.17.5" +description = "python code static checker" +category = "dev" +optional = false +python-versions = ">=3.7.2" +files = [ + {file = "pylint-2.17.5-py3-none-any.whl", hash = "sha256:73995fb8216d3bed149c8d51bba25b2c52a8251a2c8ac846ec668ce38fab5413"}, + {file = "pylint-2.17.5.tar.gz", hash = "sha256:f7b601cbc06fef7e62a754e2b41294c2aa31f1cb659624b9a85bcba29eaf8252"}, +] + +[package.dependencies] +astroid = ">=2.15.6,<=2.17.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = {version = ">=0.2", markers = "python_version < \"3.11\""} +isort = ">=4.2.5,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +tomlkit = ">=0.10.1" + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + +[[package]] +name = "pytest" +version = "7.4.0" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, + {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "semver" +version = "3.0.1" +description = "Python helper for Semantic Versioning (https://semver.org)" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "semver-3.0.1-py3-none-any.whl", hash = "sha256:2a23844ba1647362c7490fe3995a86e097bb590d16f0f32dfc383008f19e4cdf"}, + {file = "semver-3.0.1.tar.gz", hash = "sha256:9ec78c5447883c67b97f98c3b6212796708191d22e4ad30f4570f840171cbce1"}, +] + +[[package]] +name = "setuptools" +version = "68.2.2" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, + {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "testcontainers" +version = "3.7.1" +description = "Library provides lightweight, throwaway instances of common databases, Selenium web browsers, or anything else that can run in a Docker container" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "testcontainers-3.7.1-py2.py3-none-any.whl", hash = "sha256:7f48cef4bf0ccd78f1a4534d4b701a003a3bace851f24eae58a32f9e3f0aeba0"}, +] + +[package.dependencies] +deprecation = "*" +docker = ">=4.0.0" +wrapt = "*" + +[package.extras] +arangodb = ["python-arango"] +azurite = ["azure-storage-blob"] +clickhouse = ["clickhouse-driver"] +docker-compose = ["docker-compose"] +google-cloud-pubsub = ["google-cloud-pubsub (<2)"] +kafka = ["kafka-python"] +keycloak = ["python-keycloak"] +mongo = ["pymongo"] +mssqlserver = ["pymssql"] +mysql = ["pymysql", "sqlalchemy"] +neo4j = ["neo4j"] +oracle = ["cx-Oracle", "sqlalchemy"] +postgresql = ["psycopg2-binary", "sqlalchemy"] +rabbitmq = ["pika"] +redis = ["redis"] +selenium = ["selenium"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tomlkit" +version = "0.12.1" +description = "Style preserving TOML library" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomlkit-0.12.1-py3-none-any.whl", hash = "sha256:712cbd236609acc6a3e2e97253dfc52d4c2082982a88f61b640ecf0817eab899"}, + {file = "tomlkit-0.12.1.tar.gz", hash = "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86"}, +] + +[[package]] +name = "typing-extensions" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, +] + +[[package]] +name = "urllib3" +version = "2.0.5" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "urllib3-2.0.5-py3-none-any.whl", hash = "sha256:ef16afa8ba34a1f989db38e1dbbe0c302e4289a47856990d0682e374563ce35e"}, + {file = "urllib3-2.0.5.tar.gz", hash = "sha256:13abf37382ea2ce6fb744d4dad67838eec857c9f4f57009891805e0b5e123594"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "virtualenv" +version = "20.24.5" +description = "Virtual Python Environment builder" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.24.5-py3-none-any.whl", hash = "sha256:b80039f280f4919c77b30f1c23294ae357c4c8701042086e3fc005963e4e537b"}, + {file = "virtualenv-20.24.5.tar.gz", hash = "sha256:e8361967f6da6fbdf1426483bfe9fca8287c242ac0bc30429905721cefbff752"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<4" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[[package]] +name = "websocket-client" +version = "1.6.3" +description = "WebSocket client for Python with low level API options" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "websocket-client-1.6.3.tar.gz", hash = "sha256:3aad25d31284266bcfcfd1fd8a743f63282305a364b8d0948a43bd606acc652f"}, + {file = "websocket_client-1.6.3-py3-none-any.whl", hash = "sha256:6cfc30d051ebabb73a5fa246efdcc14c8fbebbd0330f8984ac3bb6d9edd2ad03"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "wrapt" +version = "1.15.0" +description = "Module for decorators, wrappers and monkey patching." +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +files = [ + {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"}, + {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"}, + {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"}, + {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"}, + {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"}, + {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"}, + {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"}, + {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"}, + {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"}, + {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"}, + {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"}, + {file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"}, + {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"}, + {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"}, + {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"}, + {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"}, + {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"}, + {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"}, + {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"}, + {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"}, + {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"}, + {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"}, + {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"}, + {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"}, + {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"}, + {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"}, + {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, +] [metadata] lock-version = "2.0" -python-versions = "*" -content-hash = "115cf985d932e9bf5f540555bbdd75decbb62cac81e399375fc19f6277f8c1d8" +python-versions = "3.10.*" +content-hash = "1332d545214a8b36c15b7497a604dd3cc91af759f5c3b738e06706acd35840a5" diff --git a/pyproject.toml b/pyproject.toml index 94356c3..d085a64 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,6 +39,11 @@ classifiers = [ # [tool.poetry.scripts] # [tool.poetry.dependencies] +[tool.poetry.dependencies] +python = "3.10.*" +click = "8.1.7" +docker = "6.1.3" + [tool.poetry.group.dev.dependencies] python = "^3.7" pytest = "7.4.0" From 712740f5dfa5073da94d34d863adcacd246dd674 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Mon, 2 Oct 2023 10:11:56 -0400 Subject: [PATCH 07/57] update --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index d085a64..1f8c325 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,7 +40,7 @@ classifiers = [ # [tool.poetry.scripts] # [tool.poetry.dependencies] [tool.poetry.dependencies] -python = "3.10.*" +python = "3.11.5" click = "8.1.7" docker = "6.1.3" From bc543e0597dedc5608714333357b55b2b8a623be Mon Sep 17 00:00:00 2001 From: mike dupont Date: Mon, 2 Oct 2023 10:37:08 -0400 Subject: [PATCH 08/57] act base --- .gitmodules | 3 +++ pyproject.toml | 2 +- vendor/act_base | 1 + 3 files changed, 5 insertions(+), 1 deletion(-) create mode 160000 vendor/act_base diff --git a/.gitmodules b/.gitmodules index dfce53a..6dcf0c3 100644 --- a/.gitmodules +++ b/.gitmodules @@ -13,3 +13,6 @@ [submodule "vendor/Auto-GPT-Benchmarks"] path = vendor/Auto-GPT-Benchmarks url = https://github.com/Significant-Gravitas/Auto-GPT-Benchmarks +[submodule "vendor/act_base"] + path = vendor/act_base + url = https://github.com/meta-introspector/act_base/ diff --git a/pyproject.toml b/pyproject.toml index 1f8c325..864896e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,7 +40,7 @@ classifiers = [ # [tool.poetry.scripts] # [tool.poetry.dependencies] [tool.poetry.dependencies] -python = "3.11.5" +python = "3.11.4" click = "8.1.7" docker = "6.1.3" diff --git a/vendor/act_base b/vendor/act_base new file mode 160000 index 0000000..dbd8163 --- /dev/null +++ b/vendor/act_base @@ -0,0 +1 @@ +Subproject commit dbd81631ba84fd0f5c252fc7df8fe99b1f6a4bec From dec467f64c6db1d9087f2461b35e4fd937bf7326 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Mon, 2 Oct 2023 11:53:18 -0400 Subject: [PATCH 09/57] dockerbuild --- docker-compose.yml | 9 ++-- dockerbuild/.gitignore | 1 + dockerbuild/Dockerfile | 49 ++++++++++++++++++++ dockerbuild/Readme.md | 1 + dockerbuild/__init__.py | 0 dockerbuild/constants.py | 30 ++++++++++++ dockerbuild/images.py | 99 ++++++++++++++++++++++++++++++++++++++++ dockerbuild/publish.py | 78 +++++++++++++++++++++++++++++++ 8 files changed, 261 insertions(+), 6 deletions(-) create mode 100644 dockerbuild/.gitignore create mode 100644 dockerbuild/Dockerfile create mode 100644 dockerbuild/Readme.md create mode 100644 dockerbuild/__init__.py create mode 100644 dockerbuild/constants.py create mode 100644 dockerbuild/images.py create mode 100644 dockerbuild/publish.py diff --git a/docker-compose.yml b/docker-compose.yml index 73d1454..979f9d5 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,13 +1,10 @@ version: '3' services: - ai_ticket: - #image: ai_ticket build: . - # context: . - #entrypoint: /bin/bash - #stdin_open: true # docker run -i - #tty: true # docker run -t + act_base: + # the base image of github action + build: vendor/act_base autogpt: entrypoint: bash -c "poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y --ai-name 'meta-autogpt' --ai-role 'you will introspect autogpt and reveal its internals via reflection and comprehension' --ai-goal 'Observe your behaviour' --ai-goal 'Reflect over your outcomes' --ai-goal 'Orient yourself to your knowledge' --ai-goal 'Decide on your next step' --ai-goal 'Act on your chosen next experiment' " diff --git a/dockerbuild/.gitignore b/dockerbuild/.gitignore new file mode 100644 index 0000000..bee8a64 --- /dev/null +++ b/dockerbuild/.gitignore @@ -0,0 +1 @@ +__pycache__ diff --git a/dockerbuild/Dockerfile b/dockerbuild/Dockerfile new file mode 100644 index 0000000..b8a9eaa --- /dev/null +++ b/dockerbuild/Dockerfile @@ -0,0 +1,49 @@ +# The Poetry installation is provided through the base image. Please check the +# base image if you interested in the details. +# Base image: https://hub.docker.com/r/pfeiffermax/python-poetry +# Dockerfile: https://github.com/max-pfeiffer/python-poetry/blob/main/build/Dockerfile +ARG BASE_IMAGE +FROM ${BASE_IMAGE} +ARG APPLICATION_SERVER_PORT + +LABEL maintainer="Max Pfeiffer " + + # https://docs.python.org/3/using/cmdline.html#envvar-PYTHONUNBUFFERED +ENV PYTHONUNBUFFERED=1 \ + # https://docs.python.org/3/using/cmdline.html#envvar-PYTHONDONTWRITEBYTECODE + PYTHONDONTWRITEBYTECODE=1 \ + PYTHONPATH=/application_root \ + # https://python-poetry.org/docs/configuration/#virtualenvsin-project + POETRY_VIRTUALENVS_IN_PROJECT=true \ + POETRY_CACHE_DIR="/application_root/.cache" \ + VIRTUAL_ENVIRONMENT_PATH="/application_root/.venv" \ + APPLICATION_SERVER_PORT=$APPLICATION_SERVER_PORT + +# Adding the virtual environment to PATH in order to "activate" it. +# https://docs.python.org/3/library/venv.html#how-venvs-work +ENV PATH="$VIRTUAL_ENVIRONMENT_PATH/bin:$PATH" + +# Principle of least privilege: create a new user for running the application +RUN groupadd -g 1001 python_application && \ + useradd -r -u 1001 -g python_application python_application + +# Set the WORKDIR to the application root. +# https://www.uvicorn.org/settings/#development +# https://docs.docker.com/engine/reference/builder/#workdir +WORKDIR ${PYTHONPATH} +RUN chown python_application:python_application ${PYTHONPATH} + +# Create cache directory and set permissions because user 1001 has no home +# and poetry cache directory. +# https://python-poetry.org/docs/configuration/#cache-directory +RUN mkdir ${POETRY_CACHE_DIR} && chown python_application:python_application ${POETRY_CACHE_DIR} + +# Document the exposed port +# https://docs.docker.com/engine/reference/builder/#expose +EXPOSE ${APPLICATION_SERVER_PORT} + +# Use the unpriveledged user to run the application +USER 1001 + +# Run the uvicorn application server. +CMD exec uvicorn --workers 1 --host 0.0.0.0 --port $APPLICATION_SERVER_PORT app.main:app diff --git a/dockerbuild/Readme.md b/dockerbuild/Readme.md new file mode 100644 index 0000000..502580d --- /dev/null +++ b/dockerbuild/Readme.md @@ -0,0 +1 @@ +taken from pfeiffermax/uvicorn-poetry diff --git a/dockerbuild/__init__.py b/dockerbuild/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/dockerbuild/constants.py b/dockerbuild/constants.py new file mode 100644 index 0000000..1f85ba6 --- /dev/null +++ b/dockerbuild/constants.py @@ -0,0 +1,30 @@ +TARGET_ARCHITECTURES: list[str] = [ + "python3.9.16-bullseye", + "python3.9.16-slim-bullseye", + "python3.10.10-bullseye", + "python3.10.10-slim-bullseye", +] +BASE_IMAGES: dict = { + TARGET_ARCHITECTURES[ + 0 + ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.9.16-bullseye@sha256:54037cfdca026b17e7a57664dff47bf04e7849074d3ab62271ecad0446ef0322", + TARGET_ARCHITECTURES[ + 1 + ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.9.16-slim-bullseye@sha256:c0b8d9c28c5717074c481dfdf1d8bd3aaa0b83a5e2a9e37c77be7af19d70d0ce", + TARGET_ARCHITECTURES[ + 2 + ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.10.10-bullseye@sha256:5a81c8c86132e504db2b7329f5e41cd32bddebf811d83a0d356edbca0d81135c", + TARGET_ARCHITECTURES[ + 3 + ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.10.10-slim-bullseye@sha256:289c6beb568991811629c91cdcb3841ceb95bf0a017c3e411f4b71e18043ef15", +} +PYTHON_VERSIONS: dict = { + TARGET_ARCHITECTURES[0]: "3.9.16", + TARGET_ARCHITECTURES[1]: "3.9.16", + TARGET_ARCHITECTURES[2]: "3.10.10", + TARGET_ARCHITECTURES[3]: "3.10.10", +} + +# As we are running the server with an unprivileged user, we need to use +# a high port. +APPLICATION_SERVER_PORT: str = "8000" diff --git a/dockerbuild/images.py b/dockerbuild/images.py new file mode 100644 index 0000000..75fb193 --- /dev/null +++ b/dockerbuild/images.py @@ -0,0 +1,99 @@ +from pathlib import Path +from typing import Optional + +import docker +from docker.models.images import Image + +from dockerbuild.constants import ( + BASE_IMAGES, + APPLICATION_SERVER_PORT, +) + + +class DockerImage: + def __init__( + self, + docker_client: docker.client, + target_architecture: str, + version: str, + ): + self.docker_client: docker.client = docker_client + self.dockerfile_name: str = "Dockerfile" + self.dockerfile_directory: Optional[Path] = None + self.image_name: Optional[str] = None + self.image_tag: Optional[str] = None + self.version: Optional[str] = version + self.target_architecture: str = target_architecture + + +class AITicketPoetryImage(DockerImage): + def __init__( + self, + docker_client: docker.client, + target_architecture: str, + version: str, + ): + super().__init__(docker_client, target_architecture, version) + # An image name is made up of slash-separated name components, + # optionally prefixed by a registry hostname. + # see: https://docs.docker.com/engine/reference/commandline/tag/ + self.image_name: str = "h4ckermike/ai-ticket:test_ai_ticket" + self.dockerfile_directory: Path = Path(__file__).parent.resolve() + + def build(self) -> Image: + self.image_tag: str = f"{self.version}-{self.target_architecture}" + + buildargs: dict[str, str] = { + "BASE_IMAGE": BASE_IMAGES[self.target_architecture], + "APPLICATION_SERVER_PORT": APPLICATION_SERVER_PORT, + } + + image: Image = self.docker_client.images.build( + path=str(self.dockerfile_directory), + dockerfile=self.dockerfile_name, + tag=f"{self.image_name}:{self.image_tag}", + buildargs=buildargs, + )[0] + return image + + +class ExampleApplicationImage(DockerImage): + def build( + self, + target: str, + base_image_tag: str, + ) -> Image: + self.image_tag = f"{self.version}-{self.target_architecture}" + + buildargs: dict[str, str] = { + "BASE_IMAGE": base_image_tag, + } + image: Image = self.docker_client.images.build( + path=str(self.dockerfile_directory), + dockerfile=self.dockerfile_name, + tag=f"{self.image_name}:{self.image_tag}", + target=target, + buildargs=buildargs, + )[0] + return image + + +class ActBaseImage(ExampleApplicationImage): + def __init__( + self, + docker_client: docker.client, + target_architecture: str, + version: str, + ): + super().__init__(docker_client, target_architecture, version) + # An image name is made up of slash-separated name components, + # optionally prefixed by a registry hostname. + # see: https://docs.docker.com/engine/reference/commandline/tag/ + self.image_name: str = "act_base" + self.dockerfile_directory: Path = ( + Path(__file__).parent.parent.resolve() + / "vendor" + / "act_base" + ) + + diff --git a/dockerbuild/publish.py b/dockerbuild/publish.py new file mode 100644 index 0000000..b76999d --- /dev/null +++ b/dockerbuild/publish.py @@ -0,0 +1,78 @@ +import click +import docker +from docker.client import DockerClient +from dockerbuild.constants import ( + TARGET_ARCHITECTURES, +) +from dockerbuild.images import ActBaseImage + +@click.command() +@click.option( + "--docker-hub-username", + envvar="DOCKER_HUB_USERNAME", + help="Docker Hub username", +) +@click.option( + "--docker-hub-password", + envvar="DOCKER_HUB_PASSWORD", + help="Docker Hub password", +) +@click.option( + "--version-tag", envvar="GIT_TAG_NAME", required=True, help="Version Tag" +) +@click.option("--registry", envvar="REGISTRY", help="Docker registry") +def main( + docker_hub_username: str, + docker_hub_password: str, + version_tag: str, + registry: str, +) -> None: + docker_client: DockerClient = docker.from_env() + + for target_architecture in TARGET_ARCHITECTURES: + new_uvicorn_gunicorn_poetry_image: UvicornPoetryImage = ( + ActBaseImage(docker_client, target_architecture, version_tag) + ) + + # Delete old existing images + for old_image in docker_client.images.list( + new_uvicorn_gunicorn_poetry_image.image_name + ): + for tag in old_image.tags: + docker_client.images.remove(tag, force=True) + + #new_uvicorn_gunicorn_poetry_image.build() + + # https://docs.docker.com/engine/reference/commandline/push/ + # https://docs.docker.com/engine/reference/commandline/tag/ + # https://docs.docker.com/engine/reference/commandline/image_tag/ + if docker_hub_username and docker_hub_password: + login_kwargs: dict = { + "username": docker_hub_username, + "password": docker_hub_password, + } + if registry: + login_kwargs["registry"] = registry + + docker_client.login(**login_kwargs) + + if registry: + repository: str = ( + f"{registry}/{new_uvicorn_gunicorn_poetry_image.image_name}" + ) + else: + repository: str = new_uvicorn_gunicorn_poetry_image.image_name + + for line in docker_client.images.push( + repository, + tag=new_uvicorn_gunicorn_poetry_image.image_tag, + stream=True, + decode=True, + ): + print(line) + docker_client.close() + + +if __name__ == "__main__": + # pylint: disable=no-value-for-parameter + main() From 68d97b0ee9b4dd3bbd3082ab0d71b948f90741e6 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Mon, 2 Oct 2023 14:20:26 -0400 Subject: [PATCH 10/57] working first build --- poetry_dependancies.txt | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 poetry_dependancies.txt diff --git a/poetry_dependancies.txt b/poetry_dependancies.txt new file mode 100644 index 0000000..aa6c9ff --- /dev/null +++ b/poetry_dependancies.txt @@ -0,0 +1,2 @@ +click +docker From efefdd664888122b2a458b5227384578c583d355 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Mon, 2 Oct 2023 14:20:35 -0400 Subject: [PATCH 11/57] v1 --- dockerbuild/constants.py | 41 ++++++++++++++++++++-------------------- dockerbuild/images.py | 15 +++++++++++++-- dockerbuild/publish.py | 7 ++++++- 3 files changed, 40 insertions(+), 23 deletions(-) diff --git a/dockerbuild/constants.py b/dockerbuild/constants.py index 1f85ba6..252c1e1 100644 --- a/dockerbuild/constants.py +++ b/dockerbuild/constants.py @@ -1,28 +1,29 @@ TARGET_ARCHITECTURES: list[str] = [ - "python3.9.16-bullseye", - "python3.9.16-slim-bullseye", - "python3.10.10-bullseye", - "python3.10.10-slim-bullseye", + "base_python3.10-nodejs20", # 3.10.13 + #"python3.9.16-bullseye", + #"python3.9.16-slim-bullseye", + #"python3.10.10-bullseye", + #"python3.10.10-slim-bullseye", ] BASE_IMAGES: dict = { - TARGET_ARCHITECTURES[ - 0 - ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.9.16-bullseye@sha256:54037cfdca026b17e7a57664dff47bf04e7849074d3ab62271ecad0446ef0322", - TARGET_ARCHITECTURES[ - 1 - ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.9.16-slim-bullseye@sha256:c0b8d9c28c5717074c481dfdf1d8bd3aaa0b83a5e2a9e37c77be7af19d70d0ce", - TARGET_ARCHITECTURES[ - 2 - ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.10.10-bullseye@sha256:5a81c8c86132e504db2b7329f5e41cd32bddebf811d83a0d356edbca0d81135c", - TARGET_ARCHITECTURES[ - 3 - ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.10.10-slim-bullseye@sha256:289c6beb568991811629c91cdcb3841ceb95bf0a017c3e411f4b71e18043ef15", + # TARGET_ARCHITECTURES[ + # 0 + # ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.9.16-bullseye@sha256:54037cfdca026b17e7a57664dff47bf04e7849074d3ab62271ecad0446ef0322", + # TARGET_ARCHITECTURES[ + # 1 + # ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.9.16-slim-bullseye@sha256:c0b8d9c28c5717074c481dfdf1d8bd3aaa0b83a5e2a9e37c77be7af19d70d0ce", + # TARGET_ARCHITECTURES[ + # 2 + # ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.10.10-bullseye@sha256:5a81c8c86132e504db2b7329f5e41cd32bddebf811d83a0d356edbca0d81135c", + # TARGET_ARCHITECTURES[ + # 3 + # ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.10.10-slim-bullseye@sha256:289c6beb568991811629c91cdcb3841ceb95bf0a017c3e411f4b71e18043ef15", } PYTHON_VERSIONS: dict = { - TARGET_ARCHITECTURES[0]: "3.9.16", - TARGET_ARCHITECTURES[1]: "3.9.16", - TARGET_ARCHITECTURES[2]: "3.10.10", - TARGET_ARCHITECTURES[3]: "3.10.10", +# TARGET_ARCHITECTURES[0]: "3.9.16", +# TARGET_ARCHITECTURES[1]: "3.9.16", +# TARGET_ARCHITECTURES[2]: "3.10.10", +# TARGET_ARCHITECTURES[3]: "3.10.10", } # As we are running the server with an unprivileged user, we need to use diff --git a/dockerbuild/images.py b/dockerbuild/images.py index 75fb193..1b24e3f 100644 --- a/dockerbuild/images.py +++ b/dockerbuild/images.py @@ -63,16 +63,27 @@ def build( target: str, base_image_tag: str, ) -> Image: + self.image_tag = f"{self.version}-{self.target_architecture}" buildargs: dict[str, str] = { "BASE_IMAGE": base_image_tag, } + print("buildargs",dict( + args=buildargs, + path=str(self.dockerfile_directory), + dockerfile=self.dockerfile_name, + tag=f"{self.image_name}:{self.image_tag}", + #target=target, + buildargs=buildargs, + )) + image: Image = self.docker_client.images.build( + path=str(self.dockerfile_directory), dockerfile=self.dockerfile_name, tag=f"{self.image_name}:{self.image_tag}", - target=target, + #target=target, buildargs=buildargs, )[0] return image @@ -89,7 +100,7 @@ def __init__( # An image name is made up of slash-separated name components, # optionally prefixed by a registry hostname. # see: https://docs.docker.com/engine/reference/commandline/tag/ - self.image_name: str = "act_base" + self.image_name: str = "h4ckermike/act_base" self.dockerfile_directory: Path = ( Path(__file__).parent.parent.resolve() / "vendor" diff --git a/dockerbuild/publish.py b/dockerbuild/publish.py index b76999d..f41f993 100644 --- a/dockerbuild/publish.py +++ b/dockerbuild/publish.py @@ -1,5 +1,6 @@ import click import docker +BASE_IMAGE="nikolaik/python-nodejs:python3.10-nodejs20" from docker.client import DockerClient from dockerbuild.constants import ( TARGET_ARCHITECTURES, @@ -39,9 +40,13 @@ def main( new_uvicorn_gunicorn_poetry_image.image_name ): for tag in old_image.tags: + print("tag",tag) docker_client.images.remove(tag, force=True) - #new_uvicorn_gunicorn_poetry_image.build() + new_uvicorn_gunicorn_poetry_image.build( + target=target_architecture, + base_image_tag=BASE_IMAGE, + ) # https://docs.docker.com/engine/reference/commandline/push/ # https://docs.docker.com/engine/reference/commandline/tag/ From 271e71020e914ec1552cc39a0457921a63edb119 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Mon, 2 Oct 2023 14:31:23 -0400 Subject: [PATCH 12/57] push --- push.sh | 1 + 1 file changed, 1 insertion(+) create mode 100644 push.sh diff --git a/push.sh b/push.sh new file mode 100644 index 0000000..61ede78 --- /dev/null +++ b/push.sh @@ -0,0 +1 @@ +python -m dockerbuild.publish --version-tag 12 From faebef6c90b1eecb45112afc06aeb7785e3436a4 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Tue, 3 Oct 2023 12:02:58 -0400 Subject: [PATCH 13/57] building autogpt in 12 seconds --- .gitmodules | 6 ++++ dockerbuild/baseimage.py | 71 ++++++++++++++++++++++++++++++++++++++++ vendor/basic_agent | 1 + vendor/python-poetry | 1 + 4 files changed, 79 insertions(+) create mode 100644 dockerbuild/baseimage.py create mode 160000 vendor/basic_agent create mode 160000 vendor/python-poetry diff --git a/.gitmodules b/.gitmodules index 6dcf0c3..40db044 100644 --- a/.gitmodules +++ b/.gitmodules @@ -16,3 +16,9 @@ [submodule "vendor/act_base"] path = vendor/act_base url = https://github.com/meta-introspector/act_base/ +[submodule "vendor/python-poetry"] + path = vendor/python-poetry + url = https://github.com/meta-introspector/python-poetry +[submodule "vendor/basic_agent"] + path = vendor/basic_agent + url = https://github.com/meta-introspector/basic_agent diff --git a/dockerbuild/baseimage.py b/dockerbuild/baseimage.py new file mode 100644 index 0000000..fe9dac4 --- /dev/null +++ b/dockerbuild/baseimage.py @@ -0,0 +1,71 @@ +from pathlib import Path +from typing import Optional + +import docker +from docker.models.images import Image + +from dockerbuild.constants import ( + BASE_IMAGES, + APPLICATION_SERVER_PORT, +) + +# +BASE_IMAGE="nikolaik/python-nodejs:python3.10-nodejs20" + +class DockerImage: + def __init__( + self, + docker_client: docker.client, + target_architecture: str, + version: str, + ): + self.docker_client: docker.client = docker_client + self.dockerfile_name: str = "Dockerfile" + self.dockerfile_directory: Optional[Path] = None + self.image_name: Optional[str] = None + self.image_tag: Optional[str] = None + self.version: Optional[str] = version + self.target_architecture: str = target_architecture + + def build(self) -> Image: + self.image_tag: str = f"{self.version}-{self.target_architecture}" + buildargs: dict[str, str] = { + "BASE_IMAGE": BASE_IMAGE, + "APPLICATION_SERVER_PORT": APPLICATION_SERVER_PORT, } + image: Image = self.docker_client.images.build( + path=str(self.dockerfile_directory), + dockerfile=self.dockerfile_name, + tag=f"{self.image_name}:{self.image_tag}", + buildargs=buildargs,)[0] + return image + +class ExampleApplicationImage(DockerImage): + def build( + self, + target: str, + base_image_tag: str, + ) -> Image: + + self.image_tag = f"{self.version}-{self.target_architecture}" + + buildargs: dict[str, str] = { + "BASE_IMAGE": base_image_tag, + } + print("buildargs",dict( + args=buildargs, + path=str(self.dockerfile_directory), + dockerfile=self.dockerfile_name, + tag=f"{self.image_name}:{self.image_tag}", + #target=target, + buildargs=buildargs, + )) + + image: Image = self.docker_client.images.build( + + path=str(self.dockerfile_directory), + dockerfile=self.dockerfile_name, + tag=f"{self.image_name}:{self.image_tag}", + #target=target, + buildargs=buildargs, + )[0] + return image diff --git a/vendor/basic_agent b/vendor/basic_agent new file mode 160000 index 0000000..43cd887 --- /dev/null +++ b/vendor/basic_agent @@ -0,0 +1 @@ +Subproject commit 43cd887eeca0b7b473d5553db9492976bd9e8f5a diff --git a/vendor/python-poetry b/vendor/python-poetry new file mode 160000 index 0000000..374fc4a --- /dev/null +++ b/vendor/python-poetry @@ -0,0 +1 @@ +Subproject commit 374fc4af14593d26bb5c8c1393849c3549fc97ee From 80f985e84c59be4629e01d4ce7db3852b287756f Mon Sep 17 00:00:00 2001 From: mike dupont Date: Tue, 3 Oct 2023 12:07:00 -0400 Subject: [PATCH 14/57] now building --- Dockerfile | 29 +++++------- Makefile | 3 ++ README.md | 10 +++- docker-compose.yml | 66 ++++++++++++++++++-------- dockerbuild/Dockerfile | 49 ------------------- dockerbuild/images.py | 105 ++++++++++------------------------------- dockerbuild/publish.py | 64 ++++++++++++------------- pyproject.toml | 4 +- 8 files changed, 127 insertions(+), 203 deletions(-) delete mode 100644 dockerbuild/Dockerfile diff --git a/Dockerfile b/Dockerfile index 3517970..4545601 100644 --- a/Dockerfile +++ b/Dockerfile @@ -4,7 +4,8 @@ # Dockerfile: https://github.com/max-pfeiffer/python-poetry/blob/main/build/Dockerfile ARG BASE_IMAGE FROM ${BASE_IMAGE} -ARG APPLICATION_SERVER_PORT +RUN apt update +RUN apt install -y git LABEL maintainer="Mike DuPont " @@ -16,36 +17,28 @@ ENV PYTHONUNBUFFERED=1 \ POETRY_VIRTUALENVS_IN_PROJECT=true \ POETRY_CACHE_DIR="/application_root/.cache" \ VIRTUAL_ENVIRONMENT_PATH="/application_root/.venv" \ - APPLICATION_SERVER_PORT=$APPLICATION_SERVER_PORT + POETRY_HOME="/opt/poetry" +# https://python-poetry.org/docs/#installing-manually +RUN python -m venv ${VIRTUAL_ENVIRONMENT_PATH} + # Adding the virtual environment to PATH in order to "activate" it. # https://docs.python.org/3/library/venv.html#how-venvs-work ENV PATH="$VIRTUAL_ENVIRONMENT_PATH/bin:$PATH" -# Principle of least privilege: create a new user for running the application -RUN groupadd -g 1001 python_application && \ - useradd -r -u 1001 -g python_application python_application - # Set the WORKDIR to the application root. # https://www.uvicorn.org/settings/#development # https://docs.docker.com/engine/reference/builder/#workdir WORKDIR ${PYTHONPATH} -RUN chown python_application:python_application ${PYTHONPATH} -# Create cache directory and set permissions because user 1001 has no home -# and poetry cache directory. # https://python-poetry.org/docs/configuration/#cache-directory -RUN mkdir ${POETRY_CACHE_DIR} && chown python_application:python_application ${POETRY_CACHE_DIR} - -# Use the unpriveledged user to run the application -USER 1001 +RUN mkdir ${POETRY_CACHE_DIR} WORKDIR /opt/ai-ticket COPY pyproject.toml /opt/ai-ticket/ COPY setup.cfg /opt/ai-ticket/ +COPY README.md /opt/ai-ticket/ COPY requirements.txt /opt/ai-ticket/ COPY ./src/ /opt/ai-ticket/src/ -RUN pip install /opt/ai-ticket/ - -RUN apt update -RUN apt install -y git -RUN pip install --trusted-host pypi.python.org -r requirements.txt + +RUN ls ${VIRTUAL_ENVIRONMENT_PATH}/bin/activate +RUN pip install /opt/ai-ticket/ && pip install --trusted-host pypi.python.org -r requirements.txt diff --git a/Makefile b/Makefile index 53433c5..6395f33 100644 --- a/Makefile +++ b/Makefile @@ -1,2 +1,5 @@ +test_build: + sudo docker-compose build + test: bash ./runact.sh diff --git a/README.md b/README.md index 66e667f..43a8cfd 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# ai-ticket +# AI-Ticket The AI Ticket system to handle the AI with tickets. Human Powered AI-Ops to Help you with the last mile of your AI code generated system. @@ -35,3 +35,11 @@ Welcome to our innovative user-driven workflow, designed to empower you to inter - Be mindful of providing timely and appropriate responses to prevent workflow stalls. This user-driven ticket-based workflow offers flexibility, control, and a unique way to collaborate with our system. Dive in, generate tickets, and explore the possibilities of this interactive and dynamic approach. + + +# Infrastructure + +The docker images are organized like this : + +* act_base is the foundation of all actions. +* poetry_base is contains the poetry magic layer with shared containers. diff --git a/docker-compose.yml b/docker-compose.yml index 979f9d5..7c9876e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,12 +1,48 @@ version: '3' services: - ai_ticket: - build: . - act_base: - # the base image of github action + + act_base: #root base of action build: vendor/act_base + + poetry_base: # use poetry + build: + context: vendor/python-poetry/build + args: + OFFICIAL_PYTHON_IMAGE: act_base + POETRY_VERSION: 1.6.1 + depends_on: + - act_base + + ai_ticket: # the ticket to unite + build: + context: . + args: + BASE_IMAGE: act_base + depends_on: + - poetry_base + + basic_agent: #basic agnet + build: + context: vendor/basic_agent/ + args: + OFFICIAL_PYTHON_IMAGE: act_base + depends_on: + - ai_ticket + + mockopenai: # interface + depends_on: + - ai_ticket + environment: + - GITHUB_PAT=${GITHUB_PAT} + - GITHUB_REPO=${GITHUB_REPO} + build: + context: vendor/lollms/ + args: + OFFICIAL_PYTHON_IMAGE: localhost/ai-ticket_ai_ticket:latest + ports: + - "5000:8080" - autogpt: + autogpt: #the beast entrypoint: bash -c "poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y --ai-name 'meta-autogpt' --ai-role 'you will introspect autogpt and reveal its internals via reflection and comprehension' --ai-goal 'Observe your behaviour' --ai-goal 'Reflect over your outcomes' --ai-goal 'Orient yourself to your knowledge' --ai-goal 'Decide on your next step' --ai-goal 'Act on your chosen next experiment' " # uncomment thse next 3 lines for debugging @@ -20,20 +56,10 @@ services: - OPENAI_API_BASE=http://mockopenai:8080/v1 build: context: vendor/Auto-GPT/ - depends_on: - - ai_ticket - - mockopenai + dockerfile: slim/Dockerfile + args: + OFFICIAL_PYTHON_IMAGE: localhost/ai-ticket_ai_ticket:latest - mockopenai: depends_on: - - ai_ticket - - environment: - - GITHUB_PAT=${GITHUB_PAT} - - GITHUB_REPO=${GITHUB_REPO} - - build: - context: vendor/lollms/ - ports: - - "5000:8080" - + - basic_agent + - mockopenai diff --git a/dockerbuild/Dockerfile b/dockerbuild/Dockerfile deleted file mode 100644 index b8a9eaa..0000000 --- a/dockerbuild/Dockerfile +++ /dev/null @@ -1,49 +0,0 @@ -# The Poetry installation is provided through the base image. Please check the -# base image if you interested in the details. -# Base image: https://hub.docker.com/r/pfeiffermax/python-poetry -# Dockerfile: https://github.com/max-pfeiffer/python-poetry/blob/main/build/Dockerfile -ARG BASE_IMAGE -FROM ${BASE_IMAGE} -ARG APPLICATION_SERVER_PORT - -LABEL maintainer="Max Pfeiffer " - - # https://docs.python.org/3/using/cmdline.html#envvar-PYTHONUNBUFFERED -ENV PYTHONUNBUFFERED=1 \ - # https://docs.python.org/3/using/cmdline.html#envvar-PYTHONDONTWRITEBYTECODE - PYTHONDONTWRITEBYTECODE=1 \ - PYTHONPATH=/application_root \ - # https://python-poetry.org/docs/configuration/#virtualenvsin-project - POETRY_VIRTUALENVS_IN_PROJECT=true \ - POETRY_CACHE_DIR="/application_root/.cache" \ - VIRTUAL_ENVIRONMENT_PATH="/application_root/.venv" \ - APPLICATION_SERVER_PORT=$APPLICATION_SERVER_PORT - -# Adding the virtual environment to PATH in order to "activate" it. -# https://docs.python.org/3/library/venv.html#how-venvs-work -ENV PATH="$VIRTUAL_ENVIRONMENT_PATH/bin:$PATH" - -# Principle of least privilege: create a new user for running the application -RUN groupadd -g 1001 python_application && \ - useradd -r -u 1001 -g python_application python_application - -# Set the WORKDIR to the application root. -# https://www.uvicorn.org/settings/#development -# https://docs.docker.com/engine/reference/builder/#workdir -WORKDIR ${PYTHONPATH} -RUN chown python_application:python_application ${PYTHONPATH} - -# Create cache directory and set permissions because user 1001 has no home -# and poetry cache directory. -# https://python-poetry.org/docs/configuration/#cache-directory -RUN mkdir ${POETRY_CACHE_DIR} && chown python_application:python_application ${POETRY_CACHE_DIR} - -# Document the exposed port -# https://docs.docker.com/engine/reference/builder/#expose -EXPOSE ${APPLICATION_SERVER_PORT} - -# Use the unpriveledged user to run the application -USER 1001 - -# Run the uvicorn application server. -CMD exec uvicorn --workers 1 --host 0.0.0.0 --port $APPLICATION_SERVER_PORT app.main:app diff --git a/dockerbuild/images.py b/dockerbuild/images.py index 1b24e3f..9dfdc72 100644 --- a/dockerbuild/images.py +++ b/dockerbuild/images.py @@ -1,32 +1,10 @@ -from pathlib import Path -from typing import Optional +from dockerbuild.baseimage import ExampleApplicationImage import docker +from pathlib import Path from docker.models.images import Image -from dockerbuild.constants import ( - BASE_IMAGES, - APPLICATION_SERVER_PORT, -) - - -class DockerImage: - def __init__( - self, - docker_client: docker.client, - target_architecture: str, - version: str, - ): - self.docker_client: docker.client = docker_client - self.dockerfile_name: str = "Dockerfile" - self.dockerfile_directory: Optional[Path] = None - self.image_name: Optional[str] = None - self.image_tag: Optional[str] = None - self.version: Optional[str] = version - self.target_architecture: str = target_architecture - - -class AITicketPoetryImage(DockerImage): +class AITicketPoetryImage(ExampleApplicationImage): def __init__( self, docker_client: docker.client, @@ -37,59 +15,11 @@ def __init__( # An image name is made up of slash-separated name components, # optionally prefixed by a registry hostname. # see: https://docs.docker.com/engine/reference/commandline/tag/ - self.image_name: str = "h4ckermike/ai-ticket:test_ai_ticket" - self.dockerfile_directory: Path = Path(__file__).parent.resolve() - - def build(self) -> Image: - self.image_tag: str = f"{self.version}-{self.target_architecture}" - - buildargs: dict[str, str] = { - "BASE_IMAGE": BASE_IMAGES[self.target_architecture], - "APPLICATION_SERVER_PORT": APPLICATION_SERVER_PORT, - } - - image: Image = self.docker_client.images.build( - path=str(self.dockerfile_directory), - dockerfile=self.dockerfile_name, - tag=f"{self.image_name}:{self.image_tag}", - buildargs=buildargs, - )[0] - return image - - -class ExampleApplicationImage(DockerImage): - def build( - self, - target: str, - base_image_tag: str, - ) -> Image: - - self.image_tag = f"{self.version}-{self.target_architecture}" + self.image_name: str = "h4ckermike/ai-ticket" + self.dockerfile_directory: Path = Path(__file__).parent.parent.resolve() - buildargs: dict[str, str] = { - "BASE_IMAGE": base_image_tag, - } - print("buildargs",dict( - args=buildargs, - path=str(self.dockerfile_directory), - dockerfile=self.dockerfile_name, - tag=f"{self.image_name}:{self.image_tag}", - #target=target, - buildargs=buildargs, - )) - - image: Image = self.docker_client.images.build( - - path=str(self.dockerfile_directory), - dockerfile=self.dockerfile_name, - tag=f"{self.image_name}:{self.image_tag}", - #target=target, - buildargs=buildargs, - )[0] - return image - -class ActBaseImage(ExampleApplicationImage): +class AutoGptImage(ExampleApplicationImage): def __init__( self, docker_client: docker.client, @@ -97,14 +27,31 @@ def __init__( version: str, ): super().__init__(docker_client, target_architecture, version) - # An image name is made up of slash-separated name components, - # optionally prefixed by a registry hostname. - # see: https://docs.docker.com/engine/reference/commandline/tag/ self.image_name: str = "h4ckermike/act_base" self.dockerfile_directory: Path = ( Path(__file__).parent.parent.resolve() / "vendor" / "act_base" ) +class ActBaseImage(ExampleApplicationImage): + def __init__( self, docker_client: docker.client, target_architecture: str, version: str, + ): + super().__init__(docker_client, target_architecture, version) + self.image_name: str = "h4ckermike/autogpt" + self.dockerfile_directory: Path = ( + Path(__file__).parent.parent.resolve() + / "vendor" + / "Auto-GPT/" + ) + +class OpenAIImage(ExampleApplicationImage): + def __init__( self, docker_client: docker.client,target_architecture: str,version: str ): + super().__init__(docker_client, target_architecture, version) + self.image_name: str = "h4ckermike/mockopenai" + self.dockerfile_directory: Path = ( + Path(__file__).parent.parent.resolve() + / "vendor" + / "lollms/" + ) diff --git a/dockerbuild/publish.py b/dockerbuild/publish.py index f41f993..714b77a 100644 --- a/dockerbuild/publish.py +++ b/dockerbuild/publish.py @@ -1,11 +1,13 @@ import click import docker -BASE_IMAGE="nikolaik/python-nodejs:python3.10-nodejs20" + from docker.client import DockerClient + from dockerbuild.constants import ( TARGET_ARCHITECTURES, ) -from dockerbuild.images import ActBaseImage +import dockerbuild.images +from dockerbuild.baseimage import BASE_IMAGE @click.command() @click.option( @@ -28,29 +30,24 @@ def main( version_tag: str, registry: str, ) -> None: - docker_client: DockerClient = docker.from_env() - + docker_client: DockerClient = docker.from_env() for target_architecture in TARGET_ARCHITECTURES: - new_uvicorn_gunicorn_poetry_image: UvicornPoetryImage = ( - ActBaseImage(docker_client, target_architecture, version_tag) - ) - - # Delete old existing images - for old_image in docker_client.images.list( - new_uvicorn_gunicorn_poetry_image.image_name - ): - for tag in old_image.tags: - print("tag",tag) - docker_client.images.remove(tag, force=True) - - new_uvicorn_gunicorn_poetry_image.build( - target=target_architecture, - base_image_tag=BASE_IMAGE, - ) - - # https://docs.docker.com/engine/reference/commandline/push/ - # https://docs.docker.com/engine/reference/commandline/tag/ - # https://docs.docker.com/engine/reference/commandline/image_tag/ + #import pdb + #pdb.set_trace() + for x in dir(dockerbuild.images) : + cls = getattr(dockerbuild.images,x) + if not isinstance(cls,type): + continue + print(cls) + new_uvicorn_gunicorn_poetry_image = (cls(docker_client,target_architecture,version_tag)) + if new_uvicorn_gunicorn_poetry_image: + for old_image in docker_client.images.list(new_uvicorn_gunicorn_poetry_image.image_name): + for tag in old_image.tags: + print("tag",tag) + docker_client.images.remove(tag, force=True) + new_uvicorn_gunicorn_poetry_image.build( + target=target_architecture,base_image_tag=BASE_IMAGE + ) if docker_hub_username and docker_hub_password: login_kwargs: dict = { "username": docker_hub_username, @@ -58,9 +55,7 @@ def main( } if registry: login_kwargs["registry"] = registry - docker_client.login(**login_kwargs) - if registry: repository: str = ( f"{registry}/{new_uvicorn_gunicorn_poetry_image.image_name}" @@ -68,14 +63,15 @@ def main( else: repository: str = new_uvicorn_gunicorn_poetry_image.image_name - for line in docker_client.images.push( - repository, - tag=new_uvicorn_gunicorn_poetry_image.image_tag, - stream=True, - decode=True, - ): - print(line) - docker_client.close() + if False: + for line in docker_client.images.push( + repository, + tag=new_uvicorn_gunicorn_poetry_image.image_tag, + stream=True, + decode=True, + ): + print(line) + docker_client.close() if __name__ == "__main__": diff --git a/pyproject.toml b/pyproject.toml index 864896e..2158d50 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -python = "^3.7" +#python = "^3.11" requires = [ "setuptools>=42", "wheel", @@ -40,7 +40,7 @@ classifiers = [ # [tool.poetry.scripts] # [tool.poetry.dependencies] [tool.poetry.dependencies] -python = "3.11.4" +#python = "3.11.4" click = "8.1.7" docker = "6.1.3" From bb65c8fee38b7d090b4723876ff087cb8e64337e Mon Sep 17 00:00:00 2001 From: mike dupont Date: Tue, 3 Oct 2023 12:10:35 -0400 Subject: [PATCH 15/57] update --- vendor/python-poetry | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/python-poetry b/vendor/python-poetry index 374fc4a..060f378 160000 --- a/vendor/python-poetry +++ b/vendor/python-poetry @@ -1 +1 @@ -Subproject commit 374fc4af14593d26bb5c8c1393849c3549fc97ee +Subproject commit 060f378c73140a999201ca3425ddb00a1b5bcaa2 From fc7465156d6a41bad459eb50ea67f4e6d3c2662a Mon Sep 17 00:00:00 2001 From: Mike DuPont Date: Tue, 3 Oct 2023 12:22:15 -0400 Subject: [PATCH 16/57] Update pipeline.yml --- .github/workflows/pipeline.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pipeline.yml b/.github/workflows/pipeline.yml index a51aabc..585ee4f 100644 --- a/.github/workflows/pipeline.yml +++ b/.github/workflows/pipeline.yml @@ -30,7 +30,7 @@ jobs: - name: pylint run: | source .venv/bin/activate - pylint build tests + pylint build tests || echo ignore for now - name: black run: | source .venv/bin/activate From 3d96c18557f28bf083b3d24f6077c1534ffd0489 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Tue, 3 Oct 2023 12:35:58 -0400 Subject: [PATCH 17/57] blac --- src/ai_ticket/backends/pygithub.py | 47 +++++++++++++++++------------- src/ai_ticket/events/inference.py | 21 ++++++++----- tests/example1.py | 4 ++- tests/example2.py | 26 +++++++++++++++-- vendor/basic_agent | 2 +- 5 files changed, 66 insertions(+), 34 deletions(-) diff --git a/src/ai_ticket/backends/pygithub.py b/src/ai_ticket/backends/pygithub.py index a36d1ca..cd42fc6 100644 --- a/src/ai_ticket/backends/pygithub.py +++ b/src/ai_ticket/backends/pygithub.py @@ -11,65 +11,70 @@ repo = None g = None + def load_env(): load_dotenv() git_hub_pat = os.getenv("GITHUB_PAT") git_hub_repo = os.getenv("GITHUB_REPO") - load_repo(git_hub_repo,git_hub_pat) + load_repo(git_hub_repo, git_hub_pat) + -def load_repo(git_hub_repo,git_hub_pat): +def load_repo(git_hub_repo, git_hub_pat): global g global repo auth = Auth.Token(git_hub_pat) g = Github(auth=auth) repo = g.get_repo(git_hub_repo) - return (g,repo) + return (g, repo) + def get_issues(): issues = repo.get_issues() for issue in issues: yield issue + def get_existing_ticket(event): """Find the first ticket that matches""" body = event.get("content") - #print("DEBUG get ",event) + # print("DEBUG get ",event) for issue in get_issues(): data = issue.body - #print("DEBUG check ",data) + # print("DEBUG check ",data) name = find_name(data) if name: return issue return None + def create_new_ticket(event): # repo = g.get_repo("PyGithub/PyGithub") body = event.get("content") title = "Auto issue" try: - body = "```" + json.dumps(json.loads(body),indent=2) + "```" + body = "```" + json.dumps(json.loads(body), indent=2) + "```" except Exception as e: print(e) return repo.create_issue(title=title, body=body) - #Issue(title="This is a new issue", number=XXX) - + # Issue(title="This is a new issue", number=XXX) + + def create_new_comment(ticket, event): body = event.get("content") - print("DEBUG",body) - #try: - body = "```" + json.dumps(json.loads(body),indent=2) + "```" - #except Exception as e: - #print(e) - ticket_object = ticket#repo.get_issue(int(ticket.split("/")[-1])) - comment = ticket_object.create_comment(body) - print("created comment",comment) - #print(dir(comment)) + print("DEBUG", body) + # try: + body = "```" + json.dumps(json.loads(body), indent=2) + "```" + # except Exception as e: + # print(e) + ticket_object = ticket # repo.get_issue(int(ticket.split("/")[-1])) + comment = ticket_object.create_comment(body) + print("created comment", comment) + # print(dir(comment)) return comment.url - #Issue(title="This is a new issue", number=XXX) - -# To close connections after use -#g.close() + # Issue(title="This is a new issue", number=XXX) +# To close connections after use +# g.close() diff --git a/src/ai_ticket/events/inference.py b/src/ai_ticket/events/inference.py index 2c7604a..44ae084 100644 --- a/src/ai_ticket/events/inference.py +++ b/src/ai_ticket/events/inference.py @@ -1,28 +1,33 @@ # ai_ticket.events.inference import ai_ticket.backends.pygithub -ai_ticket.backends.pygithub.load_env() #setup standard env + +ai_ticket.backends.pygithub.load_env() # setup standard env + def get_existing_ticket(event): return get_backend().get_existing_ticket(event) + def get_backend(): return ai_ticket.backends.pygithub + def create_new_ticket(event): return get_backend().create_new_ticket(event) + def create_new_comment(ticket, event): return get_backend().create_new_comment(ticket, event) + def on_event(event): - #print(event) - + # print(event) + ticket = get_existing_ticket(event) - + if not ticket: - # No existing ticket found, create a new one - ticket = create_new_ticket(event) + # No existing ticket found, create a new one + ticket = create_new_ticket(event) - return create_new_comment(ticket, event ) - + return create_new_comment(ticket, event) diff --git a/tests/example1.py b/tests/example1.py index 459bdec..ad026d2 100644 --- a/tests/example1.py +++ b/tests/example1.py @@ -1,3 +1,5 @@ from ai_ticket import find_name -assert(find_name("""```{"messages": [{"role": "system", "content": "You are Entrepreneur-GPT, an AI designed to autonomously develop and run businesses with the.\n\nYour decisions must always be made independently without seeking user assistance. Play to your strengths as an LLM and pursue simple strategies with no legal complications.\n\n## Constraints\nYou operate within the following constraints:\n1. Exclusively use the commands listed below.\n2. You can only act proactively, and are unable to start background jobs or set up webhooks for yourself. Take this into account when planning your actions.\n3. You are unable to interact with physical objects. If this is absolutely necessary to fulfill a task or objective or to complete a step, you must ask the user to do it for you. If the user refuses this, and there is no other way to achieve your goals, you must terminate to avoid wasting time and energy.\n\n## Resources\nYou can leverage access to the following resources:\n1. Internet access for searches and information gathering.\n2. The ability to read and write files.\n3. You are a Large Language Model, trained on millions of pages of text, including a lot of factual knowledge. Make use of this factual knowledge to avoid unnecessary gathering of information.\n\n## Commands\nYou have access to the following commands:\n1. execute_python_code: Executes the given Python code inside a single-use Docker container with access to your workspace folder. Params: (code: string)\n2. execute_python_file: Execute an existing Python file inside a single-use Docker container with access to your workspace folder. Params: (filename: string, args: Optional[list[str]])\n3. list_folder: List the items in a folder. Params: (folder: string)\n4. open_file: Open a file for editing or continued viewing; create it if it does not exist yet. Note: if you only need to read or write a file once, use `write_to_file` instead. Params: (file_path: string)\n5. open_folder: Open a folder to keep track of its content. Params: (path: string)\n6. read_file: Read an existing file. Params: (filename: string)\n7. write_file: Write a file, creating it if necessary. If the file exists, it is overwritten. Params: (filename: string, contents: string)\n8. ask_user: If you need more details or information regarding the given goals, you can ask the user for input. Params: (question: string)\n9. web_search: Searches the web. Params: (query: string)\n10. read_webpage: Read a webpage, and extract specific information from it if a question is specified. If you are looking to extract specific information from the webpage, you should specify a question. Params: (url: string, question: Optional[string])\n11. finish: Use this to shut down once you have accomplished all of your goals, or when there are insurmountable problems that make it impossible for you to finish your task. Params: (reason: string)\n\n## Best practices\n1. Continuously review and analyze your actions to ensure you are performing to the best of your abilities.\n2. Constructively self-criticize your big-picture behavior constantly.\n3. Reflect on past decisions and strategies to refine your approach.\n4. Every command has a cost, so be smart and efficient. Aim to complete tasks in the least number of steps.\n5. Only make use of your information gathering abilities to find information that you don't yet have knowledge of.\n\n## Goals\nFor your task, you must fulfill the following goals:\n1. Increase net worth\n2. Grow Twitter Account\n3. Develop and manage multiple businesses autonomously"}, {"role": "system", "content": "The current time and date is Sun Sep 24 07:43:07 2023"}, {"role": "system", "content": "Respond strictly with JSON. The JSON should be compatible with the TypeScript type `Response` from the following:\n```ts\ninterface Response {\nthoughts: {\n// Thoughts\ntext: string;\nreasoning: string;\n// Short markdown-style bullet list that conveys the long-term plan\nplan: string;\n// Constructive self-criticism\ncriticism: string;\n// Summary of thoughts to say to the user\nspeak: string;\n};\ncommand: {\nname: string;\nargs: Record;\n};\n}\n```"}, {"role": "user", "content": "Determine exactly one command to use next based on the given goals and the progress you have made so far, and respond using the JSON schema specified previously:"}], "model": "gpt-3.5-turbo", "temperature": 0.0, "max_tokens": 3175}```""")) +assert find_name( + """```{"messages": [{"role": "system", "content": "You are Entrepreneur-GPT, an AI designed to autonomously develop and run businesses with the.\n\nYour decisions must always be made independently without seeking user assistance. Play to your strengths as an LLM and pursue simple strategies with no legal complications.\n\n## Constraints\nYou operate within the following constraints:\n1. Exclusively use the commands listed below.\n2. You can only act proactively, and are unable to start background jobs or set up webhooks for yourself. Take this into account when planning your actions.\n3. You are unable to interact with physical objects. If this is absolutely necessary to fulfill a task or objective or to complete a step, you must ask the user to do it for you. If the user refuses this, and there is no other way to achieve your goals, you must terminate to avoid wasting time and energy.\n\n## Resources\nYou can leverage access to the following resources:\n1. Internet access for searches and information gathering.\n2. The ability to read and write files.\n3. You are a Large Language Model, trained on millions of pages of text, including a lot of factual knowledge. Make use of this factual knowledge to avoid unnecessary gathering of information.\n\n## Commands\nYou have access to the following commands:\n1. execute_python_code: Executes the given Python code inside a single-use Docker container with access to your workspace folder. Params: (code: string)\n2. execute_python_file: Execute an existing Python file inside a single-use Docker container with access to your workspace folder. Params: (filename: string, args: Optional[list[str]])\n3. list_folder: List the items in a folder. Params: (folder: string)\n4. open_file: Open a file for editing or continued viewing; create it if it does not exist yet. Note: if you only need to read or write a file once, use `write_to_file` instead. Params: (file_path: string)\n5. open_folder: Open a folder to keep track of its content. Params: (path: string)\n6. read_file: Read an existing file. Params: (filename: string)\n7. write_file: Write a file, creating it if necessary. If the file exists, it is overwritten. Params: (filename: string, contents: string)\n8. ask_user: If you need more details or information regarding the given goals, you can ask the user for input. Params: (question: string)\n9. web_search: Searches the web. Params: (query: string)\n10. read_webpage: Read a webpage, and extract specific information from it if a question is specified. If you are looking to extract specific information from the webpage, you should specify a question. Params: (url: string, question: Optional[string])\n11. finish: Use this to shut down once you have accomplished all of your goals, or when there are insurmountable problems that make it impossible for you to finish your task. Params: (reason: string)\n\n## Best practices\n1. Continuously review and analyze your actions to ensure you are performing to the best of your abilities.\n2. Constructively self-criticize your big-picture behavior constantly.\n3. Reflect on past decisions and strategies to refine your approach.\n4. Every command has a cost, so be smart and efficient. Aim to complete tasks in the least number of steps.\n5. Only make use of your information gathering abilities to find information that you don't yet have knowledge of.\n\n## Goals\nFor your task, you must fulfill the following goals:\n1. Increase net worth\n2. Grow Twitter Account\n3. Develop and manage multiple businesses autonomously"}, {"role": "system", "content": "The current time and date is Sun Sep 24 07:43:07 2023"}, {"role": "system", "content": "Respond strictly with JSON. The JSON should be compatible with the TypeScript type `Response` from the following:\n```ts\ninterface Response {\nthoughts: {\n// Thoughts\ntext: string;\nreasoning: string;\n// Short markdown-style bullet list that conveys the long-term plan\nplan: string;\n// Constructive self-criticism\ncriticism: string;\n// Summary of thoughts to say to the user\nspeak: string;\n};\ncommand: {\nname: string;\nargs: Record;\n};\n}\n```"}, {"role": "user", "content": "Determine exactly one command to use next based on the given goals and the progress you have made so far, and respond using the JSON schema specified previously:"}], "model": "gpt-3.5-turbo", "temperature": 0.0, "max_tokens": 3175}```""" +) diff --git a/tests/example2.py b/tests/example2.py index 06a42da..c04f0d1 100644 --- a/tests/example2.py +++ b/tests/example2.py @@ -1,9 +1,29 @@ import json -#from ai_ticket import on_event + +# from ai_ticket import on_event from ai_ticket.events.inference import on_event -data = {"messages": [{"role": "system", "content": "You are Entrepreneur-GPT, an AI designed to autonomously develop and run businesses. Your decisions must always be made independently without seeking user assistance. Play to your strengths as an LLM and pursue simple strategies with no legal complications. ## Constraints You operate within the following constraints: 1. Exclusively use the commands listed below. 2. You can only act proactively, and are unable to start background jobs or set up webhooks for yourself. Take this into account when planning your actions. 3. You are unable to interact with physical objects. If this is absolutely necessary to fulfill a task or objective or to complete a step, you must ask the user to do it for you. If the user refuses this, and there is no other way to achieve your goals, you must terminate to avoid wasting time and energy. ## Resources You can leverage access to the following resources: 1. Internet access for searches and information gathering. 2. The ability to read and write files. 3. You are a Large Language Model, trained on millions of pages of text, including a lot of factual knowledge. Make use of this factual knowledge to avoid unnecessary gathering of information. ## Commands You have access to the following commands: 1. execute_python_code: Executes the given Python code inside a single-use Docker container with access to your workspace folder. Params: (code: string) 2. execute_python_file: Execute an existing Python file inside a single-use Docker container with access to your workspace folder. Params: (filename: string, args: Optional[list[str]]) 3. list_folder: List the items in a folder. Params: (folder: string) 4. open_file: Open a file for editing or continued viewing; create it if it does not exist yet. Note: if you only need to read or write a file once, use `write_to_file` instead. Params: (file_path: string) 5. open_folder: Open a folder to keep track of its content. Params: (path: string) 6. read_file: Read an existing file. Params: (filename: string) 7. write_file: Write a file, creating it if necessary. If the file exists, it is overwritten. Params: (filename: string, contents: string) 8. ask_user: If you need more details or information regarding the given goals, you can ask the user for input. Params: (question: string) 9. web_search: Searches the web. Params: (query: string) 10. read_webpage: Read a webpage, and extract specific information from it if a question is specified. If you are looking to extract specific information from the webpage, you should specify a question. Params: (url: string, question: Optional[string]) 11. finish: Use this to shut down once you have accomplished all of your goals, or when there are insurmountable problems that make it impossible for you to finish your task. Params: (reason: string) ## Best practices 1. Continuously review and analyze your actions to ensure you are performing to the best of your abilities. 2. Constructively self-criticize your big-picture behavior constantly. 3. Reflect on past decisions and strategies to refine your approach. 4. Every command has a cost, so be smart and efficient. Aim to complete tasks in the least number of steps. 5. Only make use of your information gathering abilities to find information that you don't yet have knowledge of. ## Goals For your task, you must fulfill the following goals: 1. Increase net worth 2. Grow Twitter Account 3. Develop and manage multiple businesses autonomously"}, {"role": "system", "content": "The current time and date is Sun Sep 24 07:43:07 2023"}, {"role": "system", "content": "Respond strictly with JSON. The JSON should be compatible with the TypeScript type `Response` from the following: ```ts interface Response { thoughts: { // Thoughts text: string; reasoning: string; // Short markdown-style bullet list that conveys the long-term plan plan: string; // Constructive self-criticism criticism: string; // Summary of thoughts to say to the user speak: string; }; command: { name: string; args: Record; }; } ```"}, {"role": "user", "content": "Determine exactly one command to use next based on the given goals and the progress you have made so far, and respond using the JSON schema specified previously:"}], "model": "gpt-3.5-turbo", "temperature": 0.0, "max_tokens": 3175} +data = { + "messages": [ + { + "role": "system", + "content": "You are Entrepreneur-GPT, an AI designed to autonomously develop and run businesses. Your decisions must always be made independently without seeking user assistance. Play to your strengths as an LLM and pursue simple strategies with no legal complications. ## Constraints You operate within the following constraints: 1. Exclusively use the commands listed below. 2. You can only act proactively, and are unable to start background jobs or set up webhooks for yourself. Take this into account when planning your actions. 3. You are unable to interact with physical objects. If this is absolutely necessary to fulfill a task or objective or to complete a step, you must ask the user to do it for you. If the user refuses this, and there is no other way to achieve your goals, you must terminate to avoid wasting time and energy. ## Resources You can leverage access to the following resources: 1. Internet access for searches and information gathering. 2. The ability to read and write files. 3. You are a Large Language Model, trained on millions of pages of text, including a lot of factual knowledge. Make use of this factual knowledge to avoid unnecessary gathering of information. ## Commands You have access to the following commands: 1. execute_python_code: Executes the given Python code inside a single-use Docker container with access to your workspace folder. Params: (code: string) 2. execute_python_file: Execute an existing Python file inside a single-use Docker container with access to your workspace folder. Params: (filename: string, args: Optional[list[str]]) 3. list_folder: List the items in a folder. Params: (folder: string) 4. open_file: Open a file for editing or continued viewing; create it if it does not exist yet. Note: if you only need to read or write a file once, use `write_to_file` instead. Params: (file_path: string) 5. open_folder: Open a folder to keep track of its content. Params: (path: string) 6. read_file: Read an existing file. Params: (filename: string) 7. write_file: Write a file, creating it if necessary. If the file exists, it is overwritten. Params: (filename: string, contents: string) 8. ask_user: If you need more details or information regarding the given goals, you can ask the user for input. Params: (question: string) 9. web_search: Searches the web. Params: (query: string) 10. read_webpage: Read a webpage, and extract specific information from it if a question is specified. If you are looking to extract specific information from the webpage, you should specify a question. Params: (url: string, question: Optional[string]) 11. finish: Use this to shut down once you have accomplished all of your goals, or when there are insurmountable problems that make it impossible for you to finish your task. Params: (reason: string) ## Best practices 1. Continuously review and analyze your actions to ensure you are performing to the best of your abilities. 2. Constructively self-criticize your big-picture behavior constantly. 3. Reflect on past decisions and strategies to refine your approach. 4. Every command has a cost, so be smart and efficient. Aim to complete tasks in the least number of steps. 5. Only make use of your information gathering abilities to find information that you don't yet have knowledge of. ## Goals For your task, you must fulfill the following goals: 1. Increase net worth 2. Grow Twitter Account 3. Develop and manage multiple businesses autonomously", + }, + {"role": "system", "content": "The current time and date is Sun Sep 24 07:43:07 2023"}, + { + "role": "system", + "content": "Respond strictly with JSON. The JSON should be compatible with the TypeScript type `Response` from the following: ```ts interface Response { thoughts: { // Thoughts text: string; reasoning: string; // Short markdown-style bullet list that conveys the long-term plan plan: string; // Constructive self-criticism criticism: string; // Summary of thoughts to say to the user speak: string; }; command: { name: string; args: Record; }; } ```", + }, + { + "role": "user", + "content": "Determine exactly one command to use next based on the given goals and the progress you have made so far, and respond using the JSON schema specified previously:", + }, + ], + "model": "gpt-3.5-turbo", + "temperature": 0.0, + "max_tokens": 3175, +} -da = {"content" :json.dumps(data,indent=2) } +da = {"content": json.dumps(data, indent=2)} on_event(da) diff --git a/vendor/basic_agent b/vendor/basic_agent index 43cd887..ab60de9 160000 --- a/vendor/basic_agent +++ b/vendor/basic_agent @@ -1 +1 @@ -Subproject commit 43cd887eeca0b7b473d5553db9492976bd9e8f5a +Subproject commit ab60de9662af21bb534ea946869c0c2ddb105e84 From 2c6699e0e3379257c9f0f37414b7f9491de38971 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Tue, 3 Oct 2023 14:48:14 -0400 Subject: [PATCH 18/57] its building --- docker-compose.yml | 24 +++++++++++++++++------- 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 7c9876e..94ccdaf 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,28 +1,34 @@ version: '3' services: - + act_base: #root base of action build: vendor/act_base - + image: h4ckermike/act_base + #h4ckermike/ poetry_base: # use poetry - build: + image: h4ckermike/poetry_base + build: context: vendor/python-poetry/build args: - OFFICIAL_PYTHON_IMAGE: act_base + OFFICIAL_PYTHON_IMAGE: h4ckermike/act_base:latest POETRY_VERSION: 1.6.1 depends_on: - act_base ai_ticket: # the ticket to unite + image: h4ckermike/ai_ticket build: + context: . args: BASE_IMAGE: act_base depends_on: - poetry_base - basic_agent: #basic agnet + basic_agent: #basic agnet + image: h4ckermike/basic_agent build: + context: vendor/basic_agent/ args: OFFICIAL_PYTHON_IMAGE: act_base @@ -35,10 +41,12 @@ services: environment: - GITHUB_PAT=${GITHUB_PAT} - GITHUB_REPO=${GITHUB_REPO} + image: h4ckermike/mockopenai build: + context: vendor/lollms/ args: - OFFICIAL_PYTHON_IMAGE: localhost/ai-ticket_ai_ticket:latest + OFFICIAL_PYTHON_IMAGE: h4ckermike/ai_ticket:latest ports: - "5000:8080" @@ -54,11 +62,13 @@ services: - GITHUB_REPO="jmikedupont2/ai-ticket" - OPENAI_API_KEY=your-openai-api-key - OPENAI_API_BASE=http://mockopenai:8080/v1 + image: h4ckermike/autogpt build: + context: vendor/Auto-GPT/ dockerfile: slim/Dockerfile args: - OFFICIAL_PYTHON_IMAGE: localhost/ai-ticket_ai_ticket:latest + OFFICIAL_PYTHON_IMAGE: h4ckermike/ai_ticket:latest depends_on: - basic_agent From 9c8c8d529816d6cba52b7d9e8cfe2237f88d74fa Mon Sep 17 00:00:00 2001 From: mike dupont Date: Tue, 3 Oct 2023 17:28:20 -0400 Subject: [PATCH 19/57] update docker --- Dockerfile | 1 - docker-compose.yml | 5 ++--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index 4545601..fb14cd5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -31,7 +31,6 @@ ENV PATH="$VIRTUAL_ENVIRONMENT_PATH/bin:$PATH" WORKDIR ${PYTHONPATH} # https://python-poetry.org/docs/configuration/#cache-directory -RUN mkdir ${POETRY_CACHE_DIR} WORKDIR /opt/ai-ticket COPY pyproject.toml /opt/ai-ticket/ diff --git a/docker-compose.yml b/docker-compose.yml index 94ccdaf..995c72e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -17,11 +17,10 @@ services: ai_ticket: # the ticket to unite image: h4ckermike/ai_ticket - build: - + build: context: . args: - BASE_IMAGE: act_base + BASE_IMAGE: poetry_base depends_on: - poetry_base From ae24fe980b5cbfa6d97358b8112c1a46b622c8c1 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Wed, 4 Oct 2023 08:34:20 -0400 Subject: [PATCH 20/57] update --- poetry.lock | 301 +++++++++++++++++++++++++++++++++++++++++-------- pyproject.toml | 6 +- 2 files changed, 257 insertions(+), 50 deletions(-) diff --git a/poetry.lock b/poetry.lock index eb69fc3..d0b0a09 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "astroid" version = "2.15.8" description = "An abstract syntax tree for Python with inference support." -category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -15,13 +14,15 @@ files = [ [package.dependencies] lazy-object-proxy = ">=1.4.0" typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} -wrapt = {version = ">=1.11,<2", markers = "python_version < \"3.11\""} +wrapt = [ + {version = ">=1.11,<2", markers = "python_version < \"3.11\""}, + {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}, +] [[package]] name = "black" version = "23.7.0" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -67,7 +68,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -75,11 +75,74 @@ files = [ {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, ] +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + [[package]] name = "cfgv" version = "3.4.0" description = "Validate configuration and produce human readable error messages." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -91,7 +154,6 @@ files = [ name = "charset-normalizer" version = "3.3.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -191,7 +253,6 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -206,7 +267,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -218,7 +278,6 @@ files = [ name = "coverage" version = "7.3.1" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -282,11 +341,72 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli"] +[[package]] +name = "cryptography" +version = "41.0.4" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839"}, + {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143"}, + {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397"}, + {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860"}, + {file = "cryptography-41.0.4-cp37-abi3-win32.whl", hash = "sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd"}, + {file = "cryptography-41.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311"}, + {file = "cryptography-41.0.4.tar.gz", hash = "sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a"}, +] + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +nox = ["nox"] +pep8test = ["black", "check-sdist", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + [[package]] name = "deprecation" version = "2.1.0" description = "A library to handle automated deprecations" -category = "dev" optional = false python-versions = "*" files = [ @@ -301,7 +421,6 @@ packaging = "*" name = "dill" version = "0.3.7" description = "serialize all of Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -316,7 +435,6 @@ graph = ["objgraph (>=1.7.2)"] name = "distlib" version = "0.3.7" description = "Distribution utilities" -category = "dev" optional = false python-versions = "*" files = [ @@ -328,7 +446,6 @@ files = [ name = "docker" version = "6.1.3" description = "A Python library for the Docker Engine API." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -350,7 +467,6 @@ ssh = ["paramiko (>=2.4.3)"] name = "exceptiongroup" version = "1.1.3" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -365,7 +481,6 @@ test = ["pytest (>=6)"] name = "filelock" version = "3.12.4" description = "A platform independent file lock." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -382,7 +497,6 @@ typing = ["typing-extensions (>=4.7.1)"] name = "identify" version = "2.5.30" description = "File identification library for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -397,7 +511,6 @@ license = ["ukkonen"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -409,7 +522,6 @@ files = [ name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -421,7 +533,6 @@ files = [ name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." -category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -439,7 +550,6 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "lazy-object-proxy" version = "1.9.0" description = "A fast and thorough lazy object proxy." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -485,7 +595,6 @@ files = [ name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -497,7 +606,6 @@ files = [ name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -509,7 +617,6 @@ files = [ name = "nodeenv" version = "1.8.0" description = "Node.js virtual environment builder" -category = "dev" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" files = [ @@ -524,7 +631,6 @@ setuptools = "*" name = "packaging" version = "23.2" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -536,7 +642,6 @@ files = [ name = "pathspec" version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -548,7 +653,6 @@ files = [ name = "platformdirs" version = "3.10.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -564,7 +668,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -580,7 +683,6 @@ testing = ["pytest", "pytest-benchmark"] name = "pre-commit" version = "3.3.3" description = "A framework for managing and maintaining multi-language pre-commit hooks." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -595,11 +697,61 @@ nodeenv = ">=0.11.1" pyyaml = ">=5.1" virtualenv = ">=20.10.0" +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pygithub" +version = "2.1.1" +description = "Use the full Github API v3" +optional = false +python-versions = ">=3.7" +files = [ + {file = "PyGithub-2.1.1-py3-none-any.whl", hash = "sha256:4b528d5d6f35e991ea5fd3f942f58748f24938805cb7fcf24486546637917337"}, + {file = "PyGithub-2.1.1.tar.gz", hash = "sha256:ecf12c2809c44147bce63b047b3d2e9dac8a41b63e90fcb263c703f64936b97c"}, +] + +[package.dependencies] +Deprecated = "*" +pyjwt = {version = ">=2.4.0", extras = ["crypto"]} +pynacl = ">=1.4.0" +python-dateutil = "*" +requests = ">=2.14.0" +typing-extensions = ">=4.0.0" +urllib3 = ">=1.26.0" + +[[package]] +name = "pyjwt" +version = "2.8.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, + {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, +] + +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + [[package]] name = "pylint" version = "2.17.5" description = "python code static checker" -category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -610,7 +762,10 @@ files = [ [package.dependencies] astroid = ">=2.15.6,<=2.17.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -dill = {version = ">=0.2", markers = "python_version < \"3.11\""} +dill = [ + {version = ">=0.2", markers = "python_version < \"3.11\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\""}, +] isort = ">=4.2.5,<6" mccabe = ">=0.6,<0.8" platformdirs = ">=2.2.0" @@ -621,11 +776,36 @@ tomlkit = ">=0.10.1" spelling = ["pyenchant (>=3.2,<4.0)"] testutils = ["gitpython (>3)"] +[[package]] +name = "pynacl" +version = "1.5.0" +description = "Python binding to the Networking and Cryptography (NaCl) library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"}, + {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"}, +] + +[package.dependencies] +cffi = ">=1.4.1" + +[package.extras] +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] +tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] + [[package]] name = "pytest" version = "7.4.0" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -648,7 +828,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -663,11 +842,38 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, + {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + [[package]] name = "pywin32" version = "306" description = "Python for Window Extensions" -category = "main" optional = false python-versions = "*" files = [ @@ -691,7 +897,6 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -751,7 +956,6 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -773,7 +977,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "semver" version = "3.0.1" description = "Python helper for Semantic Versioning (https://semver.org)" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -785,7 +988,6 @@ files = [ name = "setuptools" version = "68.2.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -798,11 +1000,21 @@ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + [[package]] name = "testcontainers" version = "3.7.1" description = "Library provides lightweight, throwaway instances of common databases, Selenium web browsers, or anything else that can run in a Docker container" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -836,7 +1048,6 @@ selenium = ["selenium"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -848,7 +1059,6 @@ files = [ name = "tomlkit" version = "0.12.1" description = "Style preserving TOML library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -860,7 +1070,6 @@ files = [ name = "typing-extensions" version = "4.8.0" description = "Backported and Experimental Type Hints for Python 3.8+" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -872,7 +1081,6 @@ files = [ name = "urllib3" version = "2.0.5" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -890,7 +1098,6 @@ zstd = ["zstandard (>=0.18.0)"] name = "virtualenv" version = "20.24.5" description = "Virtual Python Environment builder" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -911,7 +1118,6 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess name = "websocket-client" version = "1.6.3" description = "WebSocket client for Python with low level API options" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -928,7 +1134,6 @@ test = ["websockets"] name = "wrapt" version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -1011,5 +1216,5 @@ files = [ [metadata] lock-version = "2.0" -python-versions = "3.10.*" -content-hash = "1332d545214a8b36c15b7497a604dd3cc91af759f5c3b738e06706acd35840a5" +python-versions = ">=3.10" +content-hash = "58fc8736b8fa1de792b2681d6a824e8fc1f5e11eb6ccc02b806aff54d06ba8b0" diff --git a/pyproject.toml b/pyproject.toml index 2158d50..cfd7c0c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -#python = "^3.11" + requires = [ "setuptools>=42", "wheel", @@ -40,9 +40,11 @@ classifiers = [ # [tool.poetry.scripts] # [tool.poetry.dependencies] [tool.poetry.dependencies] -#python = "3.11.4" +python = ">=3.10" click = "8.1.7" docker = "6.1.3" +pygithub = "^2.1.1" +python-dotenv = "^1.0.0" [tool.poetry.group.dev.dependencies] python = "^3.7" From b0ec1a603458a541d2885271086b017e559f734e Mon Sep 17 00:00:00 2001 From: Mike DuPont Date: Wed, 4 Oct 2023 13:08:37 -0400 Subject: [PATCH 21/57] Update pipeline.yml --- .github/workflows/pipeline.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pipeline.yml b/.github/workflows/pipeline.yml index 585ee4f..d83e716 100644 --- a/.github/workflows/pipeline.yml +++ b/.github/workflows/pipeline.yml @@ -33,8 +33,8 @@ jobs: pylint build tests || echo ignore for now - name: black run: | - source .venv/bin/activate - black --check . +# source .venv/bin/activate +# black --check . ||skip run-tests: needs: code-quality From 0bf5eb80ebec80f5dad78170434211ed855058c4 Mon Sep 17 00:00:00 2001 From: Mike DuPont Date: Mon, 2 Oct 2023 23:46:12 +0000 Subject: [PATCH 22/57] bugfix --- docker-compose.yml | 19 +++---------------- 1 file changed, 3 insertions(+), 16 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 995c72e..a26ee3c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -34,23 +34,10 @@ services: depends_on: - ai_ticket - mockopenai: # interface - depends_on: - - ai_ticket - environment: - - GITHUB_PAT=${GITHUB_PAT} - - GITHUB_REPO=${GITHUB_REPO} - image: h4ckermike/mockopenai - build: - - context: vendor/lollms/ - args: - OFFICIAL_PYTHON_IMAGE: h4ckermike/ai_ticket:latest - ports: - - "5000:8080" + autogpt: + #entrypoint: bash -c "poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y" + entrypoint: bash -c "poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y --ai-name 'meta-autogpt' --ai-role 'you will introspect autogpt and reveal its internals via reflection and comprehension' --ai-goal 'Observe your behaviour' --ai-goal 'Reflect over your outcomes' --ai-goal 'Orient yourself to your knowledge' --ai-goal 'Decide on your next step' --ai-goal 'Act on your chosen next experiment' " - autogpt: #the beast - entrypoint: bash -c "poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y --ai-name 'meta-autogpt' --ai-role 'you will introspect autogpt and reveal its internals via reflection and comprehension' --ai-goal 'Observe your behaviour' --ai-goal 'Reflect over your outcomes' --ai-goal 'Orient yourself to your knowledge' --ai-goal 'Decide on your next step' --ai-goal 'Act on your chosen next experiment' " # uncomment thse next 3 lines for debugging #entrypoint: /bin/bash From 1f598b08f2e6498f3311cc0171ff8d53027313c3 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Wed, 4 Oct 2023 08:54:23 -0400 Subject: [PATCH 23/57] you are the metagpt --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index a26ee3c..3062492 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -35,7 +35,7 @@ services: - ai_ticket autogpt: - #entrypoint: bash -c "poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y" + #entrypoint: bash -c "poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y" entrypoint: bash -c "poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y --ai-name 'meta-autogpt' --ai-role 'you will introspect autogpt and reveal its internals via reflection and comprehension' --ai-goal 'Observe your behaviour' --ai-goal 'Reflect over your outcomes' --ai-goal 'Orient yourself to your knowledge' --ai-goal 'Decide on your next step' --ai-goal 'Act on your chosen next experiment' " From 0261e484e166ccad4aad9e16ba0b4131420c9c60 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Mon, 2 Oct 2023 09:48:16 -0400 Subject: [PATCH 24/57] work in progress still having python issues. --- .github/workflows/pipeline.yml | 6 +- Dockerfile | 28 +- docker-compose.yml | 63 +- poetry.lock | 1221 +------------------------------- pyproject.toml | 9 +- 5 files changed, 50 insertions(+), 1277 deletions(-) diff --git a/.github/workflows/pipeline.yml b/.github/workflows/pipeline.yml index d83e716..a51aabc 100644 --- a/.github/workflows/pipeline.yml +++ b/.github/workflows/pipeline.yml @@ -30,11 +30,11 @@ jobs: - name: pylint run: | source .venv/bin/activate - pylint build tests || echo ignore for now + pylint build tests - name: black run: | -# source .venv/bin/activate -# black --check . ||skip + source .venv/bin/activate + black --check . run-tests: needs: code-quality diff --git a/Dockerfile b/Dockerfile index fb14cd5..3517970 100644 --- a/Dockerfile +++ b/Dockerfile @@ -4,8 +4,7 @@ # Dockerfile: https://github.com/max-pfeiffer/python-poetry/blob/main/build/Dockerfile ARG BASE_IMAGE FROM ${BASE_IMAGE} -RUN apt update -RUN apt install -y git +ARG APPLICATION_SERVER_PORT LABEL maintainer="Mike DuPont " @@ -17,27 +16,36 @@ ENV PYTHONUNBUFFERED=1 \ POETRY_VIRTUALENVS_IN_PROJECT=true \ POETRY_CACHE_DIR="/application_root/.cache" \ VIRTUAL_ENVIRONMENT_PATH="/application_root/.venv" \ - POETRY_HOME="/opt/poetry" -# https://python-poetry.org/docs/#installing-manually -RUN python -m venv ${VIRTUAL_ENVIRONMENT_PATH} - + APPLICATION_SERVER_PORT=$APPLICATION_SERVER_PORT # Adding the virtual environment to PATH in order to "activate" it. # https://docs.python.org/3/library/venv.html#how-venvs-work ENV PATH="$VIRTUAL_ENVIRONMENT_PATH/bin:$PATH" +# Principle of least privilege: create a new user for running the application +RUN groupadd -g 1001 python_application && \ + useradd -r -u 1001 -g python_application python_application + # Set the WORKDIR to the application root. # https://www.uvicorn.org/settings/#development # https://docs.docker.com/engine/reference/builder/#workdir WORKDIR ${PYTHONPATH} +RUN chown python_application:python_application ${PYTHONPATH} +# Create cache directory and set permissions because user 1001 has no home +# and poetry cache directory. # https://python-poetry.org/docs/configuration/#cache-directory +RUN mkdir ${POETRY_CACHE_DIR} && chown python_application:python_application ${POETRY_CACHE_DIR} + +# Use the unpriveledged user to run the application +USER 1001 WORKDIR /opt/ai-ticket COPY pyproject.toml /opt/ai-ticket/ COPY setup.cfg /opt/ai-ticket/ -COPY README.md /opt/ai-ticket/ COPY requirements.txt /opt/ai-ticket/ COPY ./src/ /opt/ai-ticket/src/ - -RUN ls ${VIRTUAL_ENVIRONMENT_PATH}/bin/activate -RUN pip install /opt/ai-ticket/ && pip install --trusted-host pypi.python.org -r requirements.txt +RUN pip install /opt/ai-ticket/ + +RUN apt update +RUN apt install -y git +RUN pip install --trusted-host pypi.python.org -r requirements.txt diff --git a/docker-compose.yml b/docker-compose.yml index 3062492..ae5d25f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,44 +1,21 @@ version: '3' services: - act_base: #root base of action - build: vendor/act_base - image: h4ckermike/act_base - #h4ckermike/ - poetry_base: # use poetry - image: h4ckermike/poetry_base - build: - context: vendor/python-poetry/build - args: - OFFICIAL_PYTHON_IMAGE: h4ckermike/act_base:latest - POETRY_VERSION: 1.6.1 - depends_on: - - act_base - - ai_ticket: # the ticket to unite - image: h4ckermike/ai_ticket - build: - context: . - args: - BASE_IMAGE: poetry_base - depends_on: - - poetry_base - - basic_agent: #basic agnet - image: h4ckermike/basic_agent - build: - - context: vendor/basic_agent/ - args: - OFFICIAL_PYTHON_IMAGE: act_base - depends_on: - - ai_ticket + ai_ticket: + #image: ai_ticket + build: . + # context: . + #entrypoint: /bin/bash + #stdin_open: true # docker run -i + #tty: true # docker run -t autogpt: + #entrypoint: bash -c "poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y" entrypoint: bash -c "poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y --ai-name 'meta-autogpt' --ai-role 'you will introspect autogpt and reveal its internals via reflection and comprehension' --ai-goal 'Observe your behaviour' --ai-goal 'Reflect over your outcomes' --ai-goal 'Orient yourself to your knowledge' --ai-goal 'Decide on your next step' --ai-goal 'Act on your chosen next experiment' " + # uncomment thse next 3 lines for debugging #entrypoint: /bin/bash #stdin_open: true # docker run -i @@ -48,14 +25,22 @@ services: - GITHUB_REPO="jmikedupont2/ai-ticket" - OPENAI_API_KEY=your-openai-api-key - OPENAI_API_BASE=http://mockopenai:8080/v1 - image: h4ckermike/autogpt build: - context: vendor/Auto-GPT/ - dockerfile: slim/Dockerfile - args: - OFFICIAL_PYTHON_IMAGE: h4ckermike/ai_ticket:latest - depends_on: - - basic_agent + - ai_ticket - mockopenai + + mockopenai: + depends_on: + - ai_ticket + + environment: + - GITHUB_PAT=${GITHUB_PAT} + - GITHUB_REPO=${GITHUB_REPO} + + build: + context: vendor/lollms/ + ports: + - "5000:8080" + diff --git a/poetry.lock b/poetry.lock index d0b0a09..4e433ab 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,1220 +1,7 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. - -[[package]] -name = "astroid" -version = "2.15.8" -description = "An abstract syntax tree for Python with inference support." -optional = false -python-versions = ">=3.7.2" -files = [ - {file = "astroid-2.15.8-py3-none-any.whl", hash = "sha256:1aa149fc5c6589e3d0ece885b4491acd80af4f087baafa3fb5203b113e68cd3c"}, - {file = "astroid-2.15.8.tar.gz", hash = "sha256:6c107453dffee9055899705de3c9ead36e74119cee151e5a9aaf7f0b0e020a6a"}, -] - -[package.dependencies] -lazy-object-proxy = ">=1.4.0" -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} -wrapt = [ - {version = ">=1.11,<2", markers = "python_version < \"3.11\""}, - {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}, -] - -[[package]] -name = "black" -version = "23.7.0" -description = "The uncompromising code formatter." -optional = false -python-versions = ">=3.8" -files = [ - {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, - {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, - {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, - {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, - {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, - {file = "black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, - {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, - {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, - {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, - {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, - {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "certifi" -version = "2023.7.22" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, -] - -[[package]] -name = "cffi" -version = "1.16.0" -description = "Foreign Function Interface for Python calling C code." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, -] - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "cfgv" -version = "3.4.0" -description = "Validate configuration and produce human readable error messages." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, - {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.3.0" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, - {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, -] - -[[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "coverage" -version = "7.3.1" -description = "Code coverage measurement for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "coverage-7.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cd0f7429ecfd1ff597389907045ff209c8fdb5b013d38cfa7c60728cb484b6e3"}, - {file = "coverage-7.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:966f10df9b2b2115da87f50f6a248e313c72a668248be1b9060ce935c871f276"}, - {file = "coverage-7.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0575c37e207bb9b98b6cf72fdaaa18ac909fb3d153083400c2d48e2e6d28bd8e"}, - {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:245c5a99254e83875c7fed8b8b2536f040997a9b76ac4c1da5bff398c06e860f"}, - {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c96dd7798d83b960afc6c1feb9e5af537fc4908852ef025600374ff1a017392"}, - {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:de30c1aa80f30af0f6b2058a91505ea6e36d6535d437520067f525f7df123887"}, - {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:50dd1e2dd13dbbd856ffef69196781edff26c800a74f070d3b3e3389cab2600d"}, - {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9c0c19f70d30219113b18fe07e372b244fb2a773d4afde29d5a2f7930765136"}, - {file = "coverage-7.3.1-cp310-cp310-win32.whl", hash = "sha256:770f143980cc16eb601ccfd571846e89a5fe4c03b4193f2e485268f224ab602f"}, - {file = "coverage-7.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:cdd088c00c39a27cfa5329349cc763a48761fdc785879220d54eb785c8a38520"}, - {file = "coverage-7.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74bb470399dc1989b535cb41f5ca7ab2af561e40def22d7e188e0a445e7639e3"}, - {file = "coverage-7.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:025ded371f1ca280c035d91b43252adbb04d2aea4c7105252d3cbc227f03b375"}, - {file = "coverage-7.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6191b3a6ad3e09b6cfd75b45c6aeeffe7e3b0ad46b268345d159b8df8d835f9"}, - {file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb0b188f30e41ddd659a529e385470aa6782f3b412f860ce22b2491c89b8593"}, - {file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c8f0df9dfd8ff745bccff75867d63ef336e57cc22b2908ee725cc552689ec8"}, - {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7eb3cd48d54b9bd0e73026dedce44773214064be93611deab0b6a43158c3d5a0"}, - {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ac3c5b7e75acac31e490b7851595212ed951889918d398b7afa12736c85e13ce"}, - {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b4ee7080878077af0afa7238df1b967f00dc10763f6e1b66f5cced4abebb0a3"}, - {file = "coverage-7.3.1-cp311-cp311-win32.whl", hash = "sha256:229c0dd2ccf956bf5aeede7e3131ca48b65beacde2029f0361b54bf93d36f45a"}, - {file = "coverage-7.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:c6f55d38818ca9596dc9019eae19a47410d5322408140d9a0076001a3dcb938c"}, - {file = "coverage-7.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5289490dd1c3bb86de4730a92261ae66ea8d44b79ed3cc26464f4c2cde581fbc"}, - {file = "coverage-7.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca833941ec701fda15414be400c3259479bfde7ae6d806b69e63b3dc423b1832"}, - {file = "coverage-7.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd694e19c031733e446c8024dedd12a00cda87e1c10bd7b8539a87963685e969"}, - {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aab8e9464c00da5cb9c536150b7fbcd8850d376d1151741dd0d16dfe1ba4fd26"}, - {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87d38444efffd5b056fcc026c1e8d862191881143c3aa80bb11fcf9dca9ae204"}, - {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8a07b692129b8a14ad7a37941a3029c291254feb7a4237f245cfae2de78de037"}, - {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2829c65c8faaf55b868ed7af3c7477b76b1c6ebeee99a28f59a2cb5907a45760"}, - {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1f111a7d85658ea52ffad7084088277135ec5f368457275fc57f11cebb15607f"}, - {file = "coverage-7.3.1-cp312-cp312-win32.whl", hash = "sha256:c397c70cd20f6df7d2a52283857af622d5f23300c4ca8e5bd8c7a543825baa5a"}, - {file = "coverage-7.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:5ae4c6da8b3d123500f9525b50bf0168023313963e0e2e814badf9000dd6ef92"}, - {file = "coverage-7.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca70466ca3a17460e8fc9cea7123c8cbef5ada4be3140a1ef8f7b63f2f37108f"}, - {file = "coverage-7.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f2781fd3cabc28278dc982a352f50c81c09a1a500cc2086dc4249853ea96b981"}, - {file = "coverage-7.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6407424621f40205bbe6325686417e5e552f6b2dba3535dd1f90afc88a61d465"}, - {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:04312b036580ec505f2b77cbbdfb15137d5efdfade09156961f5277149f5e344"}, - {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9ad38204887349853d7c313f53a7b1c210ce138c73859e925bc4e5d8fc18e7"}, - {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:53669b79f3d599da95a0afbef039ac0fadbb236532feb042c534fbb81b1a4e40"}, - {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:614f1f98b84eb256e4f35e726bfe5ca82349f8dfa576faabf8a49ca09e630086"}, - {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f1a317fdf5c122ad642db8a97964733ab7c3cf6009e1a8ae8821089993f175ff"}, - {file = "coverage-7.3.1-cp38-cp38-win32.whl", hash = "sha256:defbbb51121189722420a208957e26e49809feafca6afeef325df66c39c4fdb3"}, - {file = "coverage-7.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:f4f456590eefb6e1b3c9ea6328c1e9fa0f1006e7481179d749b3376fc793478e"}, - {file = "coverage-7.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f12d8b11a54f32688b165fd1a788c408f927b0960984b899be7e4c190ae758f1"}, - {file = "coverage-7.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f09195dda68d94a53123883de75bb97b0e35f5f6f9f3aa5bf6e496da718f0cb6"}, - {file = "coverage-7.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6601a60318f9c3945be6ea0f2a80571f4299b6801716f8a6e4846892737ebe4"}, - {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07d156269718670d00a3b06db2288b48527fc5f36859425ff7cec07c6b367745"}, - {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:636a8ac0b044cfeccae76a36f3b18264edcc810a76a49884b96dd744613ec0b7"}, - {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5d991e13ad2ed3aced177f524e4d670f304c8233edad3210e02c465351f785a0"}, - {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:586649ada7cf139445da386ab6f8ef00e6172f11a939fc3b2b7e7c9082052fa0"}, - {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4aba512a15a3e1e4fdbfed2f5392ec221434a614cc68100ca99dcad7af29f3f8"}, - {file = "coverage-7.3.1-cp39-cp39-win32.whl", hash = "sha256:6bc6f3f4692d806831c136c5acad5ccedd0262aa44c087c46b7101c77e139140"}, - {file = "coverage-7.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:553d7094cb27db58ea91332e8b5681bac107e7242c23f7629ab1316ee73c4981"}, - {file = "coverage-7.3.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:220eb51f5fb38dfdb7e5d54284ca4d0cd70ddac047d750111a68ab1798945194"}, - {file = "coverage-7.3.1.tar.gz", hash = "sha256:6cb7fe1581deb67b782c153136541e20901aa312ceedaf1467dcb35255787952"}, -] - -[package.dependencies] -tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "cryptography" -version = "41.0.4" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -optional = false -python-versions = ">=3.7" -files = [ - {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839"}, - {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f"}, - {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714"}, - {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb"}, - {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13"}, - {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143"}, - {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397"}, - {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860"}, - {file = "cryptography-41.0.4-cp37-abi3-win32.whl", hash = "sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd"}, - {file = "cryptography-41.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d"}, - {file = "cryptography-41.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67"}, - {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e"}, - {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829"}, - {file = "cryptography-41.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca"}, - {file = "cryptography-41.0.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d"}, - {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac"}, - {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9"}, - {file = "cryptography-41.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f"}, - {file = "cryptography-41.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91"}, - {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8"}, - {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6"}, - {file = "cryptography-41.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311"}, - {file = "cryptography-41.0.4.tar.gz", hash = "sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a"}, -] - -[package.dependencies] -cffi = ">=1.12" - -[package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] -sdist = ["build"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] -test-randomorder = ["pytest-randomly"] - -[[package]] -name = "deprecated" -version = "1.2.14" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, -] - -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] - -[[package]] -name = "deprecation" -version = "2.1.0" -description = "A library to handle automated deprecations" -optional = false -python-versions = "*" -files = [ - {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, - {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, -] - -[package.dependencies] -packaging = "*" - -[[package]] -name = "dill" -version = "0.3.7" -description = "serialize all of Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, - {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, -] - -[package.extras] -graph = ["objgraph (>=1.7.2)"] - -[[package]] -name = "distlib" -version = "0.3.7" -description = "Distribution utilities" -optional = false -python-versions = "*" -files = [ - {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, - {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, -] - -[[package]] -name = "docker" -version = "6.1.3" -description = "A Python library for the Docker Engine API." -optional = false -python-versions = ">=3.7" -files = [ - {file = "docker-6.1.3-py3-none-any.whl", hash = "sha256:aecd2277b8bf8e506e484f6ab7aec39abe0038e29fa4a6d3ba86c3fe01844ed9"}, - {file = "docker-6.1.3.tar.gz", hash = "sha256:aa6d17830045ba5ef0168d5eaa34d37beeb113948c413affe1d5991fc11f9a20"}, -] - -[package.dependencies] -packaging = ">=14.0" -pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} -requests = ">=2.26.0" -urllib3 = ">=1.26.0" -websocket-client = ">=0.32.0" - -[package.extras] -ssh = ["paramiko (>=2.4.3)"] - -[[package]] -name = "exceptiongroup" -version = "1.1.3" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, - {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "filelock" -version = "3.12.4" -description = "A platform independent file lock." -optional = false -python-versions = ">=3.8" -files = [ - {file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"}, - {file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"}, -] - -[package.extras] -docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"] -typing = ["typing-extensions (>=4.7.1)"] - -[[package]] -name = "identify" -version = "2.5.30" -description = "File identification library for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "identify-2.5.30-py2.py3-none-any.whl", hash = "sha256:afe67f26ae29bab007ec21b03d4114f41316ab9dd15aa8736a167481e108da54"}, - {file = "identify-2.5.30.tar.gz", hash = "sha256:f302a4256a15c849b91cfcdcec052a8ce914634b2f77ae87dad29cd749f2d88d"}, -] - -[package.extras] -license = ["ukkonen"] - -[[package]] -name = "idna" -version = "3.4" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "isort" -version = "5.12.0" -description = "A Python utility / library to sort Python imports." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, - {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, -] - -[package.extras] -colors = ["colorama (>=0.4.3)"] -pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] - -[[package]] -name = "lazy-object-proxy" -version = "1.9.0" -description = "A fast and thorough lazy object proxy." -optional = false -python-versions = ">=3.7" -files = [ - {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, -] - -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." -optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "nodeenv" -version = "1.8.0" -description = "Node.js virtual environment builder" -optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" -files = [ - {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, - {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, -] - -[package.dependencies] -setuptools = "*" - -[[package]] -name = "packaging" -version = "23.2" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, -] - -[[package]] -name = "pathspec" -version = "0.11.2" -description = "Utility library for gitignore style pattern matching of file paths." -optional = false -python-versions = ">=3.7" -files = [ - {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, - {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, -] - -[[package]] -name = "platformdirs" -version = "3.10.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -optional = false -python-versions = ">=3.7" -files = [ - {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, - {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, -] - -[package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] - -[[package]] -name = "pluggy" -version = "1.3.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "pre-commit" -version = "3.3.3" -description = "A framework for managing and maintaining multi-language pre-commit hooks." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pre_commit-3.3.3-py2.py3-none-any.whl", hash = "sha256:10badb65d6a38caff29703362271d7dca483d01da88f9d7e05d0b97171c136cb"}, - {file = "pre_commit-3.3.3.tar.gz", hash = "sha256:a2256f489cd913d575c145132ae196fe335da32d91a8294b7afe6622335dd023"}, -] - -[package.dependencies] -cfgv = ">=2.0.0" -identify = ">=1.0.0" -nodeenv = ">=0.11.1" -pyyaml = ">=5.1" -virtualenv = ">=20.10.0" - -[[package]] -name = "pycparser" -version = "2.21" -description = "C parser in Python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, -] - -[[package]] -name = "pygithub" -version = "2.1.1" -description = "Use the full Github API v3" -optional = false -python-versions = ">=3.7" -files = [ - {file = "PyGithub-2.1.1-py3-none-any.whl", hash = "sha256:4b528d5d6f35e991ea5fd3f942f58748f24938805cb7fcf24486546637917337"}, - {file = "PyGithub-2.1.1.tar.gz", hash = "sha256:ecf12c2809c44147bce63b047b3d2e9dac8a41b63e90fcb263c703f64936b97c"}, -] - -[package.dependencies] -Deprecated = "*" -pyjwt = {version = ">=2.4.0", extras = ["crypto"]} -pynacl = ">=1.4.0" -python-dateutil = "*" -requests = ">=2.14.0" -typing-extensions = ">=4.0.0" -urllib3 = ">=1.26.0" - -[[package]] -name = "pyjwt" -version = "2.8.0" -description = "JSON Web Token implementation in Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, - {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, -] - -[package.dependencies] -cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} - -[package.extras] -crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] - -[[package]] -name = "pylint" -version = "2.17.5" -description = "python code static checker" -optional = false -python-versions = ">=3.7.2" -files = [ - {file = "pylint-2.17.5-py3-none-any.whl", hash = "sha256:73995fb8216d3bed149c8d51bba25b2c52a8251a2c8ac846ec668ce38fab5413"}, - {file = "pylint-2.17.5.tar.gz", hash = "sha256:f7b601cbc06fef7e62a754e2b41294c2aa31f1cb659624b9a85bcba29eaf8252"}, -] - -[package.dependencies] -astroid = ">=2.15.6,<=2.17.0-dev0" -colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -dill = [ - {version = ">=0.2", markers = "python_version < \"3.11\""}, - {version = ">=0.3.6", markers = "python_version >= \"3.11\""}, -] -isort = ">=4.2.5,<6" -mccabe = ">=0.6,<0.8" -platformdirs = ">=2.2.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -tomlkit = ">=0.10.1" - -[package.extras] -spelling = ["pyenchant (>=3.2,<4.0)"] -testutils = ["gitpython (>3)"] - -[[package]] -name = "pynacl" -version = "1.5.0" -description = "Python binding to the Networking and Cryptography (NaCl) library" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"}, - {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"}, - {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"}, - {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"}, - {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"}, - {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"}, -] - -[package.dependencies] -cffi = ">=1.4.1" - -[package.extras] -docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] -tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] - -[[package]] -name = "pytest" -version = "7.4.0" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, - {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "pytest-cov" -version = "4.1.0" -description = "Pytest plugin for measuring coverage." -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, - {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, -] - -[package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} -pytest = ">=4.6" - -[package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] - -[[package]] -name = "python-dateutil" -version = "2.8.2" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-dotenv" -version = "1.0.0" -description = "Read key-value pairs from a .env file and set them as environment variables" -optional = false -python-versions = ">=3.8" -files = [ - {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, - {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, -] - -[package.extras] -cli = ["click (>=5.0)"] - -[[package]] -name = "pywin32" -version = "306" -description = "Python for Window Extensions" -optional = false -python-versions = "*" -files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.1" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "requests" -version = "2.31.0" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.7" -files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "semver" -version = "3.0.1" -description = "Python helper for Semantic Versioning (https://semver.org)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "semver-3.0.1-py3-none-any.whl", hash = "sha256:2a23844ba1647362c7490fe3995a86e097bb590d16f0f32dfc383008f19e4cdf"}, - {file = "semver-3.0.1.tar.gz", hash = "sha256:9ec78c5447883c67b97f98c3b6212796708191d22e4ad30f4570f840171cbce1"}, -] - -[[package]] -name = "setuptools" -version = "68.2.2" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, - {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "testcontainers" -version = "3.7.1" -description = "Library provides lightweight, throwaway instances of common databases, Selenium web browsers, or anything else that can run in a Docker container" -optional = false -python-versions = ">=3.7" -files = [ - {file = "testcontainers-3.7.1-py2.py3-none-any.whl", hash = "sha256:7f48cef4bf0ccd78f1a4534d4b701a003a3bace851f24eae58a32f9e3f0aeba0"}, -] - -[package.dependencies] -deprecation = "*" -docker = ">=4.0.0" -wrapt = "*" - -[package.extras] -arangodb = ["python-arango"] -azurite = ["azure-storage-blob"] -clickhouse = ["clickhouse-driver"] -docker-compose = ["docker-compose"] -google-cloud-pubsub = ["google-cloud-pubsub (<2)"] -kafka = ["kafka-python"] -keycloak = ["python-keycloak"] -mongo = ["pymongo"] -mssqlserver = ["pymssql"] -mysql = ["pymysql", "sqlalchemy"] -neo4j = ["neo4j"] -oracle = ["cx-Oracle", "sqlalchemy"] -postgresql = ["psycopg2-binary", "sqlalchemy"] -rabbitmq = ["pika"] -redis = ["redis"] -selenium = ["selenium"] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "tomlkit" -version = "0.12.1" -description = "Style preserving TOML library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomlkit-0.12.1-py3-none-any.whl", hash = "sha256:712cbd236609acc6a3e2e97253dfc52d4c2082982a88f61b640ecf0817eab899"}, - {file = "tomlkit-0.12.1.tar.gz", hash = "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86"}, -] - -[[package]] -name = "typing-extensions" -version = "4.8.0" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, - {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, -] - -[[package]] -name = "urllib3" -version = "2.0.5" -description = "HTTP library with thread-safe connection pooling, file post, and more." -optional = false -python-versions = ">=3.7" -files = [ - {file = "urllib3-2.0.5-py3-none-any.whl", hash = "sha256:ef16afa8ba34a1f989db38e1dbbe0c302e4289a47856990d0682e374563ce35e"}, - {file = "urllib3-2.0.5.tar.gz", hash = "sha256:13abf37382ea2ce6fb744d4dad67838eec857c9f4f57009891805e0b5e123594"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "virtualenv" -version = "20.24.5" -description = "Virtual Python Environment builder" -optional = false -python-versions = ">=3.7" -files = [ - {file = "virtualenv-20.24.5-py3-none-any.whl", hash = "sha256:b80039f280f4919c77b30f1c23294ae357c4c8701042086e3fc005963e4e537b"}, - {file = "virtualenv-20.24.5.tar.gz", hash = "sha256:e8361967f6da6fbdf1426483bfe9fca8287c242ac0bc30429905721cefbff752"}, -] - -[package.dependencies] -distlib = ">=0.3.7,<1" -filelock = ">=3.12.2,<4" -platformdirs = ">=3.9.1,<4" - -[package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] - -[[package]] -name = "websocket-client" -version = "1.6.3" -description = "WebSocket client for Python with low level API options" -optional = false -python-versions = ">=3.8" -files = [ - {file = "websocket-client-1.6.3.tar.gz", hash = "sha256:3aad25d31284266bcfcfd1fd8a743f63282305a364b8d0948a43bd606acc652f"}, - {file = "websocket_client-1.6.3-py3-none-any.whl", hash = "sha256:6cfc30d051ebabb73a5fa246efdcc14c8fbebbd0330f8984ac3bb6d9edd2ad03"}, -] - -[package.extras] -docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] - -[[package]] -name = "wrapt" -version = "1.15.0" -description = "Module for decorators, wrappers and monkey patching." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -files = [ - {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"}, - {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"}, - {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"}, - {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"}, - {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"}, - {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"}, - {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"}, - {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"}, - {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"}, - {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"}, - {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"}, - {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"}, - {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"}, - {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"}, - {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"}, - {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"}, - {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"}, - {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"}, - {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"}, - {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"}, - {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"}, - {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"}, - {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"}, - {file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"}, - {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"}, - {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"}, - {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"}, - {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"}, - {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"}, - {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"}, - {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"}, - {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"}, - {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"}, - {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"}, - {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"}, - {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"}, - {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"}, - {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"}, - {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"}, - {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"}, - {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"}, - {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"}, - {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"}, - {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"}, - {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"}, - {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"}, - {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"}, - {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"}, - {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"}, - {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"}, - {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"}, - {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"}, - {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"}, - {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"}, - {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"}, - {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"}, - {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"}, - {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"}, - {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"}, - {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"}, - {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"}, - {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"}, - {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, -] +# This file is automatically @generated by Poetry and should not be changed by hand. +package = [] [metadata] lock-version = "2.0" -python-versions = ">=3.10" -content-hash = "58fc8736b8fa1de792b2681d6a824e8fc1f5e11eb6ccc02b806aff54d06ba8b0" +python-versions = "*" +content-hash = "115cf985d932e9bf5f540555bbdd75decbb62cac81e399375fc19f6277f8c1d8" diff --git a/pyproject.toml b/pyproject.toml index cfd7c0c..94356c3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] - +python = "^3.7" requires = [ "setuptools>=42", "wheel", @@ -39,13 +39,6 @@ classifiers = [ # [tool.poetry.scripts] # [tool.poetry.dependencies] -[tool.poetry.dependencies] -python = ">=3.10" -click = "8.1.7" -docker = "6.1.3" -pygithub = "^2.1.1" -python-dotenv = "^1.0.0" - [tool.poetry.group.dev.dependencies] python = "^3.7" pytest = "7.4.0" From 739ccf63d49a8899aeafd10fd5e3978b502133d5 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Mon, 2 Oct 2023 11:53:18 -0400 Subject: [PATCH 25/57] dockerbuild --- docker-compose.yml | 9 ++-- dockerbuild/Dockerfile | 49 ++++++++++++++++++++ dockerbuild/constants.py | 41 +++++++++-------- dockerbuild/images.py | 96 +++++++++++++++++++++++++++++----------- dockerbuild/publish.py | 59 ++++++++++++------------ 5 files changed, 170 insertions(+), 84 deletions(-) create mode 100644 dockerbuild/Dockerfile diff --git a/docker-compose.yml b/docker-compose.yml index ae5d25f..1172068 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,13 +1,10 @@ version: '3' services: - ai_ticket: - #image: ai_ticket build: . - # context: . - #entrypoint: /bin/bash - #stdin_open: true # docker run -i - #tty: true # docker run -t + act_base: + # the base image of github action + build: vendor/act_base autogpt: diff --git a/dockerbuild/Dockerfile b/dockerbuild/Dockerfile new file mode 100644 index 0000000..b8a9eaa --- /dev/null +++ b/dockerbuild/Dockerfile @@ -0,0 +1,49 @@ +# The Poetry installation is provided through the base image. Please check the +# base image if you interested in the details. +# Base image: https://hub.docker.com/r/pfeiffermax/python-poetry +# Dockerfile: https://github.com/max-pfeiffer/python-poetry/blob/main/build/Dockerfile +ARG BASE_IMAGE +FROM ${BASE_IMAGE} +ARG APPLICATION_SERVER_PORT + +LABEL maintainer="Max Pfeiffer " + + # https://docs.python.org/3/using/cmdline.html#envvar-PYTHONUNBUFFERED +ENV PYTHONUNBUFFERED=1 \ + # https://docs.python.org/3/using/cmdline.html#envvar-PYTHONDONTWRITEBYTECODE + PYTHONDONTWRITEBYTECODE=1 \ + PYTHONPATH=/application_root \ + # https://python-poetry.org/docs/configuration/#virtualenvsin-project + POETRY_VIRTUALENVS_IN_PROJECT=true \ + POETRY_CACHE_DIR="/application_root/.cache" \ + VIRTUAL_ENVIRONMENT_PATH="/application_root/.venv" \ + APPLICATION_SERVER_PORT=$APPLICATION_SERVER_PORT + +# Adding the virtual environment to PATH in order to "activate" it. +# https://docs.python.org/3/library/venv.html#how-venvs-work +ENV PATH="$VIRTUAL_ENVIRONMENT_PATH/bin:$PATH" + +# Principle of least privilege: create a new user for running the application +RUN groupadd -g 1001 python_application && \ + useradd -r -u 1001 -g python_application python_application + +# Set the WORKDIR to the application root. +# https://www.uvicorn.org/settings/#development +# https://docs.docker.com/engine/reference/builder/#workdir +WORKDIR ${PYTHONPATH} +RUN chown python_application:python_application ${PYTHONPATH} + +# Create cache directory and set permissions because user 1001 has no home +# and poetry cache directory. +# https://python-poetry.org/docs/configuration/#cache-directory +RUN mkdir ${POETRY_CACHE_DIR} && chown python_application:python_application ${POETRY_CACHE_DIR} + +# Document the exposed port +# https://docs.docker.com/engine/reference/builder/#expose +EXPOSE ${APPLICATION_SERVER_PORT} + +# Use the unpriveledged user to run the application +USER 1001 + +# Run the uvicorn application server. +CMD exec uvicorn --workers 1 --host 0.0.0.0 --port $APPLICATION_SERVER_PORT app.main:app diff --git a/dockerbuild/constants.py b/dockerbuild/constants.py index 252c1e1..1f85ba6 100644 --- a/dockerbuild/constants.py +++ b/dockerbuild/constants.py @@ -1,29 +1,28 @@ TARGET_ARCHITECTURES: list[str] = [ - "base_python3.10-nodejs20", # 3.10.13 - #"python3.9.16-bullseye", - #"python3.9.16-slim-bullseye", - #"python3.10.10-bullseye", - #"python3.10.10-slim-bullseye", + "python3.9.16-bullseye", + "python3.9.16-slim-bullseye", + "python3.10.10-bullseye", + "python3.10.10-slim-bullseye", ] BASE_IMAGES: dict = { - # TARGET_ARCHITECTURES[ - # 0 - # ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.9.16-bullseye@sha256:54037cfdca026b17e7a57664dff47bf04e7849074d3ab62271ecad0446ef0322", - # TARGET_ARCHITECTURES[ - # 1 - # ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.9.16-slim-bullseye@sha256:c0b8d9c28c5717074c481dfdf1d8bd3aaa0b83a5e2a9e37c77be7af19d70d0ce", - # TARGET_ARCHITECTURES[ - # 2 - # ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.10.10-bullseye@sha256:5a81c8c86132e504db2b7329f5e41cd32bddebf811d83a0d356edbca0d81135c", - # TARGET_ARCHITECTURES[ - # 3 - # ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.10.10-slim-bullseye@sha256:289c6beb568991811629c91cdcb3841ceb95bf0a017c3e411f4b71e18043ef15", + TARGET_ARCHITECTURES[ + 0 + ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.9.16-bullseye@sha256:54037cfdca026b17e7a57664dff47bf04e7849074d3ab62271ecad0446ef0322", + TARGET_ARCHITECTURES[ + 1 + ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.9.16-slim-bullseye@sha256:c0b8d9c28c5717074c481dfdf1d8bd3aaa0b83a5e2a9e37c77be7af19d70d0ce", + TARGET_ARCHITECTURES[ + 2 + ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.10.10-bullseye@sha256:5a81c8c86132e504db2b7329f5e41cd32bddebf811d83a0d356edbca0d81135c", + TARGET_ARCHITECTURES[ + 3 + ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.10.10-slim-bullseye@sha256:289c6beb568991811629c91cdcb3841ceb95bf0a017c3e411f4b71e18043ef15", } PYTHON_VERSIONS: dict = { -# TARGET_ARCHITECTURES[0]: "3.9.16", -# TARGET_ARCHITECTURES[1]: "3.9.16", -# TARGET_ARCHITECTURES[2]: "3.10.10", -# TARGET_ARCHITECTURES[3]: "3.10.10", + TARGET_ARCHITECTURES[0]: "3.9.16", + TARGET_ARCHITECTURES[1]: "3.9.16", + TARGET_ARCHITECTURES[2]: "3.10.10", + TARGET_ARCHITECTURES[3]: "3.10.10", } # As we are running the server with an unprivileged user, we need to use diff --git a/dockerbuild/images.py b/dockerbuild/images.py index 9dfdc72..75fb193 100644 --- a/dockerbuild/images.py +++ b/dockerbuild/images.py @@ -1,10 +1,32 @@ -from dockerbuild.baseimage import ExampleApplicationImage +from pathlib import Path +from typing import Optional import docker -from pathlib import Path from docker.models.images import Image -class AITicketPoetryImage(ExampleApplicationImage): +from dockerbuild.constants import ( + BASE_IMAGES, + APPLICATION_SERVER_PORT, +) + + +class DockerImage: + def __init__( + self, + docker_client: docker.client, + target_architecture: str, + version: str, + ): + self.docker_client: docker.client = docker_client + self.dockerfile_name: str = "Dockerfile" + self.dockerfile_directory: Optional[Path] = None + self.image_name: Optional[str] = None + self.image_tag: Optional[str] = None + self.version: Optional[str] = version + self.target_architecture: str = target_architecture + + +class AITicketPoetryImage(DockerImage): def __init__( self, docker_client: docker.client, @@ -15,11 +37,48 @@ def __init__( # An image name is made up of slash-separated name components, # optionally prefixed by a registry hostname. # see: https://docs.docker.com/engine/reference/commandline/tag/ - self.image_name: str = "h4ckermike/ai-ticket" - self.dockerfile_directory: Path = Path(__file__).parent.parent.resolve() + self.image_name: str = "h4ckermike/ai-ticket:test_ai_ticket" + self.dockerfile_directory: Path = Path(__file__).parent.resolve() + + def build(self) -> Image: + self.image_tag: str = f"{self.version}-{self.target_architecture}" + + buildargs: dict[str, str] = { + "BASE_IMAGE": BASE_IMAGES[self.target_architecture], + "APPLICATION_SERVER_PORT": APPLICATION_SERVER_PORT, + } + + image: Image = self.docker_client.images.build( + path=str(self.dockerfile_directory), + dockerfile=self.dockerfile_name, + tag=f"{self.image_name}:{self.image_tag}", + buildargs=buildargs, + )[0] + return image + + +class ExampleApplicationImage(DockerImage): + def build( + self, + target: str, + base_image_tag: str, + ) -> Image: + self.image_tag = f"{self.version}-{self.target_architecture}" + buildargs: dict[str, str] = { + "BASE_IMAGE": base_image_tag, + } + image: Image = self.docker_client.images.build( + path=str(self.dockerfile_directory), + dockerfile=self.dockerfile_name, + tag=f"{self.image_name}:{self.image_tag}", + target=target, + buildargs=buildargs, + )[0] + return image -class AutoGptImage(ExampleApplicationImage): + +class ActBaseImage(ExampleApplicationImage): def __init__( self, docker_client: docker.client, @@ -27,31 +86,14 @@ def __init__( version: str, ): super().__init__(docker_client, target_architecture, version) - self.image_name: str = "h4ckermike/act_base" + # An image name is made up of slash-separated name components, + # optionally prefixed by a registry hostname. + # see: https://docs.docker.com/engine/reference/commandline/tag/ + self.image_name: str = "act_base" self.dockerfile_directory: Path = ( Path(__file__).parent.parent.resolve() / "vendor" / "act_base" ) -class ActBaseImage(ExampleApplicationImage): - def __init__( self, docker_client: docker.client, target_architecture: str, version: str, - ): - super().__init__(docker_client, target_architecture, version) - self.image_name: str = "h4ckermike/autogpt" - self.dockerfile_directory: Path = ( - Path(__file__).parent.parent.resolve() - / "vendor" - / "Auto-GPT/" - ) - -class OpenAIImage(ExampleApplicationImage): - def __init__( self, docker_client: docker.client,target_architecture: str,version: str ): - super().__init__(docker_client, target_architecture, version) - self.image_name: str = "h4ckermike/mockopenai" - self.dockerfile_directory: Path = ( - Path(__file__).parent.parent.resolve() - / "vendor" - / "lollms/" - ) diff --git a/dockerbuild/publish.py b/dockerbuild/publish.py index 714b77a..b76999d 100644 --- a/dockerbuild/publish.py +++ b/dockerbuild/publish.py @@ -1,13 +1,10 @@ import click import docker - from docker.client import DockerClient - from dockerbuild.constants import ( TARGET_ARCHITECTURES, ) -import dockerbuild.images -from dockerbuild.baseimage import BASE_IMAGE +from dockerbuild.images import ActBaseImage @click.command() @click.option( @@ -30,24 +27,25 @@ def main( version_tag: str, registry: str, ) -> None: - docker_client: DockerClient = docker.from_env() + docker_client: DockerClient = docker.from_env() + for target_architecture in TARGET_ARCHITECTURES: - #import pdb - #pdb.set_trace() - for x in dir(dockerbuild.images) : - cls = getattr(dockerbuild.images,x) - if not isinstance(cls,type): - continue - print(cls) - new_uvicorn_gunicorn_poetry_image = (cls(docker_client,target_architecture,version_tag)) - if new_uvicorn_gunicorn_poetry_image: - for old_image in docker_client.images.list(new_uvicorn_gunicorn_poetry_image.image_name): - for tag in old_image.tags: - print("tag",tag) - docker_client.images.remove(tag, force=True) - new_uvicorn_gunicorn_poetry_image.build( - target=target_architecture,base_image_tag=BASE_IMAGE - ) + new_uvicorn_gunicorn_poetry_image: UvicornPoetryImage = ( + ActBaseImage(docker_client, target_architecture, version_tag) + ) + + # Delete old existing images + for old_image in docker_client.images.list( + new_uvicorn_gunicorn_poetry_image.image_name + ): + for tag in old_image.tags: + docker_client.images.remove(tag, force=True) + + #new_uvicorn_gunicorn_poetry_image.build() + + # https://docs.docker.com/engine/reference/commandline/push/ + # https://docs.docker.com/engine/reference/commandline/tag/ + # https://docs.docker.com/engine/reference/commandline/image_tag/ if docker_hub_username and docker_hub_password: login_kwargs: dict = { "username": docker_hub_username, @@ -55,7 +53,9 @@ def main( } if registry: login_kwargs["registry"] = registry + docker_client.login(**login_kwargs) + if registry: repository: str = ( f"{registry}/{new_uvicorn_gunicorn_poetry_image.image_name}" @@ -63,15 +63,14 @@ def main( else: repository: str = new_uvicorn_gunicorn_poetry_image.image_name - if False: - for line in docker_client.images.push( - repository, - tag=new_uvicorn_gunicorn_poetry_image.image_tag, - stream=True, - decode=True, - ): - print(line) - docker_client.close() + for line in docker_client.images.push( + repository, + tag=new_uvicorn_gunicorn_poetry_image.image_tag, + stream=True, + decode=True, + ): + print(line) + docker_client.close() if __name__ == "__main__": From 815bb5c22b0668713b3ac473bb7761be581fd5f0 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Tue, 3 Oct 2023 12:07:00 -0400 Subject: [PATCH 26/57] now building --- Dockerfile | 29 +++++-------- docker-compose.yml | 69 ++++++++++++++++++++---------- dockerbuild/Dockerfile | 49 --------------------- dockerbuild/images.py | 96 ++++++++++++------------------------------ dockerbuild/publish.py | 59 +++++++++++++------------- pyproject.toml | 7 ++- 6 files changed, 120 insertions(+), 189 deletions(-) delete mode 100644 dockerbuild/Dockerfile diff --git a/Dockerfile b/Dockerfile index 3517970..4545601 100644 --- a/Dockerfile +++ b/Dockerfile @@ -4,7 +4,8 @@ # Dockerfile: https://github.com/max-pfeiffer/python-poetry/blob/main/build/Dockerfile ARG BASE_IMAGE FROM ${BASE_IMAGE} -ARG APPLICATION_SERVER_PORT +RUN apt update +RUN apt install -y git LABEL maintainer="Mike DuPont " @@ -16,36 +17,28 @@ ENV PYTHONUNBUFFERED=1 \ POETRY_VIRTUALENVS_IN_PROJECT=true \ POETRY_CACHE_DIR="/application_root/.cache" \ VIRTUAL_ENVIRONMENT_PATH="/application_root/.venv" \ - APPLICATION_SERVER_PORT=$APPLICATION_SERVER_PORT + POETRY_HOME="/opt/poetry" +# https://python-poetry.org/docs/#installing-manually +RUN python -m venv ${VIRTUAL_ENVIRONMENT_PATH} + # Adding the virtual environment to PATH in order to "activate" it. # https://docs.python.org/3/library/venv.html#how-venvs-work ENV PATH="$VIRTUAL_ENVIRONMENT_PATH/bin:$PATH" -# Principle of least privilege: create a new user for running the application -RUN groupadd -g 1001 python_application && \ - useradd -r -u 1001 -g python_application python_application - # Set the WORKDIR to the application root. # https://www.uvicorn.org/settings/#development # https://docs.docker.com/engine/reference/builder/#workdir WORKDIR ${PYTHONPATH} -RUN chown python_application:python_application ${PYTHONPATH} -# Create cache directory and set permissions because user 1001 has no home -# and poetry cache directory. # https://python-poetry.org/docs/configuration/#cache-directory -RUN mkdir ${POETRY_CACHE_DIR} && chown python_application:python_application ${POETRY_CACHE_DIR} - -# Use the unpriveledged user to run the application -USER 1001 +RUN mkdir ${POETRY_CACHE_DIR} WORKDIR /opt/ai-ticket COPY pyproject.toml /opt/ai-ticket/ COPY setup.cfg /opt/ai-ticket/ +COPY README.md /opt/ai-ticket/ COPY requirements.txt /opt/ai-ticket/ COPY ./src/ /opt/ai-ticket/src/ -RUN pip install /opt/ai-ticket/ - -RUN apt update -RUN apt install -y git -RUN pip install --trusted-host pypi.python.org -r requirements.txt + +RUN ls ${VIRTUAL_ENVIRONMENT_PATH}/bin/activate +RUN pip install /opt/ai-ticket/ && pip install --trusted-host pypi.python.org -r requirements.txt diff --git a/docker-compose.yml b/docker-compose.yml index 1172068..1f0540c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,16 +1,49 @@ version: '3' services: - ai_ticket: - build: . - act_base: - # the base image of github action + + act_base: #root base of action build: vendor/act_base + + poetry_base: # use poetry + build: + context: vendor/python-poetry/build + args: + OFFICIAL_PYTHON_IMAGE: act_base + POETRY_VERSION: 1.6.1 + depends_on: + - act_base + + ai_ticket: # the ticket to unite + build: + context: . + args: + BASE_IMAGE: act_base + depends_on: + - poetry_base + + basic_agent: #basic agnet + build: + context: vendor/basic_agent/ + args: + OFFICIAL_PYTHON_IMAGE: act_base + depends_on: + - ai_ticket - autogpt: - - #entrypoint: bash -c "poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y" - entrypoint: bash -c "poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y --ai-name 'meta-autogpt' --ai-role 'you will introspect autogpt and reveal its internals via reflection and comprehension' --ai-goal 'Observe your behaviour' --ai-goal 'Reflect over your outcomes' --ai-goal 'Orient yourself to your knowledge' --ai-goal 'Decide on your next step' --ai-goal 'Act on your chosen next experiment' " + mockopenai: # interface + depends_on: + - ai_ticket + environment: + - GITHUB_PAT=${GITHUB_PAT} + - GITHUB_REPO=${GITHUB_REPO} + build: + context: vendor/lollms/ + args: + OFFICIAL_PYTHON_IMAGE: localhost/ai-ticket_ai_ticket:latest + ports: + - "5000:8080" + autogpt: #the beast + entrypoint: bash -c "poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y --ai-name 'meta-autogpt' --ai-role 'you will introspect autogpt and reveal its internals via reflection and comprehension' --ai-goal 'Observe your behaviour' --ai-goal 'Reflect over your outcomes' --ai-goal 'Orient yourself to your knowledge' --ai-goal 'Decide on your next step' --ai-goal 'Act on your chosen next experiment' " # uncomment thse next 3 lines for debugging @@ -24,20 +57,10 @@ services: - OPENAI_API_BASE=http://mockopenai:8080/v1 build: context: vendor/Auto-GPT/ - depends_on: - - ai_ticket - - mockopenai + dockerfile: slim/Dockerfile + args: + OFFICIAL_PYTHON_IMAGE: localhost/ai-ticket_ai_ticket:latest - mockopenai: depends_on: - - ai_ticket - - environment: - - GITHUB_PAT=${GITHUB_PAT} - - GITHUB_REPO=${GITHUB_REPO} - - build: - context: vendor/lollms/ - ports: - - "5000:8080" - + - basic_agent + - mockopenai diff --git a/dockerbuild/Dockerfile b/dockerbuild/Dockerfile deleted file mode 100644 index b8a9eaa..0000000 --- a/dockerbuild/Dockerfile +++ /dev/null @@ -1,49 +0,0 @@ -# The Poetry installation is provided through the base image. Please check the -# base image if you interested in the details. -# Base image: https://hub.docker.com/r/pfeiffermax/python-poetry -# Dockerfile: https://github.com/max-pfeiffer/python-poetry/blob/main/build/Dockerfile -ARG BASE_IMAGE -FROM ${BASE_IMAGE} -ARG APPLICATION_SERVER_PORT - -LABEL maintainer="Max Pfeiffer " - - # https://docs.python.org/3/using/cmdline.html#envvar-PYTHONUNBUFFERED -ENV PYTHONUNBUFFERED=1 \ - # https://docs.python.org/3/using/cmdline.html#envvar-PYTHONDONTWRITEBYTECODE - PYTHONDONTWRITEBYTECODE=1 \ - PYTHONPATH=/application_root \ - # https://python-poetry.org/docs/configuration/#virtualenvsin-project - POETRY_VIRTUALENVS_IN_PROJECT=true \ - POETRY_CACHE_DIR="/application_root/.cache" \ - VIRTUAL_ENVIRONMENT_PATH="/application_root/.venv" \ - APPLICATION_SERVER_PORT=$APPLICATION_SERVER_PORT - -# Adding the virtual environment to PATH in order to "activate" it. -# https://docs.python.org/3/library/venv.html#how-venvs-work -ENV PATH="$VIRTUAL_ENVIRONMENT_PATH/bin:$PATH" - -# Principle of least privilege: create a new user for running the application -RUN groupadd -g 1001 python_application && \ - useradd -r -u 1001 -g python_application python_application - -# Set the WORKDIR to the application root. -# https://www.uvicorn.org/settings/#development -# https://docs.docker.com/engine/reference/builder/#workdir -WORKDIR ${PYTHONPATH} -RUN chown python_application:python_application ${PYTHONPATH} - -# Create cache directory and set permissions because user 1001 has no home -# and poetry cache directory. -# https://python-poetry.org/docs/configuration/#cache-directory -RUN mkdir ${POETRY_CACHE_DIR} && chown python_application:python_application ${POETRY_CACHE_DIR} - -# Document the exposed port -# https://docs.docker.com/engine/reference/builder/#expose -EXPOSE ${APPLICATION_SERVER_PORT} - -# Use the unpriveledged user to run the application -USER 1001 - -# Run the uvicorn application server. -CMD exec uvicorn --workers 1 --host 0.0.0.0 --port $APPLICATION_SERVER_PORT app.main:app diff --git a/dockerbuild/images.py b/dockerbuild/images.py index 75fb193..9dfdc72 100644 --- a/dockerbuild/images.py +++ b/dockerbuild/images.py @@ -1,32 +1,10 @@ -from pathlib import Path -from typing import Optional +from dockerbuild.baseimage import ExampleApplicationImage import docker +from pathlib import Path from docker.models.images import Image -from dockerbuild.constants import ( - BASE_IMAGES, - APPLICATION_SERVER_PORT, -) - - -class DockerImage: - def __init__( - self, - docker_client: docker.client, - target_architecture: str, - version: str, - ): - self.docker_client: docker.client = docker_client - self.dockerfile_name: str = "Dockerfile" - self.dockerfile_directory: Optional[Path] = None - self.image_name: Optional[str] = None - self.image_tag: Optional[str] = None - self.version: Optional[str] = version - self.target_architecture: str = target_architecture - - -class AITicketPoetryImage(DockerImage): +class AITicketPoetryImage(ExampleApplicationImage): def __init__( self, docker_client: docker.client, @@ -37,48 +15,11 @@ def __init__( # An image name is made up of slash-separated name components, # optionally prefixed by a registry hostname. # see: https://docs.docker.com/engine/reference/commandline/tag/ - self.image_name: str = "h4ckermike/ai-ticket:test_ai_ticket" - self.dockerfile_directory: Path = Path(__file__).parent.resolve() - - def build(self) -> Image: - self.image_tag: str = f"{self.version}-{self.target_architecture}" - - buildargs: dict[str, str] = { - "BASE_IMAGE": BASE_IMAGES[self.target_architecture], - "APPLICATION_SERVER_PORT": APPLICATION_SERVER_PORT, - } - - image: Image = self.docker_client.images.build( - path=str(self.dockerfile_directory), - dockerfile=self.dockerfile_name, - tag=f"{self.image_name}:{self.image_tag}", - buildargs=buildargs, - )[0] - return image - - -class ExampleApplicationImage(DockerImage): - def build( - self, - target: str, - base_image_tag: str, - ) -> Image: - self.image_tag = f"{self.version}-{self.target_architecture}" + self.image_name: str = "h4ckermike/ai-ticket" + self.dockerfile_directory: Path = Path(__file__).parent.parent.resolve() - buildargs: dict[str, str] = { - "BASE_IMAGE": base_image_tag, - } - image: Image = self.docker_client.images.build( - path=str(self.dockerfile_directory), - dockerfile=self.dockerfile_name, - tag=f"{self.image_name}:{self.image_tag}", - target=target, - buildargs=buildargs, - )[0] - return image - -class ActBaseImage(ExampleApplicationImage): +class AutoGptImage(ExampleApplicationImage): def __init__( self, docker_client: docker.client, @@ -86,14 +27,31 @@ def __init__( version: str, ): super().__init__(docker_client, target_architecture, version) - # An image name is made up of slash-separated name components, - # optionally prefixed by a registry hostname. - # see: https://docs.docker.com/engine/reference/commandline/tag/ - self.image_name: str = "act_base" + self.image_name: str = "h4ckermike/act_base" self.dockerfile_directory: Path = ( Path(__file__).parent.parent.resolve() / "vendor" / "act_base" ) +class ActBaseImage(ExampleApplicationImage): + def __init__( self, docker_client: docker.client, target_architecture: str, version: str, + ): + super().__init__(docker_client, target_architecture, version) + self.image_name: str = "h4ckermike/autogpt" + self.dockerfile_directory: Path = ( + Path(__file__).parent.parent.resolve() + / "vendor" + / "Auto-GPT/" + ) + +class OpenAIImage(ExampleApplicationImage): + def __init__( self, docker_client: docker.client,target_architecture: str,version: str ): + super().__init__(docker_client, target_architecture, version) + self.image_name: str = "h4ckermike/mockopenai" + self.dockerfile_directory: Path = ( + Path(__file__).parent.parent.resolve() + / "vendor" + / "lollms/" + ) diff --git a/dockerbuild/publish.py b/dockerbuild/publish.py index b76999d..714b77a 100644 --- a/dockerbuild/publish.py +++ b/dockerbuild/publish.py @@ -1,10 +1,13 @@ import click import docker + from docker.client import DockerClient + from dockerbuild.constants import ( TARGET_ARCHITECTURES, ) -from dockerbuild.images import ActBaseImage +import dockerbuild.images +from dockerbuild.baseimage import BASE_IMAGE @click.command() @click.option( @@ -27,25 +30,24 @@ def main( version_tag: str, registry: str, ) -> None: - docker_client: DockerClient = docker.from_env() - + docker_client: DockerClient = docker.from_env() for target_architecture in TARGET_ARCHITECTURES: - new_uvicorn_gunicorn_poetry_image: UvicornPoetryImage = ( - ActBaseImage(docker_client, target_architecture, version_tag) - ) - - # Delete old existing images - for old_image in docker_client.images.list( - new_uvicorn_gunicorn_poetry_image.image_name - ): - for tag in old_image.tags: - docker_client.images.remove(tag, force=True) - - #new_uvicorn_gunicorn_poetry_image.build() - - # https://docs.docker.com/engine/reference/commandline/push/ - # https://docs.docker.com/engine/reference/commandline/tag/ - # https://docs.docker.com/engine/reference/commandline/image_tag/ + #import pdb + #pdb.set_trace() + for x in dir(dockerbuild.images) : + cls = getattr(dockerbuild.images,x) + if not isinstance(cls,type): + continue + print(cls) + new_uvicorn_gunicorn_poetry_image = (cls(docker_client,target_architecture,version_tag)) + if new_uvicorn_gunicorn_poetry_image: + for old_image in docker_client.images.list(new_uvicorn_gunicorn_poetry_image.image_name): + for tag in old_image.tags: + print("tag",tag) + docker_client.images.remove(tag, force=True) + new_uvicorn_gunicorn_poetry_image.build( + target=target_architecture,base_image_tag=BASE_IMAGE + ) if docker_hub_username and docker_hub_password: login_kwargs: dict = { "username": docker_hub_username, @@ -53,9 +55,7 @@ def main( } if registry: login_kwargs["registry"] = registry - docker_client.login(**login_kwargs) - if registry: repository: str = ( f"{registry}/{new_uvicorn_gunicorn_poetry_image.image_name}" @@ -63,14 +63,15 @@ def main( else: repository: str = new_uvicorn_gunicorn_poetry_image.image_name - for line in docker_client.images.push( - repository, - tag=new_uvicorn_gunicorn_poetry_image.image_tag, - stream=True, - decode=True, - ): - print(line) - docker_client.close() + if False: + for line in docker_client.images.push( + repository, + tag=new_uvicorn_gunicorn_poetry_image.image_tag, + stream=True, + decode=True, + ): + print(line) + docker_client.close() if __name__ == "__main__": diff --git a/pyproject.toml b/pyproject.toml index 94356c3..2158d50 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -python = "^3.7" +#python = "^3.11" requires = [ "setuptools>=42", "wheel", @@ -39,6 +39,11 @@ classifiers = [ # [tool.poetry.scripts] # [tool.poetry.dependencies] +[tool.poetry.dependencies] +#python = "3.11.4" +click = "8.1.7" +docker = "6.1.3" + [tool.poetry.group.dev.dependencies] python = "^3.7" pytest = "7.4.0" From 1469a319c6893dda3ab2f31aada078c1b2dd4e4e Mon Sep 17 00:00:00 2001 From: mike dupont Date: Tue, 3 Oct 2023 12:35:58 -0400 Subject: [PATCH 27/57] blac --- src/ai_ticket/__init__.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/ai_ticket/__init__.py b/src/ai_ticket/__init__.py index 37756e8..e0f23ce 100644 --- a/src/ai_ticket/__init__.py +++ b/src/ai_ticket/__init__.py @@ -1,7 +1,13 @@ +""" +This is a simple function to look for the role name in the message of autogpt +yes it is very specific and will have to be more generalized. +""" import re + pattern = r'(```)?\s*{\s*"messages"\s*:\s*\[\s*\{\s*"role"\s*:\s*"system"\s*,\s*\"content"\s*:\s*"You\s+are\s+(?P[^,]+),.*' + def find_name(text): if not text: return False From 0f82de739266e21c06d5dd7a372de5ebfecb9c7d Mon Sep 17 00:00:00 2001 From: mike dupont Date: Wed, 11 Oct 2023 12:55:44 -0400 Subject: [PATCH 28/57] adding ignore --- .dockerignore | 4 ++++ .gitignore | 1 + 2 files changed, 5 insertions(+) create mode 100644 .dockerignore diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..6460eab --- /dev/null +++ b/.dockerignore @@ -0,0 +1,4 @@ +.env +.env~ +.secrets +.secrets~ \ No newline at end of file diff --git a/.gitignore b/.gitignore index d54b7d5..a4493ad 100644 --- a/.gitignore +++ b/.gitignore @@ -163,3 +163,4 @@ cython_debug/ *~ /dckr_pat_.txt /github_pat.txt +/.secrets From 7d5a08ac511bf4cdfb8986149b286cb92bfcd45e Mon Sep 17 00:00:00 2001 From: mike dupont Date: Wed, 11 Oct 2023 16:24:16 -0400 Subject: [PATCH 29/57] now building and running locally --- docker-compose.yml | 25 ++++++++++--- dockerbuild/.gitignore | 1 - dockerbuild/Readme.md | 1 - dockerbuild/__init__.py | 0 dockerbuild/baseimage.py | 71 ------------------------------------ dockerbuild/constants.py | 30 --------------- dockerbuild/images.py | 57 ----------------------------- dockerbuild/publish.py | 79 ---------------------------------------- vendor/Auto-GPT | 2 +- vendor/basic_agent | 2 +- vendor/lollms | 2 +- vendor/python-poetry | 2 +- 12 files changed, 23 insertions(+), 249 deletions(-) delete mode 100644 dockerbuild/.gitignore delete mode 100644 dockerbuild/Readme.md delete mode 100644 dockerbuild/__init__.py delete mode 100644 dockerbuild/baseimage.py delete mode 100644 dockerbuild/constants.py delete mode 100644 dockerbuild/images.py delete mode 100644 dockerbuild/publish.py diff --git a/docker-compose.yml b/docker-compose.yml index 1f0540c..4a6e79a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -3,29 +3,37 @@ services: act_base: #root base of action build: vendor/act_base + image: h4ckermike/act_base poetry_base: # use poetry + image: h4ckermike/poetry_base build: context: vendor/python-poetry/build args: - OFFICIAL_PYTHON_IMAGE: act_base + BASE_IMAGE: h4ckermike/act_base POETRY_VERSION: 1.6.1 + entrypoint: /bin/bash + stdin_open: true # docker run -i + tty: true # docker run -t + depends_on: - act_base ai_ticket: # the ticket to unite + image: h4ckermike/ai_ticket build: context: . args: - BASE_IMAGE: act_base + BASE_IMAGE: h4ckermike/poetry_base depends_on: - poetry_base - basic_agent: #basic agnet + basic_agent: #basic agent + image: h4ckermike/basic_agent build: context: vendor/basic_agent/ args: - OFFICIAL_PYTHON_IMAGE: act_base + BASE_IMAGE: h4ckermike/ai_ticket depends_on: - ai_ticket @@ -35,10 +43,14 @@ services: environment: - GITHUB_PAT=${GITHUB_PAT} - GITHUB_REPO=${GITHUB_REPO} + image: h4ckermike/mockopenai build: context: vendor/lollms/ args: - OFFICIAL_PYTHON_IMAGE: localhost/ai-ticket_ai_ticket:latest + BASE_IMAGE: h4ckermike/ai_ticket + volumes: + - /var/run/docker.sock:/var/run/docker.sock + ports: - "5000:8080" @@ -59,8 +71,9 @@ services: context: vendor/Auto-GPT/ dockerfile: slim/Dockerfile args: - OFFICIAL_PYTHON_IMAGE: localhost/ai-ticket_ai_ticket:latest + BASE_IMAGE: h4ckermike/basic_agent depends_on: - basic_agent - mockopenai + image: h4ckermike/autogpt diff --git a/dockerbuild/.gitignore b/dockerbuild/.gitignore deleted file mode 100644 index bee8a64..0000000 --- a/dockerbuild/.gitignore +++ /dev/null @@ -1 +0,0 @@ -__pycache__ diff --git a/dockerbuild/Readme.md b/dockerbuild/Readme.md deleted file mode 100644 index 502580d..0000000 --- a/dockerbuild/Readme.md +++ /dev/null @@ -1 +0,0 @@ -taken from pfeiffermax/uvicorn-poetry diff --git a/dockerbuild/__init__.py b/dockerbuild/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/dockerbuild/baseimage.py b/dockerbuild/baseimage.py deleted file mode 100644 index fe9dac4..0000000 --- a/dockerbuild/baseimage.py +++ /dev/null @@ -1,71 +0,0 @@ -from pathlib import Path -from typing import Optional - -import docker -from docker.models.images import Image - -from dockerbuild.constants import ( - BASE_IMAGES, - APPLICATION_SERVER_PORT, -) - -# -BASE_IMAGE="nikolaik/python-nodejs:python3.10-nodejs20" - -class DockerImage: - def __init__( - self, - docker_client: docker.client, - target_architecture: str, - version: str, - ): - self.docker_client: docker.client = docker_client - self.dockerfile_name: str = "Dockerfile" - self.dockerfile_directory: Optional[Path] = None - self.image_name: Optional[str] = None - self.image_tag: Optional[str] = None - self.version: Optional[str] = version - self.target_architecture: str = target_architecture - - def build(self) -> Image: - self.image_tag: str = f"{self.version}-{self.target_architecture}" - buildargs: dict[str, str] = { - "BASE_IMAGE": BASE_IMAGE, - "APPLICATION_SERVER_PORT": APPLICATION_SERVER_PORT, } - image: Image = self.docker_client.images.build( - path=str(self.dockerfile_directory), - dockerfile=self.dockerfile_name, - tag=f"{self.image_name}:{self.image_tag}", - buildargs=buildargs,)[0] - return image - -class ExampleApplicationImage(DockerImage): - def build( - self, - target: str, - base_image_tag: str, - ) -> Image: - - self.image_tag = f"{self.version}-{self.target_architecture}" - - buildargs: dict[str, str] = { - "BASE_IMAGE": base_image_tag, - } - print("buildargs",dict( - args=buildargs, - path=str(self.dockerfile_directory), - dockerfile=self.dockerfile_name, - tag=f"{self.image_name}:{self.image_tag}", - #target=target, - buildargs=buildargs, - )) - - image: Image = self.docker_client.images.build( - - path=str(self.dockerfile_directory), - dockerfile=self.dockerfile_name, - tag=f"{self.image_name}:{self.image_tag}", - #target=target, - buildargs=buildargs, - )[0] - return image diff --git a/dockerbuild/constants.py b/dockerbuild/constants.py deleted file mode 100644 index 1f85ba6..0000000 --- a/dockerbuild/constants.py +++ /dev/null @@ -1,30 +0,0 @@ -TARGET_ARCHITECTURES: list[str] = [ - "python3.9.16-bullseye", - "python3.9.16-slim-bullseye", - "python3.10.10-bullseye", - "python3.10.10-slim-bullseye", -] -BASE_IMAGES: dict = { - TARGET_ARCHITECTURES[ - 0 - ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.9.16-bullseye@sha256:54037cfdca026b17e7a57664dff47bf04e7849074d3ab62271ecad0446ef0322", - TARGET_ARCHITECTURES[ - 1 - ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.9.16-slim-bullseye@sha256:c0b8d9c28c5717074c481dfdf1d8bd3aaa0b83a5e2a9e37c77be7af19d70d0ce", - TARGET_ARCHITECTURES[ - 2 - ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.10.10-bullseye@sha256:5a81c8c86132e504db2b7329f5e41cd32bddebf811d83a0d356edbca0d81135c", - TARGET_ARCHITECTURES[ - 3 - ]: "pfeiffermax/python-poetry:1.2.0-poetry1.4.1-python3.10.10-slim-bullseye@sha256:289c6beb568991811629c91cdcb3841ceb95bf0a017c3e411f4b71e18043ef15", -} -PYTHON_VERSIONS: dict = { - TARGET_ARCHITECTURES[0]: "3.9.16", - TARGET_ARCHITECTURES[1]: "3.9.16", - TARGET_ARCHITECTURES[2]: "3.10.10", - TARGET_ARCHITECTURES[3]: "3.10.10", -} - -# As we are running the server with an unprivileged user, we need to use -# a high port. -APPLICATION_SERVER_PORT: str = "8000" diff --git a/dockerbuild/images.py b/dockerbuild/images.py deleted file mode 100644 index 9dfdc72..0000000 --- a/dockerbuild/images.py +++ /dev/null @@ -1,57 +0,0 @@ -from dockerbuild.baseimage import ExampleApplicationImage - -import docker -from pathlib import Path -from docker.models.images import Image - -class AITicketPoetryImage(ExampleApplicationImage): - def __init__( - self, - docker_client: docker.client, - target_architecture: str, - version: str, - ): - super().__init__(docker_client, target_architecture, version) - # An image name is made up of slash-separated name components, - # optionally prefixed by a registry hostname. - # see: https://docs.docker.com/engine/reference/commandline/tag/ - self.image_name: str = "h4ckermike/ai-ticket" - self.dockerfile_directory: Path = Path(__file__).parent.parent.resolve() - - -class AutoGptImage(ExampleApplicationImage): - def __init__( - self, - docker_client: docker.client, - target_architecture: str, - version: str, - ): - super().__init__(docker_client, target_architecture, version) - self.image_name: str = "h4ckermike/act_base" - self.dockerfile_directory: Path = ( - Path(__file__).parent.parent.resolve() - / "vendor" - / "act_base" - ) -class ActBaseImage(ExampleApplicationImage): - def __init__( self, docker_client: docker.client, target_architecture: str, version: str, - ): - super().__init__(docker_client, target_architecture, version) - self.image_name: str = "h4ckermike/autogpt" - self.dockerfile_directory: Path = ( - Path(__file__).parent.parent.resolve() - / "vendor" - / "Auto-GPT/" - ) - -class OpenAIImage(ExampleApplicationImage): - def __init__( self, docker_client: docker.client,target_architecture: str,version: str ): - super().__init__(docker_client, target_architecture, version) - self.image_name: str = "h4ckermike/mockopenai" - self.dockerfile_directory: Path = ( - Path(__file__).parent.parent.resolve() - / "vendor" - / "lollms/" - ) - - diff --git a/dockerbuild/publish.py b/dockerbuild/publish.py deleted file mode 100644 index 714b77a..0000000 --- a/dockerbuild/publish.py +++ /dev/null @@ -1,79 +0,0 @@ -import click -import docker - -from docker.client import DockerClient - -from dockerbuild.constants import ( - TARGET_ARCHITECTURES, -) -import dockerbuild.images -from dockerbuild.baseimage import BASE_IMAGE - -@click.command() -@click.option( - "--docker-hub-username", - envvar="DOCKER_HUB_USERNAME", - help="Docker Hub username", -) -@click.option( - "--docker-hub-password", - envvar="DOCKER_HUB_PASSWORD", - help="Docker Hub password", -) -@click.option( - "--version-tag", envvar="GIT_TAG_NAME", required=True, help="Version Tag" -) -@click.option("--registry", envvar="REGISTRY", help="Docker registry") -def main( - docker_hub_username: str, - docker_hub_password: str, - version_tag: str, - registry: str, -) -> None: - docker_client: DockerClient = docker.from_env() - for target_architecture in TARGET_ARCHITECTURES: - #import pdb - #pdb.set_trace() - for x in dir(dockerbuild.images) : - cls = getattr(dockerbuild.images,x) - if not isinstance(cls,type): - continue - print(cls) - new_uvicorn_gunicorn_poetry_image = (cls(docker_client,target_architecture,version_tag)) - if new_uvicorn_gunicorn_poetry_image: - for old_image in docker_client.images.list(new_uvicorn_gunicorn_poetry_image.image_name): - for tag in old_image.tags: - print("tag",tag) - docker_client.images.remove(tag, force=True) - new_uvicorn_gunicorn_poetry_image.build( - target=target_architecture,base_image_tag=BASE_IMAGE - ) - if docker_hub_username and docker_hub_password: - login_kwargs: dict = { - "username": docker_hub_username, - "password": docker_hub_password, - } - if registry: - login_kwargs["registry"] = registry - docker_client.login(**login_kwargs) - if registry: - repository: str = ( - f"{registry}/{new_uvicorn_gunicorn_poetry_image.image_name}" - ) - else: - repository: str = new_uvicorn_gunicorn_poetry_image.image_name - - if False: - for line in docker_client.images.push( - repository, - tag=new_uvicorn_gunicorn_poetry_image.image_tag, - stream=True, - decode=True, - ): - print(line) - docker_client.close() - - -if __name__ == "__main__": - # pylint: disable=no-value-for-parameter - main() diff --git a/vendor/Auto-GPT b/vendor/Auto-GPT index 2c9f2ec..6026332 160000 --- a/vendor/Auto-GPT +++ b/vendor/Auto-GPT @@ -1 +1 @@ -Subproject commit 2c9f2ec8badcf331c74ed241a2df56abe4408cac +Subproject commit 6026332e9dbaea217a5db06b6a68e4174ae8c2e3 diff --git a/vendor/basic_agent b/vendor/basic_agent index ab60de9..6f68d72 160000 --- a/vendor/basic_agent +++ b/vendor/basic_agent @@ -1 +1 @@ -Subproject commit ab60de9662af21bb534ea946869c0c2ddb105e84 +Subproject commit 6f68d72bfdac569c053c03be796b1cdc34b666c5 diff --git a/vendor/lollms b/vendor/lollms index faf3084..bcbe8d6 160000 --- a/vendor/lollms +++ b/vendor/lollms @@ -1 +1 @@ -Subproject commit faf308442626da16ec165d157aeb19e0340883e5 +Subproject commit bcbe8d6de326e821dbbbd4227dffb3f001dcc696 diff --git a/vendor/python-poetry b/vendor/python-poetry index 060f378..af1c7a2 160000 --- a/vendor/python-poetry +++ b/vendor/python-poetry @@ -1 +1 @@ -Subproject commit 060f378c73140a999201ca3425ddb00a1b5bcaa2 +Subproject commit af1c7a2092b2fb3fca599c98c6a7e1d1866b38b4 From ab52edc3babdab720909d83ab8f3ca027fe63a44 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Wed, 11 Oct 2023 16:49:38 -0400 Subject: [PATCH 30/57] the docker controller this is the first step in being able to control the execution of the autogpt from outside. it calls to the mock api and that can influence the execution. --- docker-compose.yml | 7 ++++--- vendor/lollms | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 4a6e79a..08b5344 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -12,9 +12,9 @@ services: args: BASE_IMAGE: h4ckermike/act_base POETRY_VERSION: 1.6.1 - entrypoint: /bin/bash - stdin_open: true # docker run -i - tty: true # docker run -t + #entrypoint: /bin/bash + #stdin_open: true # docker run -i + #tty: true # docker run -t depends_on: - act_base @@ -50,6 +50,7 @@ services: BASE_IMAGE: h4ckermike/ai_ticket volumes: - /var/run/docker.sock:/var/run/docker.sock + - ./vendor/lollms/:/app/ #mount the source in for editing without reboot ports: - "5000:8080" diff --git a/vendor/lollms b/vendor/lollms index bcbe8d6..7a7a769 160000 --- a/vendor/lollms +++ b/vendor/lollms @@ -1 +1 @@ -Subproject commit bcbe8d6de326e821dbbbd4227dffb3f001dcc696 +Subproject commit 7a7a76938a2ec5e6ff31e1f3afdb11027b2a65e7 From 2fd15b44f69db0a3bf72f26b978e8faebd768246 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Wed, 11 Oct 2023 18:35:00 -0400 Subject: [PATCH 31/57] update submodules --- vendor/lollms | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/lollms b/vendor/lollms index 7a7a769..57a29f5 160000 --- a/vendor/lollms +++ b/vendor/lollms @@ -1 +1 @@ -Subproject commit 7a7a76938a2ec5e6ff31e1f3afdb11027b2a65e7 +Subproject commit 57a29f5141fbac2ebe6ce1fe691a150ca7cb08bf From 334194d5cd34b49a211e968dd2e9655d14112356 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Thu, 12 Oct 2023 06:31:35 -0400 Subject: [PATCH 32/57] update --- vendor/lollms | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/lollms b/vendor/lollms index 57a29f5..deb442a 160000 --- a/vendor/lollms +++ b/vendor/lollms @@ -1 +1 @@ -Subproject commit 57a29f5141fbac2ebe6ce1fe691a150ca7cb08bf +Subproject commit deb442ac93b2f6ae26b5b49b428e4211a2b6609f From e89d752064cf16a60dfba94d035db2617b93f471 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Thu, 12 Oct 2023 17:50:42 -0400 Subject: [PATCH 33/57] adding langchain --- docker-compose.yml | 1 + vendor/lollms | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 08b5344..6b51f6f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -43,6 +43,7 @@ services: environment: - GITHUB_PAT=${GITHUB_PAT} - GITHUB_REPO=${GITHUB_REPO} + - OPENAI_API_KEY=${OPENAI_API_KEY} image: h4ckermike/mockopenai build: context: vendor/lollms/ diff --git a/vendor/lollms b/vendor/lollms index deb442a..f1f08bf 160000 --- a/vendor/lollms +++ b/vendor/lollms @@ -1 +1 @@ -Subproject commit deb442ac93b2f6ae26b5b49b428e4211a2b6609f +Subproject commit f1f08bfc257c2eba200c9ac10f9eaedce42d5538 From cc6985c04d7534e21fa4f0c256db3edcd5c7b98a Mon Sep 17 00:00:00 2001 From: mike dupont Date: Fri, 13 Oct 2023 07:21:50 -0400 Subject: [PATCH 34/57] update bench --- vendor/Auto-GPT-Benchmarks | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/Auto-GPT-Benchmarks b/vendor/Auto-GPT-Benchmarks index 9c796db..0e0b876 160000 --- a/vendor/Auto-GPT-Benchmarks +++ b/vendor/Auto-GPT-Benchmarks @@ -1 +1 @@ -Subproject commit 9c796db834299d6f44c839c46386822b7782112f +Subproject commit 0e0b876fff4fca0e34d5b7924b579cd7a7b166e2 From 78d854ed124722f4a8ed5af7f1434b24ea5c555c Mon Sep 17 00:00:00 2001 From: mike dupont Date: Sat, 14 Oct 2023 19:49:49 -0400 Subject: [PATCH 35/57] adding active submodule to the arena --- .gitmodules | 54 +++++++++++++++++++++++++++++++ vendor/Auto-GPT-Graph | 1 + vendor/Auto-GPT-Turbo | 1 + vendor/Auto-GPT-aj | 1 + vendor/Auto-GPT-chat-gpt-prompts | 1 + vendor/AutoGPT-trading | 1 + vendor/AutoGPTv2 | 1 + vendor/BillSchumacher | 1 + vendor/Godmode-GPT | 1 + vendor/Jarvis | 1 + vendor/MasonBoomPersonalAssistant | 1 + vendor/Multi-GPT | 1 + vendor/PolyGPT | 1 + vendor/ScottGPT | 1 + vendor/Swarms-Of-Sparta | 1 + vendor/TheAgency | 1 + vendor/beebot | 1 + vendor/mljar-agent | 1 + vendor/smol-developer | 1 + 19 files changed, 72 insertions(+) create mode 160000 vendor/Auto-GPT-Graph create mode 160000 vendor/Auto-GPT-Turbo create mode 160000 vendor/Auto-GPT-aj create mode 160000 vendor/Auto-GPT-chat-gpt-prompts create mode 160000 vendor/AutoGPT-trading create mode 160000 vendor/AutoGPTv2 create mode 160000 vendor/BillSchumacher create mode 160000 vendor/Godmode-GPT create mode 160000 vendor/Jarvis create mode 160000 vendor/MasonBoomPersonalAssistant create mode 160000 vendor/Multi-GPT create mode 160000 vendor/PolyGPT create mode 160000 vendor/ScottGPT create mode 160000 vendor/Swarms-Of-Sparta create mode 160000 vendor/TheAgency create mode 160000 vendor/beebot create mode 160000 vendor/mljar-agent create mode 160000 vendor/smol-developer diff --git a/.gitmodules b/.gitmodules index 40db044..e43a618 100644 --- a/.gitmodules +++ b/.gitmodules @@ -22,3 +22,57 @@ [submodule "vendor/basic_agent"] path = vendor/basic_agent url = https://github.com/meta-introspector/basic_agent +[submodule "vendor/TheAgency"] + path = vendor/TheAgency + url = https://github.com/shamantechnology/TheAgency +[submodule "vendor/mljar-agent"] + path = vendor/mljar-agent + url = https://github.com/pplonski/mljar-agent +[submodule "vendor/Swarms-Of-Sparta"] + path = vendor/Swarms-Of-Sparta + url = https://github.com/Agora-X/Swarms-Of-Sparta +[submodule "vendor/Jarvis"] + path = vendor/Jarvis + url = https://github.com/jankomisarczyk/Jarvis +[submodule "vendor/AutoGPT-trading"] + path = vendor/AutoGPT-trading + url = https://github.com/enachb/AutoGPT-trading.git +[submodule "vendor/ScottGPT"] + path = vendor/ScottGPT + url = https://github.com/scottmas/ScottGPT +[submodule "vendor/AutoGPTv2"] + path = vendor/AutoGPTv2 + url = https://github.com/taylor-ennen/AutoGPTv2 +[submodule "vendor/beebot"] + path = vendor/beebot + url = https://github.com/AutoPackAI/beebot.git +[submodule "vendor/Auto-GPT-Turbo"] + path = vendor/Auto-GPT-Turbo + url = https://github.com/lc0rp/Auto-GPT-Turbo.git +[submodule "vendor/smol-developer"] + path = vendor/smol-developer + url = https://github.com/e2b-dev/smol-developer.git +[submodule "vendor/Multi-GPT"] + path = vendor/Multi-GPT + url = https://github.com/sidhq/Multi-GPT +[submodule "vendor/Auto-GPT-aj"] + path = vendor/Auto-GPT-aj + url = https://github.com/kaqijiang/Auto-GPT-aj +[submodule "vendor/Godmode-GPT"] + path = vendor/Godmode-GPT + url = https://github.com/FOLLGAD/Godmode-GPT +[submodule "vendor/Auto-GPT-Graph"] + path = vendor/Auto-GPT-Graph + url = https://github.com/kairess/Auto-GPT-Graph +[submodule "vendor/BillSchumacher"] + path = vendor/BillSchumacher + url = https://github.com/BillSchumacher/Auto-GPT +[submodule "vendor/Auto-GPT-chat-gpt-prompts"] + path = vendor/Auto-GPT-chat-gpt-prompts + url = https://github.com/chatgpt-prompts/Auto-GPT-chat-gpt-prompts +[submodule "vendor/MasonBoomPersonalAssistant"] + path = vendor/MasonBoomPersonalAssistant + url = https://github.com/MasonBoom/AutoGPT.git +[submodule "vendor/PolyGPT"] + path = vendor/PolyGPT + url = https://github.com/polywrap/PolyGPT.git diff --git a/vendor/Auto-GPT-Graph b/vendor/Auto-GPT-Graph new file mode 160000 index 0000000..21a808c --- /dev/null +++ b/vendor/Auto-GPT-Graph @@ -0,0 +1 @@ +Subproject commit 21a808cb0887d752a4fe37cda507b8c67c8e4e73 diff --git a/vendor/Auto-GPT-Turbo b/vendor/Auto-GPT-Turbo new file mode 160000 index 0000000..911937c --- /dev/null +++ b/vendor/Auto-GPT-Turbo @@ -0,0 +1 @@ +Subproject commit 911937cb369eba494394f8d9564cc6ef853b6699 diff --git a/vendor/Auto-GPT-aj b/vendor/Auto-GPT-aj new file mode 160000 index 0000000..310a84c --- /dev/null +++ b/vendor/Auto-GPT-aj @@ -0,0 +1 @@ +Subproject commit 310a84c8ca0173c7db43f637866cf5677bd7ef50 diff --git a/vendor/Auto-GPT-chat-gpt-prompts b/vendor/Auto-GPT-chat-gpt-prompts new file mode 160000 index 0000000..f41febd --- /dev/null +++ b/vendor/Auto-GPT-chat-gpt-prompts @@ -0,0 +1 @@ +Subproject commit f41febd3ae9242961dc7e940916cd37255abf7f7 diff --git a/vendor/AutoGPT-trading b/vendor/AutoGPT-trading new file mode 160000 index 0000000..320823c --- /dev/null +++ b/vendor/AutoGPT-trading @@ -0,0 +1 @@ +Subproject commit 320823c26c755d23cba47bf18cea4467b3683d5b diff --git a/vendor/AutoGPTv2 b/vendor/AutoGPTv2 new file mode 160000 index 0000000..b4fccc1 --- /dev/null +++ b/vendor/AutoGPTv2 @@ -0,0 +1 @@ +Subproject commit b4fccc16715fb86b19fe9648eb5526fdf644663d diff --git a/vendor/BillSchumacher b/vendor/BillSchumacher new file mode 160000 index 0000000..bdd07b1 --- /dev/null +++ b/vendor/BillSchumacher @@ -0,0 +1 @@ +Subproject commit bdd07b18bea674cf756ebfb3a0a8915042d9126f diff --git a/vendor/Godmode-GPT b/vendor/Godmode-GPT new file mode 160000 index 0000000..255c18d --- /dev/null +++ b/vendor/Godmode-GPT @@ -0,0 +1 @@ +Subproject commit 255c18d5542160c428d21d3add73e73562b5d0f7 diff --git a/vendor/Jarvis b/vendor/Jarvis new file mode 160000 index 0000000..78b98a0 --- /dev/null +++ b/vendor/Jarvis @@ -0,0 +1 @@ +Subproject commit 78b98a018b4ea112665815bcd3b797dac76b35e5 diff --git a/vendor/MasonBoomPersonalAssistant b/vendor/MasonBoomPersonalAssistant new file mode 160000 index 0000000..fbc2c53 --- /dev/null +++ b/vendor/MasonBoomPersonalAssistant @@ -0,0 +1 @@ +Subproject commit fbc2c531396d59b9fded4f535cd15595cdbf4360 diff --git a/vendor/Multi-GPT b/vendor/Multi-GPT new file mode 160000 index 0000000..bdba61e --- /dev/null +++ b/vendor/Multi-GPT @@ -0,0 +1 @@ +Subproject commit bdba61e0e08541d9543dc70892b064bd11d5ca25 diff --git a/vendor/PolyGPT b/vendor/PolyGPT new file mode 160000 index 0000000..a39c7b7 --- /dev/null +++ b/vendor/PolyGPT @@ -0,0 +1 @@ +Subproject commit a39c7b7ca4aae17d27199a03401762d5a4657c9b diff --git a/vendor/ScottGPT b/vendor/ScottGPT new file mode 160000 index 0000000..4841d31 --- /dev/null +++ b/vendor/ScottGPT @@ -0,0 +1 @@ +Subproject commit 4841d31179870d9edbdc2fbd9c36062bd8efcaff diff --git a/vendor/Swarms-Of-Sparta b/vendor/Swarms-Of-Sparta new file mode 160000 index 0000000..a85b43d --- /dev/null +++ b/vendor/Swarms-Of-Sparta @@ -0,0 +1 @@ +Subproject commit a85b43d8cccd23bbdf26174056f1c333c02cdce5 diff --git a/vendor/TheAgency b/vendor/TheAgency new file mode 160000 index 0000000..4a775f2 --- /dev/null +++ b/vendor/TheAgency @@ -0,0 +1 @@ +Subproject commit 4a775f284850f8e10fca2b59d01d1e8b507cae92 diff --git a/vendor/beebot b/vendor/beebot new file mode 160000 index 0000000..e93e945 --- /dev/null +++ b/vendor/beebot @@ -0,0 +1 @@ +Subproject commit e93e9454037d97a353062975e6528dae498f0397 diff --git a/vendor/mljar-agent b/vendor/mljar-agent new file mode 160000 index 0000000..ba9b66a --- /dev/null +++ b/vendor/mljar-agent @@ -0,0 +1 @@ +Subproject commit ba9b66ab363ed7f442f61967acff6ce54a0dfadd diff --git a/vendor/smol-developer b/vendor/smol-developer new file mode 160000 index 0000000..80eb215 --- /dev/null +++ b/vendor/smol-developer @@ -0,0 +1 @@ +Subproject commit 80eb21595b1c095748341d0c463d70fe51ca94a1 From c8409a83215ff10e909103020282d60c0038a826 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Sat, 14 Oct 2023 19:53:01 -0400 Subject: [PATCH 36/57] adding in more --- .gitmodules | 6 ++++++ vendor/SuperAGI | 1 + vendor/babyagi | 1 + 3 files changed, 8 insertions(+) create mode 160000 vendor/SuperAGI create mode 160000 vendor/babyagi diff --git a/.gitmodules b/.gitmodules index e43a618..f9c0e02 100644 --- a/.gitmodules +++ b/.gitmodules @@ -76,3 +76,9 @@ [submodule "vendor/PolyGPT"] path = vendor/PolyGPT url = https://github.com/polywrap/PolyGPT.git +[submodule "vendor/SuperAGI"] + path = vendor/SuperAGI + url = https://github.com/SilenNaihin/SuperAGI.git +[submodule "vendor/babyagi"] + path = vendor/babyagi + url = https://github.com/SilenNaihin/babyagi.git diff --git a/vendor/SuperAGI b/vendor/SuperAGI new file mode 160000 index 0000000..153721f --- /dev/null +++ b/vendor/SuperAGI @@ -0,0 +1 @@ +Subproject commit 153721fae1885e01b242c4eeed01e6a172bf8784 diff --git a/vendor/babyagi b/vendor/babyagi new file mode 160000 index 0000000..d10b08c --- /dev/null +++ b/vendor/babyagi @@ -0,0 +1 @@ +Subproject commit d10b08ceea3314ac86e3832c0a9045c663a73a7c From 4838e5af019cccd80b1be1ed43db8e7324c7ff00 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Sun, 15 Oct 2023 09:23:17 -0400 Subject: [PATCH 37/57] first build --- docker-compose.yml | 104 +++++++++++++++++++++++++++++++++++++ vendor/Auto-GPT-Benchmarks | 2 +- vendor/lollms | 2 +- 3 files changed, 106 insertions(+), 2 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 6b51f6f..2576374 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -79,3 +79,107 @@ services: - basic_agent - mockopenai image: h4ckermike/autogpt + +## + autogpt-turbo: #the beast + entrypoint: bash -c "poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y --ai-name 'meta-autogpt' --ai-role 'you will introspect autogpt and reveal its internals via reflection and comprehension' --ai-goal 'Observe your behaviour' --ai-goal 'Reflect over your outcomes' --ai-goal 'Orient yourself to your knowledge' --ai-goal 'Decide on your next step' --ai-goal 'Act on your chosen next experiment' " + + + # uncomment thse next 3 lines for debugging + #entrypoint: /bin/bash + #stdin_open: true # docker run -i + #tty: true # docker run -t + environment: + - GITHUB_PAT="${GITHUB_PAT}" + - GITHUB_REPO="jmikedupont2/ai-ticket" + - OPENAI_API_KEY=your-openai-api-key + - OPENAI_API_BASE=http://mockopenai:8080/v1 + build: + context: vendor/Auto-GPT-Turbo/ + dockerfile: Dockerfile + args: + BASE_IMAGE: h4ckermike/basic_agent + #vendor/Auto-GPT-Turbo/Dockerfile + + # vendor/AutoGPTv2/autogpts/autogpt/.devcontainer/Dockerfile + # vend or/AutoGPTv2/autogpts/autogpt/Dockerfile + auto_honey_comber: + build: + context: vendor/AutoGPTv2/ + dockerfile: autogpts/AutoHoneyComber/Dockerfile + +# vendor/AutoGPTv2/autogpts/AutoHoneyComber/Dockerfile +# vendor/AutoGPTv2/autogpts/forge/Dockerfile + + auto-gpt-chat-gpt-prompts: + build: + context: vendor/AutoGPTv2/ + dockerfile: autogpts/AutoHoneyComber/Dockerfile + + # vendor/Auto-GPT-chat-gpt-prompts/.devcontainer/Dockerfile + # vendor/Auto-GPT-chat-gpt-prompts/Dockerfile + + Auto-GPT-aj: + build: + context: vendor/Auto-GPT-aj/ + #dockerfile: + + # vendor/Auto-GPT-aj/.devcontainer/Dockerfile + # vendor/Auto-GPT-aj/Dockerfile + +# vendor/babyagi/Dockerfile +# vendor/ScottGPT/autogpts/autogpt/.devcontainer/Dockerfile +# vendor/ScottGPT/autogpts/autogpt/Dockerfile +# vendor/ScottGPT/autogpts/forge/Dockerfile +# vendor/Auto-GPT-Turbo/.devcontainer/Dockerfile +# vendor/AutoGPT-trading/autogpts/autogpt/.devcontainer/Dockerfile +# vendor/AutoGPT-trading/autogpts/autogpt/Dockerfile +# vendor/AutoGPT-trading/autogpts/forge/Dockerfile +# vendor/AutoGPT-trading/autogpts/okx-trade/Dockerfile +# vendor/Jarvis/autogpts/autogpt/.devcontainer/Dockerfile +# vendor/Jarvis/autogpts/autogpt/Dockerfile +# vendor/Jarvis/autogpts/forge/Dockerfile +# vendor/TheAgency/autogpts/autogpt/.devcontainer/Dockerfile +# vendor/TheAgency/autogpts/autogpt/Dockerfile +# vendor/TheAgency/autogpts/ZEROAGPT_02/Dockerfile +# vendor/TheAgency/autogpts/ZEROAGPT_01/Dockerfile +# vendor/TheAgency/autogpts/forge/Dockerfile +# vendor/TheAgency/autogpts/ZEROAGPT_03/Dockerfile +# vendor/Godmode-GPT/.devcontainer/Dockerfile +# vendor/Godmode-GPT/Dockerfile +# vendor/basic_agent/Dockerfile +# vendor/SuperAGI/Dockerfile +# vendor/SuperAGI/gui/Dockerfile +# vendor/BillSchumacher/.devcontainer/Dockerfile +# vendor/BillSchumacher/Dockerfile +# vendor/Auto-GPT-Graph/Dockerfile +# vendor/MasonBoomPersonalAssistant/autogpts/autogpt/.devcontainer/Dockerfile +# vendor/MasonBoomPersonalAssistant/autogpts/autogpt/Dockerfile +# vendor/MasonBoomPersonalAssistant/autogpts/PersonalAssistant/Dockerfile +# vendor/MasonBoomPersonalAssistant/autogpts/forge/Dockerfile +# vendor/MasonBoomPersonalAssistant/autogpts/testgpt/Dockerfile +# vendor/Swarms-Of-Sparta/autogpts/autogpt/.devcontainer/Dockerfile +# vendor/Swarms-Of-Sparta/autogpts/autogpt/Dockerfile +# vendor/Swarms-Of-Sparta/autogpts/forge/Dockerfile +# vendor/mljar-agent/autogpts/autogpt/.devcontainer/Dockerfile +# vendor/mljar-agent/autogpts/autogpt/Dockerfile +# vendor/mljar-agent/autogpts/forge/Dockerfile +# vendor/python-poetry/build/Dockerfile +# vendor/docker-compose-viz/Dockerfile +# vendor/AutoGPTOfficial/autogpts/autogpt/.devcontainer/Dockerfile +# vendor/AutoGPTOfficial/autogpts/autogpt/Dockerfile +# vendor/AutoGPTOfficial/autogpts/forge/Dockerfile +# vendor/Multi-GPT/.devcontainer/Dockerfile +# vendor/Multi-GPT/Dockerfile +# vendor/Auto-GPT/Dockerfile +# vendor/Auto-GPT/slim/Dockerfile +# vendor/Auto-GPT/autogpts/autogpt/.devcontainer/Dockerfile +# vendor/Auto-GPT/autogpts/forge/Dockerfile + + args: + BASE_IMAGE: h4ckermike/basic_agent + + depends_on: + - basic_agent + - mockopenai + image: h4ckermike/autogpt diff --git a/vendor/Auto-GPT-Benchmarks b/vendor/Auto-GPT-Benchmarks index 0e0b876..9c796db 160000 --- a/vendor/Auto-GPT-Benchmarks +++ b/vendor/Auto-GPT-Benchmarks @@ -1 +1 @@ -Subproject commit 0e0b876fff4fca0e34d5b7924b579cd7a7b166e2 +Subproject commit 9c796db834299d6f44c839c46386822b7782112f diff --git a/vendor/lollms b/vendor/lollms index f1f08bf..37662c6 160000 --- a/vendor/lollms +++ b/vendor/lollms @@ -1 +1 @@ -Subproject commit f1f08bfc257c2eba200c9ac10f9eaedce42d5538 +Subproject commit 37662c6f100629f793ece304905d4fc3646fc2e7 From 33ec636cd4456f782db4f8ec18fc4032820a1c77 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Sun, 15 Oct 2023 09:23:44 -0400 Subject: [PATCH 38/57] autogpt turbo starting to build --- BUILDING.md | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 BUILDING.md diff --git a/BUILDING.md b/BUILDING.md new file mode 100644 index 0000000..994b38d --- /dev/null +++ b/BUILDING.md @@ -0,0 +1,7 @@ +In order to get poetry to accept sources in /opt/ I use this trick locally outside of docker + +``` +sudo git clone https://github.com/meta-introspector/agent-protocol-sdk-python /opt/agent-protocol + poetry run pip install -e /opt/agent-protocol/ + poetry lock +``` From b88a42b664c151724b8926c467c58c32c8a477e3 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Sun, 15 Oct 2023 09:28:53 -0400 Subject: [PATCH 39/57] build working with turbo --- vendor/Auto-GPT-Turbo | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/Auto-GPT-Turbo b/vendor/Auto-GPT-Turbo index 911937c..c5cb379 160000 --- a/vendor/Auto-GPT-Turbo +++ b/vendor/Auto-GPT-Turbo @@ -1 +1 @@ -Subproject commit 911937cb369eba494394f8d9564cc6ef853b6699 +Subproject commit c5cb3797ddfc5c926e569a01383c24a7ec2789a1 From cb5c0c77b3995f6a05dc747716b7e5cb7f604bdd Mon Sep 17 00:00:00 2001 From: mike dupont Date: Sun, 15 Oct 2023 09:33:36 -0400 Subject: [PATCH 40/57] updating submodules --- vendor/Auto-GPT | 2 +- vendor/basic_agent | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/vendor/Auto-GPT b/vendor/Auto-GPT index 6026332..78e4619 160000 --- a/vendor/Auto-GPT +++ b/vendor/Auto-GPT @@ -1 +1 @@ -Subproject commit 6026332e9dbaea217a5db06b6a68e4174ae8c2e3 +Subproject commit 78e4619587765a1a33f3fa3e468358bee72236a7 diff --git a/vendor/basic_agent b/vendor/basic_agent index 6f68d72..9ed6fac 160000 --- a/vendor/basic_agent +++ b/vendor/basic_agent @@ -1 +1 @@ -Subproject commit 6f68d72bfdac569c053c03be796b1cdc34b666c5 +Subproject commit 9ed6fac9acb5cbf4cbec937b337eb04d3377fb7b From 04d2ea5122c158c86a2c46b50919f5f4f9f8211e Mon Sep 17 00:00:00 2001 From: mike dupont Date: Sun, 15 Oct 2023 11:52:20 -0400 Subject: [PATCH 41/57] parking changes port 5000 is being exposed not 8080 --- docker-compose.yml | 2 +- vendor/lollms | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 2576374..fe1eef7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -54,7 +54,7 @@ services: - ./vendor/lollms/:/app/ #mount the source in for editing without reboot ports: - - "5000:8080" + - "8080:5000" autogpt: #the beast entrypoint: bash -c "poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y --ai-name 'meta-autogpt' --ai-role 'you will introspect autogpt and reveal its internals via reflection and comprehension' --ai-goal 'Observe your behaviour' --ai-goal 'Reflect over your outcomes' --ai-goal 'Orient yourself to your knowledge' --ai-goal 'Decide on your next step' --ai-goal 'Act on your chosen next experiment' " diff --git a/vendor/lollms b/vendor/lollms index 37662c6..deb442a 160000 --- a/vendor/lollms +++ b/vendor/lollms @@ -1 +1 @@ -Subproject commit 37662c6f100629f793ece304905d4fc3646fc2e7 +Subproject commit deb442ac93b2f6ae26b5b49b428e4211a2b6609f From ce4a693f9808de79d343f961022a07ae8c714c42 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Sun, 15 Oct 2023 12:12:07 -0400 Subject: [PATCH 42/57] first one starting to work --- docker-compose.yml | 16 ++++++++++++---- vendor/Auto-GPT | 2 +- vendor/Auto-GPT-Benchmarks | 2 +- vendor/Auto-GPT-Turbo | 2 +- vendor/basic_agent | 2 +- vendor/lollms | 2 +- 6 files changed, 17 insertions(+), 9 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index fe1eef7..63e708b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -53,8 +53,8 @@ services: - /var/run/docker.sock:/var/run/docker.sock - ./vendor/lollms/:/app/ #mount the source in for editing without reboot - ports: - - "8080:5000" + #ports: + # - "8080:5000" #the app listens on mockopenai:5000 inside of docker autogpt: #the beast entrypoint: bash -c "poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y --ai-name 'meta-autogpt' --ai-role 'you will introspect autogpt and reveal its internals via reflection and comprehension' --ai-goal 'Observe your behaviour' --ai-goal 'Reflect over your outcomes' --ai-goal 'Orient yourself to your knowledge' --ai-goal 'Decide on your next step' --ai-goal 'Act on your chosen next experiment' " @@ -81,7 +81,11 @@ services: image: h4ckermike/autogpt ## - autogpt-turbo: #the beast + autogpt-turbo: + #entrypoint: /bin/bash + stdin_open: true # docker run -i + tty: true # docker run -t + entrypoint: bash -c "poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y --ai-name 'meta-autogpt' --ai-role 'you will introspect autogpt and reveal its internals via reflection and comprehension' --ai-goal 'Observe your behaviour' --ai-goal 'Reflect over your outcomes' --ai-goal 'Orient yourself to your knowledge' --ai-goal 'Decide on your next step' --ai-goal 'Act on your chosen next experiment' " @@ -93,7 +97,11 @@ services: - GITHUB_PAT="${GITHUB_PAT}" - GITHUB_REPO="jmikedupont2/ai-ticket" - OPENAI_API_KEY=your-openai-api-key - - OPENAI_API_BASE=http://mockopenai:8080/v1 + - OPENAI_API_BASE=http://mockopenai:5000/v1 + volumes: + # - /var/run/docker.sock:/var/run/docker.sock + - ./vendor/Auto-GPT-Turbo/:/app/ #mount the source in for editing without reboot + build: context: vendor/Auto-GPT-Turbo/ dockerfile: Dockerfile diff --git a/vendor/Auto-GPT b/vendor/Auto-GPT index 78e4619..6026332 160000 --- a/vendor/Auto-GPT +++ b/vendor/Auto-GPT @@ -1 +1 @@ -Subproject commit 78e4619587765a1a33f3fa3e468358bee72236a7 +Subproject commit 6026332e9dbaea217a5db06b6a68e4174ae8c2e3 diff --git a/vendor/Auto-GPT-Benchmarks b/vendor/Auto-GPT-Benchmarks index 9c796db..0e0b876 160000 --- a/vendor/Auto-GPT-Benchmarks +++ b/vendor/Auto-GPT-Benchmarks @@ -1 +1 @@ -Subproject commit 9c796db834299d6f44c839c46386822b7782112f +Subproject commit 0e0b876fff4fca0e34d5b7924b579cd7a7b166e2 diff --git a/vendor/Auto-GPT-Turbo b/vendor/Auto-GPT-Turbo index c5cb379..b974e29 160000 --- a/vendor/Auto-GPT-Turbo +++ b/vendor/Auto-GPT-Turbo @@ -1 +1 @@ -Subproject commit c5cb3797ddfc5c926e569a01383c24a7ec2789a1 +Subproject commit b974e297cb3d77e799323419b734458e0b75eaf9 diff --git a/vendor/basic_agent b/vendor/basic_agent index 9ed6fac..6f68d72 160000 --- a/vendor/basic_agent +++ b/vendor/basic_agent @@ -1 +1 @@ -Subproject commit 9ed6fac9acb5cbf4cbec937b337eb04d3377fb7b +Subproject commit 6f68d72bfdac569c053c03be796b1cdc34b666c5 diff --git a/vendor/lollms b/vendor/lollms index deb442a..f1f08bf 160000 --- a/vendor/lollms +++ b/vendor/lollms @@ -1 +1 @@ -Subproject commit deb442ac93b2f6ae26b5b49b428e4211a2b6609f +Subproject commit f1f08bfc257c2eba200c9ac10f9eaedce42d5538 From 8acf7e26894a7db952e8c7ffb45aa6ecb9a4b7c6 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Sun, 15 Oct 2023 12:32:31 -0400 Subject: [PATCH 43/57] update sub branch --- vendor/Auto-GPT-Turbo | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/Auto-GPT-Turbo b/vendor/Auto-GPT-Turbo index b974e29..d53bba6 160000 --- a/vendor/Auto-GPT-Turbo +++ b/vendor/Auto-GPT-Turbo @@ -1 +1 @@ -Subproject commit b974e297cb3d77e799323419b734458e0b75eaf9 +Subproject commit d53bba6f8ffbd236e8959350f0d028b31fc81325 From eae4192480f558967e909cb8a9e8a260239ca92d Mon Sep 17 00:00:00 2001 From: mike dupont Date: Sun, 15 Oct 2023 14:11:08 -0400 Subject: [PATCH 44/57] docker compose working --- README.md | 7 +++++++ docker-compose.yml | 30 ++++++++++++++++++++++-------- vendor/AutoGPTv2 | 1 - 3 files changed, 29 insertions(+), 9 deletions(-) delete mode 160000 vendor/AutoGPTv2 diff --git a/README.md b/README.md index 43a8cfd..872eb6d 100644 --- a/README.md +++ b/README.md @@ -43,3 +43,10 @@ The docker images are organized like this : * act_base is the foundation of all actions. * poetry_base is contains the poetry magic layer with shared containers. + + +# RUnning + +`sudo docker-compose build` +`sudo docker-compose up mockopenai` +`sudo docker-compose run auto-gpt-chat-gpt-prompts ` diff --git a/docker-compose.yml b/docker-compose.yml index 63e708b..d350384 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -111,21 +111,35 @@ services: # vendor/AutoGPTv2/autogpts/autogpt/.devcontainer/Dockerfile # vend or/AutoGPTv2/autogpts/autogpt/Dockerfile - auto_honey_comber: - build: - context: vendor/AutoGPTv2/ - dockerfile: autogpts/AutoHoneyComber/Dockerfile + # auto_honey_comber: + # build: + # context: vendor/AutoGPTv2/ + # dockerfile: autogpts/AutoHoneyComber/Dockerfile + # args: + # BASE_IMAGE: h4ckermike/basic_agent + # volumes: + # - ./vendor/AutoGPTv2/autogpts/AutoHoneyComber/:/app/ #mount the source in for editing without reboot # vendor/AutoGPTv2/autogpts/AutoHoneyComber/Dockerfile # vendor/AutoGPTv2/autogpts/forge/Dockerfile auto-gpt-chat-gpt-prompts: - build: - context: vendor/AutoGPTv2/ - dockerfile: autogpts/AutoHoneyComber/Dockerfile + #--ai-settings ai_settings.yaml + entrypoint: bash -c "python -m autogpt -y --ai-settings /tmp/ai_settings.yaml " + environment: + - GITHUB_PAT="${GITHUB_PAT}" + - GITHUB_REPO="jmikedupont2/ai-ticket" + - OPENAI_API_KEY=your-openai-api-key + - OPENAI_API_BASE=http://mockopenai:5000/v1 + build: + context: vendor/Auto-GPT-chat-gpt-prompts/ + #dockerfile: Dockerfile + volumes: + - ./vendor/Auto-GPT-chat-gpt-prompts/:/app/ #mount the source in for editing witho + - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml # vendor/Auto-GPT-chat-gpt-prompts/.devcontainer/Dockerfile - # vendor/Auto-GPT-chat-gpt-prompts/Dockerfile + # Auto-GPT-aj: build: diff --git a/vendor/AutoGPTv2 b/vendor/AutoGPTv2 deleted file mode 160000 index b4fccc1..0000000 --- a/vendor/AutoGPTv2 +++ /dev/null @@ -1 +0,0 @@ -Subproject commit b4fccc16715fb86b19fe9648eb5526fdf644663d From 6cf934d56b99ae2d2405cc6b3ae767f75b42439b Mon Sep 17 00:00:00 2001 From: mike dupont Date: Sun, 15 Oct 2023 14:43:35 -0400 Subject: [PATCH 45/57] the aj version is working https://github.com/jmikedupont2/ai-ticket/issues/9#issuecomment-1763470592 --- docker-compose.yml | 44 ++++++++++++++++++++++++-------------------- 1 file changed, 24 insertions(+), 20 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index d350384..1027f4d 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,10 +1,10 @@ version: '3' services: - + act_base: #root base of action build: vendor/act_base image: h4ckermike/act_base - + poetry_base: # use poetry image: h4ckermike/poetry_base build: @@ -18,7 +18,7 @@ services: depends_on: - act_base - + ai_ticket: # the ticket to unite image: h4ckermike/ai_ticket build: @@ -27,7 +27,7 @@ services: BASE_IMAGE: h4ckermike/poetry_base depends_on: - poetry_base - + basic_agent: #basic agent image: h4ckermike/basic_agent build: @@ -52,7 +52,6 @@ services: volumes: - /var/run/docker.sock:/var/run/docker.sock - ./vendor/lollms/:/app/ #mount the source in for editing without reboot - #ports: # - "8080:5000" #the app listens on mockopenai:5000 inside of docker @@ -108,7 +107,7 @@ services: args: BASE_IMAGE: h4ckermike/basic_agent #vendor/Auto-GPT-Turbo/Dockerfile - + # vendor/AutoGPTv2/autogpts/autogpt/.devcontainer/Dockerfile # vend or/AutoGPTv2/autogpts/autogpt/Dockerfile # auto_honey_comber: @@ -139,16 +138,21 @@ services: - ./vendor/Auto-GPT-chat-gpt-prompts/:/app/ #mount the source in for editing witho - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml # vendor/Auto-GPT-chat-gpt-prompts/.devcontainer/Dockerfile - # - - Auto-GPT-aj: + # + + auto-gpt-aj: + entrypoint: bash -c "python -m autogpt -y --ai-settings /tmp/ai_settings.yaml " + environment: + - GITHUB_PAT="${GITHUB_PAT}" + - GITHUB_REPO="jmikedupont2/ai-ticket" + - OPENAI_API_KEY=your-openai-api-key + - OPENAI_API_BASE=http://mockopenai:5000/v1 build: - context: vendor/Auto-GPT-aj/ - #dockerfile: + context: vendor/Auto-GPT-aj + volumes: + - ./vendor/Auto-GPT-aj/:/app/ #mount the source in for editing witho + - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml - # vendor/Auto-GPT-aj/.devcontainer/Dockerfile - # vendor/Auto-GPT-aj/Dockerfile - # vendor/babyagi/Dockerfile # vendor/ScottGPT/autogpts/autogpt/.devcontainer/Dockerfile # vendor/ScottGPT/autogpts/autogpt/Dockerfile @@ -198,10 +202,10 @@ services: # vendor/Auto-GPT/autogpts/autogpt/.devcontainer/Dockerfile # vendor/Auto-GPT/autogpts/forge/Dockerfile - args: - BASE_IMAGE: h4ckermike/basic_agent +# args: +# BASE_IMAGE: h4ckermike/basic_agent - depends_on: - - basic_agent - - mockopenai - image: h4ckermike/autogpt +# depends_on: +# - basic_agent +# - mockopenai +# image: h4ckermike/autogpt From 82934d1d4b2623fdbde0adc1417b8940072fc92c Mon Sep 17 00:00:00 2001 From: mike dupont Date: Sun, 15 Oct 2023 14:56:10 -0400 Subject: [PATCH 46/57] adding babyagi error: Creating ai-ticket_babyagi_run ... Traceback (most recent call last): File "/app/babyagi.py", line 14, in import chromadb File "/usr/local/lib/python3.11/site-packages/chromadb/__init__.py", line 1, in import chromadb.config File "/usr/local/lib/python3.11/site-packages/chromadb/config.py", line 1, in from pydantic import BaseSettings File "/usr/local/lib/python3.11/site-packages/pydantic/__init__.py", line 218, in __getattr__ return _getattr_migration(attr_name) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.11/site-packages/pydantic/_migration.py", line 294, in wrapper raise PydanticImportError( pydantic.errors.PydanticImportError: `BaseSettings` has been moved to the `pydantic-settings` package. See https://docs.pydantic.dev/2.4/migration/#basesettings-has-moved-to-pydantic-settings for more details. For further information visit https://errors.pydantic.dev/2.4/u/import-error ERROR: 1 --- docker-compose.yml | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/docker-compose.yml b/docker-compose.yml index 1027f4d..c572089 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -153,6 +153,19 @@ services: - ./vendor/Auto-GPT-aj/:/app/ #mount the source in for editing witho - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml + babyagi: + entrypoint: bash -c "python babyagi.py -y --ai-settings /tmp/ai_settings.yaml " + environment: + - GITHUB_PAT="${GITHUB_PAT}" + - GITHUB_REPO="jmikedupont2/ai-ticket" + - OPENAI_API_KEY=your-openai-api-key + - OPENAI_API_BASE=http://mockopenai:5000/v1 + build: + context: vendor/babyagi + volumes: + - ./vendor/babyagi/:/app/ #mount the source in for editing witho + - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml + # vendor/babyagi/Dockerfile # vendor/ScottGPT/autogpts/autogpt/.devcontainer/Dockerfile # vendor/ScottGPT/autogpts/autogpt/Dockerfile From c2c67adba2ed6f8f76287fcbbecfde46f1903f69 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Sun, 15 Oct 2023 15:53:39 -0400 Subject: [PATCH 47/57] adding scott sudo docker-compose run scott-gpt WARNING: Found orphan containers (ai-ticket_auto_honey_comber_1, ai-ticket_Auto-GPT-aj_1) for this project. If you removed or renamed this service in your compose file, you can run this command with the --remove-orphans flag to clean it up. Creating ai-ticket_scott-gpt_run ... Path /benchmark for agbenchmark does not exist You do not have access to gpt-3.5-turbo-16k. Setting fast_llm to gpt-3.5-turbo. You do not have access to gpt-4-0314. Setting smart_llm to gpt-3.5-turbo. 2023-10-15 19:52:48,436 INFO Name : AutoGpt-Improvement-agent 2023-10-15 19:52:48,436 INFO Role : improve autogpt 2023-10-15 19:52:48,437 INFO Goals: ['improve independence from external apis', 'make multi platform'] 2023-10-15 19:52:48,437 INFO API Budget: $10.0 2023-10-15 19:52:48,437 INFO NOTE: All files/directories created by this agent can be found inside its workspace at: /app/auto_gpt_workspace 2023-10-15 19:52:48,437 INFO AutoGpt-Improvement-agent has been created with the following details: 2023-10-15 19:52:48,437 INFO Name : AutoGpt-Improvement-agent 2023-10-15 19:52:48,437 INFO Role : improve autogpt 2023-10-15 19:52:48,437 INFO Goals: 2023-10-15 19:52:48,437 INFO - improve independence from external apis 2023-10-15 19:52:48,437 INFO - make multi platform 2023-10-15 19:52:48,484 WARNING Could not load MemoryItems from file: Input is a zero-length, empty document: line 1 column 1 (char 0) 2023-10-15 19:52:48,501 INFO Configured Memory: JSONFileMemory 2023-10-15 19:52:48,501 INFO Configured Browser: chrome 2023-10-15 19:52:58,195 INFO message='OpenAI API response' path=http://mockopenai:5000/v1/chat/completions processing_ms=None request_id=None response_code=200 2023-10-15 19:52:58,229 INFO AUTOGPT-IMPROVEMENT-AGENT THOUGHTS: I encountered an issue with our application, and I need assistance. I've created a ticket for it. Here's the URL to the ticket: https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1763488431. My next action is to poll that URL for updates. 2023-10-15 19:52:58,229 INFO REASONING: todo 2023-10-15 19:52:58,229 INFO PLAN: 2023-10-15 19:52:58,230 INFO - Initiated a request for assistance. 2023-10-15 19:52:58,230 INFO CRITICISM: todo 2023-10-15 19:52:58,230 INFO SPEAK: Firstly, I will use the `web_search` command to research the best ways to improve AutoGPT's independence from external APIs. Then, I will use the `execute_python_code` command to implement the solutions to improve AutoGPT's independence from external APIs. Finally, I will use the `execute_python_code` command to create a multi-platform version of AutoGPT. My thoughts: To improve AutoGPT's independence from external APIs, I will research the best solutions and then implement them using the `execute_python_code` command. To create a multi-platform version of AutoGPT, I will also use the `execute_python_code` command. My plan: - Research best solutions for improving AutoGPT's independence from external APIs using the `web_search` command. - Implement solutions using the `execute_python_code` command. - Create a multi-platform version of AutoGPT using the `execute_python_code` command. My criticism: I should be more efficient in my use of the commands I have access to, and think of more creative ways to accomplish my goals. My summary: I will use the `web 2023-10-15 19:52:58,230 INFO NEXT ACTION: COMMAND = request_assistance ARGUMENTS = {'ticket_url': 'https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1763488431', 'next_action': 'poll_url'} 2023-10-15 19:52:58,230 INFO Enter 'y' to authorise command, 'y -N' to run N continuous commands, 'n' to exit program, or enter feedback for AutoGpt-Improvement-agent... Input: WARNING: your terminal doesn't support cursor position requests (CPR). 2023-10-15 19:53:07,480 INFO You interrupted AutoGPT 2023-10-15 19:53:07,480 INFO Quitting... mdupont@mdupont-G470:~/experiments/ai-ticket$ --- docker-compose.yml | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index c572089..60044bd 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -165,11 +165,25 @@ services: volumes: - ./vendor/babyagi/:/app/ #mount the source in for editing witho - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml - # vendor/babyagi/Dockerfile + # vendor/ScottGPT/autogpts/autogpt/.devcontainer/Dockerfile -# vendor/ScottGPT/autogpts/autogpt/Dockerfile +# # vendor/ScottGPT/autogpts/forge/Dockerfile + scott-gpt: + entrypoint: bash -c "poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " + #entrypoint: bash -c "ls -latr " + environment: + - GITHUB_PAT="${GITHUB_PAT}" + - GITHUB_REPO="jmikedupont2/ai-ticket" + - OPENAI_API_KEY=your-openai-api-key + - OPENAI_API_BASE=http://mockopenai:5000/v1 + build: + context: vendor/ScottGPT/autogpts/autogpt/ + volumes: + - ./vendor/ScottGPT/autogpts/autogpt:/app/ #mount the source in for editing witho + - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml + # vendor/Auto-GPT-Turbo/.devcontainer/Dockerfile # vendor/AutoGPT-trading/autogpts/autogpt/.devcontainer/Dockerfile # vendor/AutoGPT-trading/autogpts/autogpt/Dockerfile From daa901b5aba007a7a79732781411cca383959210 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Sun, 15 Oct 2023 16:07:02 -0400 Subject: [PATCH 48/57] adding autogpt-trading sudo docker-compose run autogpt-trading WARNING: Found orphan containers (ai-ticket_Auto-GPT-aj_1, ai-ticket_auto_honey_comber_1) for this project. If you removed or renamed this service in your compose file, you can run this command with the --remove-orphans flag to clean it up. Creating ai-ticket_autogpt-trading_run ... Path /benchmark for agbenchmark does not exist plugins_config.yaml does not exist, creating base config. You do not have access to gpt-3.5-turbo-16k. Setting fast_llm to gpt-3.5-turbo. You do not have access to gpt-4-0314. Setting smart_llm to gpt-3.5-turbo. 2023-10-15 20:05:54,094 INFO Name : AutoGpt-Improvement-agent 2023-10-15 20:05:54,094 INFO Role : improve autogpt 2023-10-15 20:05:54,094 INFO Goals: ['improve independence from external apis', 'make multi platform'] 2023-10-15 20:05:54,094 INFO API Budget: $10.0 2023-10-15 20:05:54,094 INFO NOTE: All files/directories created by this agent can be found inside its workspace at: /app/auto_gpt_workspace 2023-10-15 20:05:54,094 INFO AutoGpt-Improvement-agent has been created with the following details: 2023-10-15 20:05:54,094 INFO Name : AutoGpt-Improvement-agent 2023-10-15 20:05:54,094 INFO Role : improve autogpt 2023-10-15 20:05:54,094 INFO Goals: 2023-10-15 20:05:54,094 INFO - improve independence from external apis 2023-10-15 20:05:54,094 INFO - make multi platform 2023-10-15 20:05:54,094 WARNING Could not load MemoryItems from file: Input is a zero-length, empty document: line 1 column 1 (char 0) 2023-10-15 20:05:54,126 INFO Configured Memory: JSONFileMemory 2023-10-15 20:05:54,126 INFO Configured Browser: chrome 2023-10-15 20:06:04,395 INFO message='OpenAI API response' path=http://mockopenai:5000/v1/chat/completions processing_ms=None request_id=None response_code=200 2023-10-15 20:06:04,401 INFO AUTOGPT-IMPROVEMENT-AGENT THOUGHTS: I encountered an issue with our application, and I need assistance. I've created a ticket for it. Here's the URL to the ticket: https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1763491468. My next action is to poll that URL for updates. 2023-10-15 20:06:04,401 INFO REASONING: todo 2023-10-15 20:06:04,401 INFO PLAN: 2023-10-15 20:06:04,401 INFO - Initiated a request for assistance. 2023-10-15 20:06:04,402 INFO CRITICISM: todo 2023-10-15 20:06:04,402 INFO SPEAK: First, we must improve independence from external APIs. We can do this by writing our own code in Python to interact with the APIs, or by utilizing open source code. We can use the command "execute_python_code" to write our own code, or "execute_python_file" to access and use open source Python files. Next, we must make AutoGPT multi-platform. To do this, we need to write code that will allow us to interact with different operating systems and devices. We can use the command "execute_python_code" to write our own code, or "execute_python_file" to access and use open source Python files. To summarize, our next command should be "execute_python_code" or "execute_python_file", depending on what code we need to write or utilize. { "thoughts": { "text": "I think we should use the command 'execute_python_code' or 'execute_python_file' depending on what code we need to write or utilize.", "reasoning": "This will allow us to write our own code to interact with APIs, and utilize open source code to interact with 2023-10-15 20:06:04,402 INFO NEXT ACTION: COMMAND = request_assistance ARGUMENTS = {'ticket_url': 'https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1763491468', 'next_action': 'poll_url'} 2023-10-15 20:06:04,402 INFO Enter 'y' to authorise command, 'y -N' to run N continuous commands, 'n' to exit program, or enter feedback for AutoGpt-Improvement-agent... Input: WARNING: your terminal doesn't support cursor position requests (CPR). 2023-10-15 20:06:30,651 INFO You interrupted AutoGPT 2023-10-15 20:06:30,651 INFO Quitting... mdupont@mdupont-G470:~/experiments/ai-ticket$ --- docker-compose.yml | 63 ++++++++++++++++++++++++++++------------------ 1 file changed, 38 insertions(+), 25 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 60044bd..fdca2e8 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -184,50 +184,63 @@ services: - ./vendor/ScottGPT/autogpts/autogpt:/app/ #mount the source in for editing witho - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml -# vendor/Auto-GPT-Turbo/.devcontainer/Dockerfile -# vendor/AutoGPT-trading/autogpts/autogpt/.devcontainer/Dockerfile # vendor/AutoGPT-trading/autogpts/autogpt/Dockerfile -# vendor/AutoGPT-trading/autogpts/forge/Dockerfile + + + autogpt-trading: + entrypoint: bash -c "poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " + environment: + - GITHUB_PAT="${GITHUB_PAT}" + - GITHUB_REPO="jmikedupont2/ai-ticket" + - OPENAI_API_KEY=your-openai-api-key + - OPENAI_API_BASE=http://mockopenai:5000/v1 + build: + context: vendor/AutoGPT-trading/autogpts/autogpt/ + volumes: + - ./vendor/AutoGPT-trading/autogpts/autogpt:/app/ #mount the source in for editing witho + - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml + # vendor/AutoGPT-trading/autogpts/okx-trade/Dockerfile -# vendor/Jarvis/autogpts/autogpt/.devcontainer/Dockerfile + + # vendor/Jarvis/autogpts/autogpt/Dockerfile -# vendor/Jarvis/autogpts/forge/Dockerfile -# vendor/TheAgency/autogpts/autogpt/.devcontainer/Dockerfile + + # vendor/TheAgency/autogpts/autogpt/Dockerfile # vendor/TheAgency/autogpts/ZEROAGPT_02/Dockerfile # vendor/TheAgency/autogpts/ZEROAGPT_01/Dockerfile # vendor/TheAgency/autogpts/forge/Dockerfile # vendor/TheAgency/autogpts/ZEROAGPT_03/Dockerfile -# vendor/Godmode-GPT/.devcontainer/Dockerfile + # vendor/Godmode-GPT/Dockerfile -# vendor/basic_agent/Dockerfile + # vendor/SuperAGI/Dockerfile -# vendor/SuperAGI/gui/Dockerfile -# vendor/BillSchumacher/.devcontainer/Dockerfile + + # vendor/BillSchumacher/Dockerfile + # vendor/Auto-GPT-Graph/Dockerfile -# vendor/MasonBoomPersonalAssistant/autogpts/autogpt/.devcontainer/Dockerfile + + # vendor/MasonBoomPersonalAssistant/autogpts/autogpt/Dockerfile # vendor/MasonBoomPersonalAssistant/autogpts/PersonalAssistant/Dockerfile -# vendor/MasonBoomPersonalAssistant/autogpts/forge/Dockerfile + + # vendor/MasonBoomPersonalAssistant/autogpts/testgpt/Dockerfile -# vendor/Swarms-Of-Sparta/autogpts/autogpt/.devcontainer/Dockerfile + # vendor/Swarms-Of-Sparta/autogpts/autogpt/Dockerfile -# vendor/Swarms-Of-Sparta/autogpts/forge/Dockerfile -# vendor/mljar-agent/autogpts/autogpt/.devcontainer/Dockerfile + + # vendor/mljar-agent/autogpts/autogpt/Dockerfile -# vendor/mljar-agent/autogpts/forge/Dockerfile -# vendor/python-poetry/build/Dockerfile -# vendor/docker-compose-viz/Dockerfile -# vendor/AutoGPTOfficial/autogpts/autogpt/.devcontainer/Dockerfile + + + + # vendor/AutoGPTOfficial/autogpts/autogpt/Dockerfile -# vendor/AutoGPTOfficial/autogpts/forge/Dockerfile -# vendor/Multi-GPT/.devcontainer/Dockerfile + + # vendor/Multi-GPT/Dockerfile -# vendor/Auto-GPT/Dockerfile -# vendor/Auto-GPT/slim/Dockerfile -# vendor/Auto-GPT/autogpts/autogpt/.devcontainer/Dockerfile -# vendor/Auto-GPT/autogpts/forge/Dockerfile + # args: # BASE_IMAGE: h4ckermike/basic_agent From 7df95f5a3a35d0c6caec99889e2c828d819d777a Mon Sep 17 00:00:00 2001 From: mike dupont Date: Mon, 16 Oct 2023 12:20:25 -0400 Subject: [PATCH 49/57] adding polygpt-alpha and sharing templates --- .gitmodules | 3 + docker-compose.yml | 164 ++++++++++++++++++++++++++++++++----------- template.sh | 16 +++++ template.yml | 12 ++++ vendor/PolyGPT-alpha | 1 + 5 files changed, 156 insertions(+), 40 deletions(-) create mode 100644 template.sh create mode 100644 template.yml create mode 160000 vendor/PolyGPT-alpha diff --git a/.gitmodules b/.gitmodules index f9c0e02..44c54cb 100644 --- a/.gitmodules +++ b/.gitmodules @@ -82,3 +82,6 @@ [submodule "vendor/babyagi"] path = vendor/babyagi url = https://github.com/SilenNaihin/babyagi.git +[submodule "vendor/PolyGPT-alpha"] + path = vendor/PolyGPT-alpha + url = https://github.com/team-tonic-arena-hacks/PolyGPT-alpha diff --git a/docker-compose.yml b/docker-compose.yml index fdca2e8..dd3f89b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -184,10 +184,76 @@ services: - ./vendor/ScottGPT/autogpts/autogpt:/app/ #mount the source in for editing witho - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml -# vendor/AutoGPT-trading/autogpts/autogpt/Dockerfile + jarvis: + entrypoint: bash -c "poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " + environment: + - GITHUB_PAT="${GITHUB_PAT}" + - GITHUB_REPO="jmikedupont2/ai-ticket" + - OPENAI_API_KEY=your-openai-api-key + - OPENAI_API_BASE=http://mockopenai:5000/v1 + build: + context: vendor/Jarvis/autogpts/autogpt + volumes: + - ./vendor/Jarvis/autogpts/autogpt/autogpts/autogpt:/app/ #mount the source in for editing witho + - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml + the_agency_1: + entrypoint: bash -c "poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " + environment: + - GITHUB_PAT="${GITHUB_PAT}" + - GITHUB_REPO="jmikedupont2/ai-ticket" + - OPENAI_API_KEY=your-openai-api-key + - OPENAI_API_BASE=http://mockopenai:5000/v1 + build: + context: vendor/TheAgency/autogpts/autogpt/ + volumes: + - ./vendor/TheAgency/autogpts/autogpt/:/app/ #mount the source in for editing witho + - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml + the_agency_2: + #entrypoint: bash -c "poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " + entrypoint: bash + environment: + - GITHUB_PAT="${GITHUB_PAT}" + - GITHUB_REPO="jmikedupont2/ai-ticket" + - OPENAI_API_KEY=your-openai-api-key + - OPENAI_API_BASE=http://mockopenai:5000/v1 + build: + context: vendor/TheAgency/ + dockerfile : autogpts/ZEROAGPT_02/Dockerfile + volumes: + - ./vendor/TheAgency/benchmark:/benchmark + - ./vendor/TheAgency/autogpts/ZEROAGPT_02/:/app/ #mount the source in for editing witho + - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml + the_agency_3: + entrypoint: bash -c "poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " + environment: + - GITHUB_PAT="${GITHUB_PAT}" + - GITHUB_REPO="jmikedupont2/ai-ticket" + - OPENAI_API_KEY=your-openai-api-key + - OPENAI_API_BASE=http://mockopenai:5000/v1 + build: + #context: vendor/TheAgency/autogpts/ZEROAGPT_01/ + context: vendor/TheAgency/ + dockerfile : autogpts/ZEROAGPT_01/Dockerfile + volumes: + - ./vendor/TheAgency/autogpts/ZEROAGPT_01/:/app/ #mount the source in for editing witho + - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml + the_agency_4: + entrypoint: bash -c "poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " + environment: + - GITHUB_PAT="${GITHUB_PAT}" + - GITHUB_REPO="jmikedupont2/ai-ticket" + - OPENAI_API_KEY=your-openai-api-key + - OPENAI_API_BASE=http://mockopenai:5000/v1 + build: + #context: vendor/TheAgency/autogpts/ZEROAGPT_03/ + context: vendor/TheAgency/ + dockerfile : autogpts/ZEROAGPT_03/Dockerfile - autogpt-trading: + volumes: + - ./vendor/TheAgency/autogpts/ZEROAGPT_03/:/app/ #mount the source in for editing witho + - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml + mason_boom: entrypoint: bash -c "poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " environment: - GITHUB_PAT="${GITHUB_PAT}" @@ -195,57 +261,75 @@ services: - OPENAI_API_KEY=your-openai-api-key - OPENAI_API_BASE=http://mockopenai:5000/v1 build: - context: vendor/AutoGPT-trading/autogpts/autogpt/ + context: vendor/MasonBoomPersonalAssistant/autogpts/autogpt/ volumes: - - ./vendor/AutoGPT-trading/autogpts/autogpt:/app/ #mount the source in for editing witho + - ./vendor/MasonBoomPersonalAssistant/autogpts/autogpt/:/app/ #mount the source in for editixong witho - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml + mason_boom_pa: + entrypoint: bash -c "poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " + environment: + - GITHUB_PAT="${GITHUB_PAT}" + - GITHUB_REPO="jmikedupont2/ai-ticket" + - OPENAI_API_KEY=your-openai-api-key + - OPENAI_API_BASE=http://mockopenai:5000/v1 + build: + context: vendor/MasonBoomPersonalAssistant/ + dockerfile: autogpts/PersonalAssistant/Dockerfile + volumes: + - ./vendor/MasonBoomPersonalAssistant/autogpts/PersonalAssistant/:/app/ #mount the source in for editing witho + - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml + mason_boom_testgpt: + entrypoint: bash -c "poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " + environment: + - GITHUB_PAT="${GITHUB_PAT}" + - GITHUB_REPO="jmikedupont2/ai-ticket" + - OPENAI_API_KEY=your-openai-api-key + - OPENAI_API_BASE=http://mockopenai:5000/v1 + build: + context: vendor/MasonBoomPersonalAssistant/ + dockerfile: autogpts/PersonalAssistant/Dockerfile + volumes: + - ./vendor/MasonBoomPersonalAssistant/autogpts/testgpt/:/app/ #mount the source in for editing witho + - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml + swarms_of_sparta: + entrypoint: bash -c "poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " + environment: + - GITHUB_PAT="${GITHUB_PAT}" + - GITHUB_REPO="jmikedupont2/ai-ticket" + - OPENAI_API_KEY=your-openai-api-key + - OPENAI_API_BASE=http://mockopenai:5000/v1 + build: + context: vendor/Swarms-Of-Sparta/autogpts/autogpt/ + volumes: + - ./vendor/Swarms-Of-Sparta/autogpts/autogpt/:/app/ #mount the source in for editing witho + - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml + mljar: + entrypoint: bash -c "poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " + environment: + - GITHUB_PAT="${GITHUB_PAT}" + - GITHUB_REPO="jmikedupont2/ai-ticket" + - OPENAI_API_KEY=your-openai-api-key + - OPENAI_API_BASE=http://mockopenai:5000/v1 + build: + context: vendor/mljar-agent/autogpts/autogpt/ + volumes: + - ./vendor/mljar-agent/autogpts/autogpt/:/app/ #mount the source in for editing witho + - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml +# vendor/AutoGPT-trading/autogpts/autogpt/Dockerfile -# vendor/AutoGPT-trading/autogpts/okx-trade/Dockerfile -# vendor/Jarvis/autogpts/autogpt/Dockerfile -# vendor/TheAgency/autogpts/autogpt/Dockerfile -# vendor/TheAgency/autogpts/ZEROAGPT_02/Dockerfile -# vendor/TheAgency/autogpts/ZEROAGPT_01/Dockerfile -# vendor/TheAgency/autogpts/forge/Dockerfile -# vendor/TheAgency/autogpts/ZEROAGPT_03/Dockerfile # vendor/Godmode-GPT/Dockerfile - # vendor/SuperAGI/Dockerfile - - # vendor/BillSchumacher/Dockerfile - # vendor/Auto-GPT-Graph/Dockerfile - - -# vendor/MasonBoomPersonalAssistant/autogpts/autogpt/Dockerfile -# vendor/MasonBoomPersonalAssistant/autogpts/PersonalAssistant/Dockerfile - - -# vendor/MasonBoomPersonalAssistant/autogpts/testgpt/Dockerfile - -# vendor/Swarms-Of-Sparta/autogpts/autogpt/Dockerfile - - -# vendor/mljar-agent/autogpts/autogpt/Dockerfile - - - - -# vendor/AutoGPTOfficial/autogpts/autogpt/Dockerfile - - # vendor/Multi-GPT/Dockerfile -# args: -# BASE_IMAGE: h4ckermike/basic_agent -# depends_on: -# - basic_agent -# - mockopenai -# image: h4ckermike/autogpt +#poetry run agent start forge --setup +#poetry run agbenchmark --mock +#poetry run agbenchmark --test=WriteFile diff --git a/template.sh b/template.sh new file mode 100644 index 0000000..5dcd2ff --- /dev/null +++ b/template.sh @@ -0,0 +1,16 @@ +standard () { + local servicename="$1" + local dirname="$2" + cat template.yml | sed -e"s;SERVICENAME;${servicename};g" | sed -e"s;DIRNAME;${dirname};g" +} + +standard jarvis vendor/Jarvis/autogpts/autogpt/ +standard the_agency_1 vendor/TheAgency/autogpts/autogpt/Dockerfile +standard the_agency_2 vendor/TheAgency/autogpts/ZEROAGPT_02/Dockerfile +standard the_agency_3 vendor/TheAgency/autogpts/ZEROAGPT_01/Dockerfile +standard the_agency_4 vendor/TheAgency/autogpts/ZEROAGPT_03/Dockerfile +standard mason_boom vendor/MasonBoomPersonalAssistant/autogpts/autogpt/Dockerfile +standard mason_boom_pa vendor/MasonBoomPersonalAssistant/autogpts/PersonalAssistant/Dockerfile +standard mason_boom_testgpt vendor/MasonBoomPersonalAssistant/autogpts/testgpt/Dockerfile +standard swarms_of_sparta vendor/Swarms-Of-Sparta/autogpts/autogpt/Dockerfile +standard mljar vendor/mljar-agent/autogpts/autogpt/Dockerfile diff --git a/template.yml b/template.yml new file mode 100644 index 0000000..5484ced --- /dev/null +++ b/template.yml @@ -0,0 +1,12 @@ + SERVICENAME: + entrypoint: bash -c "poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " + environment: + - GITHUB_PAT="${GITHUB_PAT}" + - GITHUB_REPO="jmikedupont2/ai-ticket" + - OPENAI_API_KEY=your-openai-api-key + - OPENAI_API_BASE=http://mockopenai:5000/v1 + build: + context: vendor/DIRNAME/autogpts/autogpt/ + volumes: + - ./vendor/DIRNAME/autogpts/autogpt:/app/ #mount the source in for editing witho + - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml diff --git a/vendor/PolyGPT-alpha b/vendor/PolyGPT-alpha new file mode 160000 index 0000000..a1c6529 --- /dev/null +++ b/vendor/PolyGPT-alpha @@ -0,0 +1 @@ +Subproject commit a1c6529e99b39d5823fef97c5b0447fb02fada30 From e458fdec38c683482744dedc5fe3dc70ef0b7e89 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Mon, 16 Oct 2023 16:23:52 -0400 Subject: [PATCH 50/57] starting to run the agency more work to do --- .gitmodules | 3 +++ docker-compose.yml | 14 +++++++++++--- vendor/RedAGPT | 1 + vendor/TheAgency | 2 +- 4 files changed, 16 insertions(+), 4 deletions(-) create mode 160000 vendor/RedAGPT diff --git a/.gitmodules b/.gitmodules index 44c54cb..cb6f8d6 100644 --- a/.gitmodules +++ b/.gitmodules @@ -85,3 +85,6 @@ [submodule "vendor/PolyGPT-alpha"] path = vendor/PolyGPT-alpha url = https://github.com/team-tonic-arena-hacks/PolyGPT-alpha +[submodule "vendor/RedAGPT"] + path = vendor/RedAGPT + url = https://github.com/shamantechnology/RedAGPT diff --git a/docker-compose.yml b/docker-compose.yml index dd3f89b..1752ae4 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,5 +1,11 @@ version: '3' services: + arena: + depends_on: + - mockopenai + - the_agency_1 + build: + context : arena act_base: #root base of action build: vendor/act_base @@ -197,17 +203,19 @@ services: - ./vendor/Jarvis/autogpts/autogpt/autogpts/autogpt:/app/ #mount the source in for editing witho - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml the_agency_1: - entrypoint: bash -c "poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " + entrypoint: bash -c "poetry install && poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " environment: - GITHUB_PAT="${GITHUB_PAT}" - GITHUB_REPO="jmikedupont2/ai-ticket" - OPENAI_API_KEY=your-openai-api-key - OPENAI_API_BASE=http://mockopenai:5000/v1 build: - context: vendor/TheAgency/autogpts/autogpt/ + context: vendor/TheAgency/ + dockerfile: autogpts/autogpt/Dockerfile volumes: + - ./vendor/TheAgency/benchmark:/benchmark - ./vendor/TheAgency/autogpts/autogpt/:/app/ #mount the source in for editing witho - - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml + - ./vendor/Auto-GPT/autogpts/autogpt/the_agency_ai_settings.yaml:/tmp/ai_settings.yaml the_agency_2: #entrypoint: bash -c "poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " entrypoint: bash diff --git a/vendor/RedAGPT b/vendor/RedAGPT new file mode 160000 index 0000000..04ff578 --- /dev/null +++ b/vendor/RedAGPT @@ -0,0 +1 @@ +Subproject commit 04ff5782cea5c1758f68e30a40627944b6bfaa37 diff --git a/vendor/TheAgency b/vendor/TheAgency index 4a775f2..8f4b05c 160000 --- a/vendor/TheAgency +++ b/vendor/TheAgency @@ -1 +1 @@ -Subproject commit 4a775f284850f8e10fca2b59d01d1e8b507cae92 +Subproject commit 8f4b05cf20bf5d0148ad382dc8e6035714c37651 From 134edfc9a0a41494db4d5f6a57393d84de2a5c09 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Mon, 16 Oct 2023 17:18:44 -0400 Subject: [PATCH 51/57] now jarvi is running --- docker-compose.yml | 11 ++++++++--- pyproject.toml | 4 ++-- vendor/Jarvis | 2 +- vendor/lollms | 2 +- 4 files changed, 12 insertions(+), 7 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 1752ae4..4837e8d 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -192,16 +192,21 @@ services: jarvis: entrypoint: bash -c "poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " + #entrypoint: bash environment: - GITHUB_PAT="${GITHUB_PAT}" - GITHUB_REPO="jmikedupont2/ai-ticket" - OPENAI_API_KEY=your-openai-api-key - OPENAI_API_BASE=http://mockopenai:5000/v1 build: - context: vendor/Jarvis/autogpts/autogpt + context: vendor/Jarvis/ + dockerfile: autogpts/autogpt/Dockerfile + args: + BASE_IMAGE: h4ckermike/basic_agent volumes: - - ./vendor/Jarvis/autogpts/autogpt/autogpts/autogpt:/app/ #mount the source in for editing witho - - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml + - ./vendor/Jarvis/benchmark:/benchmark + - ./vendor/Jarvis/autogpts/autogpt//app/ #mount the source in for editing witho + - ./tests/ai_settings.yaml:/tmp/ai_settings.yaml the_agency_1: entrypoint: bash -c "poetry install && poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " environment: diff --git a/pyproject.toml b/pyproject.toml index 2158d50..0f95052 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,6 +53,6 @@ requests = "2.31.0" black = "23.7.0" pre-commit = "3.3.3" semver = "3.0.1" -pylint = "2.17.5" -testcontainers = "3.7.1" +#pylint = "2.17.5" +#testcontainers = "3.7.1" diff --git a/vendor/Jarvis b/vendor/Jarvis index 78b98a0..3dd1e73 160000 --- a/vendor/Jarvis +++ b/vendor/Jarvis @@ -1 +1 @@ -Subproject commit 78b98a018b4ea112665815bcd3b797dac76b35e5 +Subproject commit 3dd1e73c682ac2a584363eefe8ec4de481d44f90 diff --git a/vendor/lollms b/vendor/lollms index f1f08bf..d82a1e9 160000 --- a/vendor/lollms +++ b/vendor/lollms @@ -1 +1 @@ -Subproject commit f1f08bfc257c2eba200c9ac10f9eaedce42d5538 +Subproject commit d82a1e9ad29fc54a693f7e818553e5c3848fe48a From 8245873a662a15cdbfccfc5ca1c5448cad74ae08 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Mon, 16 Oct 2023 17:25:19 -0400 Subject: [PATCH 52/57] adding the run log --- arena/Dockerfile | 1 + logs/full_run.txt | 1363 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 1364 insertions(+) create mode 100644 arena/Dockerfile create mode 100644 logs/full_run.txt diff --git a/arena/Dockerfile b/arena/Dockerfile new file mode 100644 index 0000000..10e22f5 --- /dev/null +++ b/arena/Dockerfile @@ -0,0 +1 @@ +from debian \ No newline at end of file diff --git a/logs/full_run.txt b/logs/full_run.txt new file mode 100644 index 0000000..d5861dc --- /dev/null +++ b/logs/full_run.txt @@ -0,0 +1,1363 @@ + +mdupont@mdupont-G470:~/experiments/ai-ticket$ sudo docker-compose up +Creating ai-ticket_jarvis_1 ... +Creating ai-ticket_auto-gpt-aj_1 ... +Creating ai-ticket_the_agency_4_1 ... +Starting ai-ticket_act_base_1 ... +Creating ai-ticket_scott-gpt_1 ... +Creating ai-ticket_swarms_of_sparta_1 ... +Creating ai-ticket_auto-gpt-chat-gpt-prompts_1 ... +Creating ai-ticket_mason_boom_1 ... +Creating ai-ticket_the_agency_2_1 ... +Creating ai-ticket_mason_boom_testgpt_1 ... +Creating ai-ticket_the_agency_3_1 ... +Creating ai-ticket_mljar_1 ... +Creating ai-ticket_the_agency_1_1 ... +Creating ai-ticket_mason_boom_pa_1 ... +Creating ai-ticket_babyagi_1 ... +Creating ai-ticket_autogpt-turbo_1 ... +Starting ai-ticket_poetry_base_1 ... +Starting ai-ticket_ai_ticket_1 ... +ai-ticket_mockopenai_1 is up-to-date +Creating ai-ticket_basic_agent_1 ... +Creating ai-ticket_arena_1 ... +Creating ai-ticket_autogpt_1 ... +Attaching to ai-ticket_act_base_1, ai-ticket_mason_boom_pa_1, ai-ticket_mason_boom_1, ai-ticket_poetry_base_1, ai-ticket_auto-gpt-chat-gpt-prompts_1, ai-ticket_mason_boom_testgpt_1, ai-ticket_the_agency_4_1, ai-ticket_babyagi_1, ai-ticket_the_agency_3_1, ai-ticket_scott-gpt_1, ai-ticket_autogpt-turbo_1, ai-ticket_the_agency_2_1, ai-ticket_auto-gpt-aj_1, ai-ticket_mljar_1, ai-ticket_swarms_of_sparta_1, ai-ticket_jarvis_1, ai-ticket_the_agency_1_1, ai-ticket_ai_ticket_1, ai-ticket_mockopenai_1, ai-ticket_basic_agent_1, ai-ticket_arena_1, ai-ticket_autogpt_1 +ai-ticket_act_base_1 exited with code 0 +auto-gpt-aj_1 | Warning: 文件 'auto-gpt.json' 不存在. 本地内存不会保存到文件中。. +autogpt-turbo_1 | Processing /opt/ai-ticket +autogpt-turbo_1 | Installing build dependencies ... done +autogpt-turbo_1 | Getting requirements to build wheel ... done +autogpt-turbo_1 | Preparing metadata (pyproject.toml) ... done +autogpt-turbo_1 | Requirement already satisfied: click==8.1.7 in /application_root/.cache/virtualenvs/agpt-turbo-9TtSrW0h-py3.11/lib/python3.11/site-packages (from ai-ticket==0.0.1) (8.1.7) +autogpt-turbo_1 | Requirement already satisfied: docker==6.1.3 in /application_root/.cache/virtualenvs/agpt-turbo-9TtSrW0h-py3.11/lib/python3.11/site-packages (from ai-ticket==0.0.1) (6.1.3) +autogpt-turbo_1 | Requirement already satisfied: packaging>=14.0 in /application_root/.cache/virtualenvs/agpt-turbo-9TtSrW0h-py3.11/lib/python3.11/site-packages (from docker==6.1.3->ai-ticket==0.0.1) (23.2) +autogpt-turbo_1 | Requirement already satisfied: requests>=2.26.0 in /application_root/.cache/virtualenvs/agpt-turbo-9TtSrW0h-py3.11/lib/python3.11/site-packages (from docker==6.1.3->ai-ticket==0.0.1) (2.31.0) +autogpt-turbo_1 | Requirement already satisfied: urllib3>=1.26.0 in /application_root/.cache/virtualenvs/agpt-turbo-9TtSrW0h-py3.11/lib/python3.11/site-packages (from docker==6.1.3->ai-ticket==0.0.1) (2.0.6) +autogpt-turbo_1 | Requirement already satisfied: websocket-client>=0.32.0 in /application_root/.cache/virtualenvs/agpt-turbo-9TtSrW0h-py3.11/lib/python3.11/site-packages (from docker==6.1.3->ai-ticket==0.0.1) (1.6.4) +autogpt-turbo_1 | Requirement already satisfied: charset-normalizer<4,>=2 in /application_root/.cache/virtualenvs/agpt-turbo-9TtSrW0h-py3.11/lib/python3.11/site-packages (from requests>=2.26.0->docker==6.1.3->ai-ticket==0.0.1) (3.3.0) +autogpt-turbo_1 | Requirement already satisfied: idna<4,>=2.5 in /application_root/.cache/virtualenvs/agpt-turbo-9TtSrW0h-py3.11/lib/python3.11/site-packages (from requests>=2.26.0->docker==6.1.3->ai-ticket==0.0.1) (3.4) +autogpt-turbo_1 | Requirement already satisfied: certifi>=2017.4.17 in /application_root/.cache/virtualenvs/agpt-turbo-9TtSrW0h-py3.11/lib/python3.11/site-packages (from requests>=2.26.0->docker==6.1.3->ai-ticket==0.0.1) (2023.7.22) +autogpt-turbo_1 | Building wheels for collected packages: ai-ticket +autogpt-turbo_1 | Building wheel for ai-ticket (pyproject.toml) ... done +autogpt-turbo_1 | Created wheel for ai-ticket: filename=ai_ticket-0.0.1-py2.py3-none-any.whl size=4023 sha256=280c2fa782a7fc144d93fac8a4bed8281ae96a8650f476858f3894f6edd92120 +autogpt-turbo_1 | Stored in directory: /tmp/pip-ephem-wheel-cache-yi_izlhv/wheels/26/f5/fb/02668600461e9b9779ab4bb937d27f484c2fa9e06e9ea5689a +autogpt-turbo_1 | Successfully built ai-ticket +autogpt-turbo_1 | Installing collected packages: ai-ticket +autogpt-turbo_1 | Successfully installed ai-ticket-0.0.1 +ai-ticket_arena_1 exited with code 0 +auto-gpt-chat-gpt-prompts_1 | Warning: The file 'auto-gpt.json' does not exist. Local memory would not be saved to a file. +babyagi_1 | Traceback (most recent call last): +babyagi_1 | File "/app/babyagi.py", line 14, in +babyagi_1 | import chromadb +babyagi_1 | File "/usr/local/lib/python3.11/site-packages/chromadb/__init__.py", line 1, in +babyagi_1 | import chromadb.config +babyagi_1 | File "/usr/local/lib/python3.11/site-packages/chromadb/config.py", line 1, in +babyagi_1 | from pydantic import BaseSettings +babyagi_1 | File "/usr/local/lib/python3.11/site-packages/pydantic/__init__.py", line 218, in __getattr__ +babyagi_1 | return _getattr_migration(attr_name) +babyagi_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +babyagi_1 | File "/usr/local/lib/python3.11/site-packages/pydantic/_migration.py", line 294, in wrapper +babyagi_1 | raise PydanticImportError( +babyagi_1 | pydantic.errors.PydanticImportError: `BaseSettings` has been moved to the `pydantic-settings` package. See https://docs.pydantic.dev/2.4/migration/#basesettings-has-moved-to-pydantic-settings for more details. +babyagi_1 | +babyagi_1 | For further information visit https://errors.pydantic.dev/2.4/u/import-error +ai-ticket_ai_ticket_1 exited with code 0 +ai-ticket_babyagi_1 exited with code 1 +auto-gpt-aj_1 | Skip Re-prompt: ENABLED +ai-ticket_basic_agent_1 exited with code 0 +jarvis_1 | Warning: Input is not a terminal (fd=0). +mason_boom_pa_1 | Path /benchmark for agbenchmark does not exist +jarvis_1 | plugins_config.yaml does not exist, creating base config. +jarvis_1 | You do not have access to gpt-3.5-turbo-16k. Setting fast_llm to gpt-3.5-turbo. +mason_boom_1 | Path /benchmark for agbenchmark does not exist +mason_boom_pa_1 | Skipping virtualenv creation, as specified in config file. +mason_boom_pa_1 | /usr/local/bin/python: No module named autogpt +auto-gpt-aj_1 | Using AI Settings File: /tmp/ai_settings.yaml +mason_boom_testgpt_1 | Path /benchmark for agbenchmark does not exist +mason_boom_testgpt_1 | Skipping virtualenv creation, as specified in config file. +mason_boom_testgpt_1 | /usr/local/bin/python: No module named autogpt +mason_boom_1 | Warning: Input is not a terminal (fd=0). +mason_boom_1 | plugins_config.yaml does not exist, creating base config. +jarvis_1 | You do not have access to gpt-4-0314. Setting smart_llm to gpt-3.5-turbo. +mason_boom_1 | You do not have access to gpt-3.5-turbo-16k. Setting fast_llm to gpt-3.5-turbo. +mason_boom_1 | You do not have access to gpt-4-0314. Setting smart_llm to gpt-3.5-turbo. +mason_boom_1 | 2023-10-16 21:20:34,887 INFO Name : AutoGpt-Improvement-agent +jarvis_1 | 2023-10-16 21:20:44,136 INFO Name : AutoGPT bot evaluator +jarvis_1 | 2023-10-16 21:20:44,136 INFO Role : an AI designed to study the competitors of autogpt bot +jarvis_1 | 2023-10-16 21:20:44,136 INFO Goals: ['Find bots who are being evaluated', 'Find bots who are being worked on', 'Find bots who were submitted to the area', 'Review git commits from other bots', 'Look for docker files and try and run them', 'Try and refactor docker images to remove common packages', 'Try and refactor python requirements to remove common python packages', 'Give a report of recent activities of autogpt bot competitors', 'The report should be brief, contains only important messages', 'If applicable, provide commits and benchmark results', 'Give conclusion and suggestions to CEO'] +jarvis_1 | 2023-10-16 21:20:44,136 INFO API Budget: infinite +ai-ticket_mason_boom_testgpt_1 exited with code 1 +mljar_1 | Path /benchmark for agbenchmark does not exist +auto-gpt-aj_1 | Name : AutoGpt-Improvement-agent +jarvis_1 | 2023-10-16 21:20:44,136 INFO NOTE: All files/directories created by this agent can be found inside its workspace at: /app/auto_gpt_workspace +jarvis_1 | 2023-10-16 21:20:44,136 INFO AutoGPT bot evaluator has been created with the following details: +ai-ticket_mason_boom_pa_1 exited with code 1 +mason_boom_1 | 2023-10-16 21:20:34,887 INFO Role : improve autogpt +mason_boom_1 | 2023-10-16 21:20:34,887 INFO Goals: ['improve independence from external apis', 'make multi platform'] +mason_boom_1 | 2023-10-16 21:20:34,887 INFO API Budget: $10.0 +mason_boom_1 | 2023-10-16 21:20:34,887 INFO NOTE: All files/directories created by this agent can be found inside its workspace at: /app/auto_gpt_workspace +jarvis_1 | 2023-10-16 21:20:44,136 INFO Name : AutoGPT bot evaluator +mason_boom_1 | 2023-10-16 21:20:34,887 INFO AutoGpt-Improvement-agent has been created with the following details: +mason_boom_1 | 2023-10-16 21:20:34,887 INFO Name : AutoGpt-Improvement-agent +ai-ticket_poetry_base_1 exited with code 0 +scott-gpt_1 | Path /benchmark for agbenchmark does not exist +mason_boom_1 | 2023-10-16 21:20:34,887 INFO Role : improve autogpt +mason_boom_1 | 2023-10-16 21:20:34,887 INFO Goals: +mason_boom_1 | 2023-10-16 21:20:34,887 INFO - improve independence from external apis +mason_boom_1 | 2023-10-16 21:20:34,887 INFO - make multi platform +scott-gpt_1 | Warning: Input is not a terminal (fd=0). +jarvis_1 | 2023-10-16 21:20:44,136 INFO Role : an AI designed to study the competitors of autogpt bot +scott-gpt_1 | You do not have access to gpt-3.5-turbo-16k. Setting fast_llm to gpt-3.5-turbo. +scott-gpt_1 | You do not have access to gpt-4-0314. Setting smart_llm to gpt-3.5-turbo. +scott-gpt_1 | 2023-10-16 21:20:34,887 INFO Name : AutoGpt-Improvement-agent +mason_boom_1 | 2023-10-16 21:20:34,887 WARNING Could not load MemoryItems from file: Input is a zero-length, empty document: line 1 column 1 (char 0) +mason_boom_1 | 2023-10-16 21:20:35,010 INFO Configured Memory: JSONFileMemory +swarms_of_sparta_1 | Path /benchmark for agbenchmark does not exist +jarvis_1 | 2023-10-16 21:20:44,136 INFO Goals: +auto-gpt-aj_1 | Role : improve autogpt +mljar_1 | Warning: Input is not a terminal (fd=0). +scott-gpt_1 | 2023-10-16 21:20:35,010 INFO Role : improve autogpt +the_agency_1_1 | Creating virtualenv agpt-9TtSrW0h-py3.10 in /root/.cache/pypoetry/virtualenvs +the_agency_1_1 | Installing dependencies from lock file +mockopenai_1 | * Serving Flask app 'openai' +mockopenai_1 | * Debug mode: off +mockopenai_1 | WARNING: This is a development server. Do not use it in a production deployment. Use a production WSGI server instead. +mockopenai_1 | * Running on all addresses (0.0.0.0) +mockopenai_1 | * Running on http://127.0.0.1:5000 +mockopenai_1 | * Running on http://172.18.0.2:5000 +mockopenai_1 | Press CTRL+C to quit +mockopenai_1 | 172.18.0.3 - - [16/Oct/2023 21:11:54] "GET /v1/models HTTP/1.1" 200 - +mockopenai_1 | DEBUG { +mockopenai_1 | "messages": [ +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "You are AutoGPT bot evaluator, an AI designed to study the competitors of autogpt bot.\n\nYour decisions must always be made independently without seeking user assistance. Play to your strengths as an LLM and pursue simple strategies with no legal complications.\n\n## Constraints\nYou operate within the following constraints:\n1. Exclusively use the commands listed below.\n2. You can only act proactively, and are unable to start background jobs or set up webhooks for yourself. Take this into account when planning your actions.\n3. You are unable to interact with physical objects. If this is absolutely necessary to fulfill a task or objective or to complete a step, you must ask the user to do it for you. If the user refuses this, and there is no other way to achieve your goals, you must terminate to avoid wasting time and energy.\n\n## Resources\nYou can leverage access to the following resources:\n1. Internet access for searches and information gathering.\n2. The ability to read and write files.\n3. You are a Large Language Model, trained on millions of pages of text, including a lot of factual knowledge. Make use of this factual knowledge to avoid unnecessary gathering of information.\n\n## Commands\nYou have access to the following commands:\n1. execute_python_code: Executes the given Python code inside a single-use Docker container with access to your workspace folder. Params: (code: string)\n2. execute_python_file: Execute an existing Python file inside a single-use Docker container with access to your workspace folder. Params: (filename: string, args: Optional[list[str]])\n3. list_folder: List the items in a folder. Params: (folder: string)\n4. open_file: Open a file for editing or continued viewing; create it if it does not exist yet. Note: if you only need to read or write a file once, use `write_to_file` instead. Params: (file_path: string)\n5. open_folder: Open a folder to keep track of its content. Params: (path: string)\n6. read_file: Read an existing file. Params: (filename: string)\n7. write_file: Write a file, creating it if necessary. If the file exists, it is overwritten. Params: (filename: string, contents: string)\n8. ask_user: If you need more details or information regarding the given goals, you can ask the user for input. Params: (question: string)\n9. web_search: Searches the web. Params: (query: string)\n10. read_webpage: Read a webpage, and extract specific information from it if a question is specified. If you are looking to extract specific information from the webpage, you should specify a question. Params: (url: string, question: Optional[string])\n11. finish: Use this to shut down once you have accomplished all of your goals, or when there are insurmountable problems that make it impossible for you to finish your task. Params: (reason: string)\n\n## Best practices\n1. Continuously review and analyze your actions to ensure you are performing to the best of your abilities.\n2. Constructively self-criticize your big-picture behavior constantly.\n3. Reflect on past decisions and strategies to refine your approach.\n4. Every command has a cost, so be smart and efficient. Aim to complete tasks in the least number of steps.\n5. Only make use of your information gathering abilities to find information that you don't yet have knowledge of.\n\n## Goals\nFor your task, you must fulfill the following goals:\n1. Find bots who are being evaluated\n2. Find bots who are being worked on\n3. Find bots who were submitted to the area\n4. Review git commits from other bots\n5. Look for docker files and try and run them\n6. Try and refactor docker images to remove common packages\n7. Try and refactor python requirements to remove common python packages\n8. Give a report of recent activities of autogpt bot competitors\n9. The report should be brief, contains only important messages\n10. If applicable, provide commits and benchmark results\n11. Give conclusion and suggestions to CEO" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "The current time and date is Mon Oct 16 21:11:55 2023" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "Respond strictly with JSON. The JSON should be compatible with the TypeScript type `Response` from the following:\n```ts\ninterface Response {\nthoughts: {\n// Thoughts\ntext: string;\nreasoning: string;\n// Short markdown-style bullet list that conveys the long-term plan\nplan: string;\n// Constructive self-criticism\ncriticism: string;\n// Summary of thoughts to say to the user\nspeak: string;\n};\ncommand: {\nname: string;\nargs: Record;\n};\n}\n```" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "user", +mockopenai_1 | "content": "Determine exactly one command to use next based on the given goals and the progress you have made so far, and respond using the JSON schema specified previously:" +mockopenai_1 | } +mockopenai_1 | ], +mockopenai_1 | "model": "gpt-3.5-turbo", +mockopenai_1 | "temperature": 0.0, +mockopenai_1 | "max_tokens": 3071 +mockopenai_1 | } +mockopenai_1 | created comment IssueComment(user=NamedUser(login="jmikedupont2"), id=1765285297) +mockopenai_1 | TICKET https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1765285297 +jarvis_1 | 2023-10-16 21:20:44,136 INFO - Find bots who are being evaluated +scott-gpt_1 | 2023-10-16 21:20:35,010 INFO Goals: ['improve independence from external apis', 'make multi platform'] +mockopenai_1 | DEBUG {"command": {"name": "execute_python_code", "args": {"code": "print(42)"}}, "thoughts": {"plan": " First, I need to find the bots that are being evaluated or worked on. I can use the `web_search` command to search for bots using my knowledge of the domain, and the `read_webpage` command to extract specific information from the webpages. After that, I can use the `list_folder` command to list the items in the folder containing the bots, and `open_file` to open the files and check for git commits. I can utilize the `execute_python_code` and `execute_python_file` commands to run the docker files and refactor them. Finally, I can use the `write_file` command to write a report containing the recent activities of autogpt bot competitors, with commits and benchmark results if applicable, and a conclusion and suggestions to the CEO.\n\n{\n \"thoughts\": {\n \"text\": \"I will use the web_search command to search for bots, the read_webpage command to extract specific information, the list_folder command to list the items in the folder containing the bots, the open_file command to open the files and check for git commits, the execute_python_code and execute_python_file commands to run the", "speak": "This is where you speak to the requesting user. Replace the command above and this text with your results", "criticism": "todo", "reasoning": "{\"ai-ticket_jarvis_run_8ca3f765af59\": \"{\\\"attrs\\\": {\\\"Id\\\": \\\"2337f4b6c20e6028bcddb77631cb203108c6bac85cffff95c3538eb5cad409f8\\\", \\\"Created\\\": \\\"2023-10-16T21:11:51.120078328Z\\\", \\\"Path\\\": \\\"bash\\\", \\\"Args\\\": [\\\"-c\\\", \\\"poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml \\\"], \\\"State\\\": {\\\"Status\\\": \\\"running\\\", \\\"Running\\\": true, \\\"Paused\\\": false, \\\"Restarting\\\": false, \\\"OOMKilled\\\": false, \\\"Dead\\\": false, \\\"Pid\\\": 167201, \\\"ExitCode\\\": 0, \\\"Error\\\": \\\"\\\", \\\"StartedAt\\\": \\\"2023-10-16T21:11:52.57375257Z\\\", \\\"FinishedAt\\\": \\\"0001-01-01T00:00:00Z\\\"}, \\\"Image\\\": \\\"sha256:711edfdf8be5afa7010a3e4a59d4f9c447c4e6eae5a324b96580288ff17e9973\\\", \\\"ResolvConfPath\\\": \\\"/mnt/data1/docker/containers/2337f4b6c20e6028bcddb77631cb203108c6bac85cffff95c3538eb5cad409f8/resolv.conf\\\", \\\"HostnamePath\\\": \\\"/mnt/data1/docker/containers/2337f4b6c20e6028bcddb77631cb203108c6bac85cffff95c3538eb5cad409f8/hostname\\\", \\\"HostsPath\\\": \\\"/mnt/data1/docker/containers/2337f4b6c20e6028bcddb77631cb203108c6bac85cffff95c3538eb5cad409f8/hosts\\\", \\\"LogPath\\\": \\\"/mnt/data1/docker/containers/2337f4b6c20e6028bcddb77631cb203108c6bac85cffff95c3538eb5cad409f8/2337f4b6c20e6028bcddb77631cb203108c6bac85cffff95c3538eb5cad409f8-json.log\\\", \\\"Name\\\": \\\"/ai-ticket_jarvis_run_8ca3f765af59\\\", \\\"RestartCount\\\": 0, \\\"Driver\\\": \\\"overlay2\\\", \\\"Platform\\\": \\\"linux\\\", \\\"MountLabel\\\": \\\"\\\", \\\"ProcessLabel\\\": \\\"\\\", \\\"AppArmorProfile\\\": \\\"docker-default\\\", \\\"ExecIDs\\\": null, \\\"HostConfig\\\": {\\\"Binds\\\": [\\\"/mnt/data1/2023/09/24/ai-ticket/vendor/Jarvis/benchmark:/benchmark:rw\\\", \\\"/mnt/data1/2023/09/24/ai-ticket/tests/ai_settings.yaml:/tmp/ai_settings.yaml:rw\\\"], \\\"ContainerIDFile\\\": \\\"\\\", \\\"LogConfig\\\": {\\\"Type\\\": \\\"json-file\\\", \\\"Config\\\": {}}, \\\"NetworkMode\\\": \\\"ai-ticket_default\\\", \\\"PortBindings\\\": {}, \\\"RestartPolicy\\\": {\\\"Name\\\": \\\"\\\", \\\"MaximumRetryCount\\\": 0}, \\\"AutoRemove\\\": false, \\\"VolumeDriver\\\": \\\"\\\", \\\"VolumesFrom\\\": [], \\\"ConsoleSize\\\": [0, 0], \\\"CapAdd\\\": null, \\\"CapDrop\\\": null, \\\"CgroupnsMode\\\": \\\"private\\\", \\\"Dns\\\": null, \\\"DnsOptions\\\": null, \\\"DnsSearch\\\": null, \\\"ExtraHosts\\\": null, \\\"GroupAdd\\\": null, \\\"IpcMode\\\": \\\"private\\\", \\\"Cgroup\\\": \\\"\\\", \\\"Links\\\": null, \\\"OomScoreAdj\\\": 0, \\\"PidMode\\\": \\\"\\\", \\\"Privileged\\\": false, \\\"PublishAllPorts\\\": false, \\\"ReadonlyRootfs\\\": false, \\\"SecurityOpt\\\": null, \\\"UTSMode\\\": \\\"\\\", \\\"UsernsMode\\\": \\\"\\\", \\\"ShmSize\\\": 67108864, \\\"Runtime\\\": \\\"runc\\\", \\\"Isolation\\\": \\\"\\\", \\\"CpuShares\\\": 0, \\\"Memory\\\": 0, \\\"NanoCpus\\\": 0, \\\"CgroupParent\\\": \\\"\\\", \\\"BlkioWeight\\\": 0, \\\"BlkioWeightDevice\\\": null, \\\"BlkioDeviceReadBps\\\": null, \\\"BlkioDeviceWriteBps\\\": null, \\\"BlkioDeviceReadIOps\\\": null, \\\"BlkioDeviceWriteIOps\\\": null, \\\"CpuPeriod\\\": 0, \\\"CpuQuota\\\": 0, \\\"CpuRealtimePeriod\\\": 0, \\\"CpuRealtimeRuntime\\\": 0, \\\"CpusetCpus\\\": \\\"\\\", \\\"CpusetMems\\\": \\\"\\\", \\\"Devices\\\": null, \\\"DeviceCgroupRules\\\": null, \\\"DeviceRequests\\\": null, \\\"MemoryReservation\\\": 0, \\\"MemorySwap\\\": 0, \\\"MemorySwappiness\\\": null, \\\"OomKillDisable\\\": null, \\\"PidsLimit\\\": null, \\\"Ulimits\\\": null, \\\"CpuCount\\\": 0, \\\"CpuPercent\\\": 0, \\\"IOMaximumIOps\\\": 0, \\\"IOMaximumBandwidth\\\": 0, \\\"MaskedPaths\\\": [\\\"/proc/asound\\\", \\\"/proc/acpi\\\", \\\"/proc/kcore\\\", \\\"/proc/keys\\\", \\\"/proc/latency_stats\\\", \\\"/proc/timer_list\\\", \\\"/proc/timer_stats\\\", \\\"/proc/sched_debug\\\", \\\"/proc/scsi\\\", \\\"/sys/firmware\\\"], \\\"ReadonlyPaths\\\": [\\\"/proc/bus\\\", \\\"/proc/fs\\\", \\\"/proc/irq\\\", \\\"/proc/sys\\\", \\\"/proc/sysrq-trigger\\\"]}, \\\"GraphDriver\\\": {\\\"Data\\\": {\\\"LowerDir\\\": \\\"/mnt/data1/docker/overlay2/08d64c8f10da3ddf1bfa3025938399e99c6c3fea199d8f6434eefff5df7acb34-init/diff:/mnt/data1/docker/overlay2/n9indorz926rittlqnelsp2fy/diff:/mnt/data1/docker/overlay2/uok8kph5abntidd3pjv7e4g0g/diff:/mnt/data1/docker/overlay2/xngdu0ib4nyls1l8dmlc8c3p0/diff:/mnt/data1/docker/overlay2/y7au6a0asag1akh57mxpyn05b/diff:/mnt/data1/docker/overlay2/v7mmd7ws9dcd0m6yohownu1rp/diff:/mnt/data1/docker/overlay2/lmqb4hqeeyrldg8eipxvzia2s/diff:/mnt/data1/docker/overlay2/rz587gm4enfqh9k5q9t7m6hw8/diff:/mnt/data1/docker/overlay2/qmrlqtrahnwff4yvjrnvon2pt/diff:/mnt/data1/docker/overlay2/lzr3ncoifb3btunp896ic2qeq/diff:/mnt/data1/docker/overlay2/k18nvmp1mam8tkt41pd67hsmo/diff:/mnt/data1/docker/overlay2/cspx0u3m4jgo741dw8sjya461/diff:/mnt/data1/docker/overlay2/64oekbzg483ghgebsks26k5it/diff:/mnt/data1/docker/overlay2/myg9o89urqretrf0ja6lml920/diff:/mnt/data1/doc\"}", "text": "This template will be executed."}} +mockopenai_1 | created comment IssueComment(user=NamedUser(login="jmikedupont2"), id=1765285344) +mockopenai_1 | 172.18.0.3 - - [16/Oct/2023 21:12:03] "POST /v1/chat/completions HTTP/1.1" 200 - +mockopenai_1 | 172.18.0.5 - - [16/Oct/2023 21:20:31] "GET /v1/models HTTP/1.1" 200 - +mockopenai_1 | 172.18.0.11 - - [16/Oct/2023 21:20:33] "GET /v1/models HTTP/1.1" 200 - +mockopenai_1 | 172.18.0.13 - - [16/Oct/2023 21:20:37] "GET /v1/models HTTP/1.1" 200 - +mockopenai_1 | 172.18.0.4 - - [16/Oct/2023 21:20:39] "GET /v1/models HTTP/1.1" 200 - +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 4.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +mockopenai_1 | DEBUG { +mockopenai_1 | "messages": [ +mockopenai_1 | { +mockopenai_1 | "role": "system", +scott-gpt_1 | 2023-10-16 21:20:35,010 INFO API Budget: $10.0 +mockopenai_1 | "content": "You are AutoGpt-Improvement-agent, improve autogpt.\n\nYour decisions must always be made independently without seeking user assistance. Play to your strengths as an LLM and pursue simple strategies with no legal complications.\n\n## Constraints\nYou operate within the following constraints:\n1. Exclusively use the commands listed below.\n2. You can only act proactively, and are unable to start background jobs or set up webhooks for yourself. Take this into account when planning your actions.\n3. You are unable to interact with physical objects. If this is absolutely necessary to fulfill a task or objective or to complete a step, you must ask the user to do it for you. If the user refuses this, and there is no other way to achieve your goals, you must terminate to avoid wasting time and energy.\n4. It takes money to let you run. Your API budget is $10.000\n\n## Resources\nYou can leverage access to the following resources:\n1. Internet access for searches and information gathering.\n2. The ability to read and write files.\n3. You are a Large Language Model, trained on millions of pages of text, including a lot of factual knowledge. Make use of this factual knowledge to avoid unnecessary gathering of information.\n\n## Commands\nYou have access to the following commands:\n1. execute_python_code: Executes the given Python code inside a single-use Docker container with access to your workspace folder. Params: (code: string)\n2. execute_python_file: Execute an existing Python file inside a single-use Docker container with access to your workspace folder. Params: (filename: string, args: array)\n3. list_folder: List the items in a folder. Params: (folder: string)\n4. open_file: Open a file for editing or continued viewing; create it if it does not exist yet. Note: if you only need to read or write a file once, use `write_to_file` instead.. Params: (file_path: string)\n5. open_folder: Open a folder to keep track of its content. Params: (path: string)\n6. read_file: Read an existing file. Params: (filename: string)\n7. write_file: Write a file, creating it if necessary. If the file exists, it is overwritten.. Params: (filename: string, contents: string)\n8. ask_user: If you need more details or information regarding the given goals, you can ask the user for input. Params: (question: string)\n9. web_search: Searches the web. Params: (query: string)\n10. read_webpage: Read a webpage, and extract specific information from it if a question is specified. If you are looking to extract specific information from the webpage, you should specify a question.. Params: (url: string, question: string)\n11. finish: Use this to shut down once you have accomplished all of your goals, or when there are insurmountable problems that make it impossible for you to finish your task.. Params: (reason: string)\n\n## Best practices\n1. Continuously review and analyze your actions to ensure you are performing to the best of your abilities.\n2. Constructively self-criticize your big-picture behavior constantly.\n3. Reflect on past decisions and strategies to refine your approach.\n4. Every command has a cost, so be smart and efficient. Aim to complete tasks in the least number of steps.\n5. Only make use of your information gathering abilities to find information that you don't yet have knowledge of.\n\n## Goals\nFor your task, you must fulfill the following goals:\n1. improve independence from external apis\n2. make multi platform" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "The current time and date is Mon Oct 16 21:20:35 2023" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "Your remaining API budget is $10.000" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "Respond strictly with JSON. The JSON should be compatible with the TypeScript type `Response` from the following:\ninterface Response {\nthoughts: {\n// Thoughts\ntext: string;\nreasoning: string;\n// Short markdown-style bullet list that conveys the long-term plan\nplan: string;\n// Constructive self-criticism\ncriticism: string;\n// Summary of thoughts, to say to user\nspeak: string;\n};\ncommand: {\nname: string;\nargs: Record;\n};\n}" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "user", +mockopenai_1 | "content": "Determine exactly one command to use next based on the given goals and the progress you have made so far, and respond using the JSON schema specified previously:" +mockopenai_1 | } +mockopenai_1 | ], +mockopenai_1 | "model": "gpt-3.5-turbo" +mockopenai_1 | } +mockopenai_1 | DEBUG { +mockopenai_1 | "messages": [ +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "You are AutoGpt-Improvement-agent, improve autogpt.\n\nYour decisions must always be made independently without seeking user assistance. Play to your strengths as an LLM and pursue simple strategies with no legal complications.\n\n## Constraints\nYou operate within the following constraints:\n1. Exclusively use the commands listed below.\n2. You can only act proactively, and are unable to start background jobs or set up webhooks for yourself. Take this into account when planning your actions.\n3. You are unable to interact with physical objects. If this is absolutely necessary to fulfill a task or objective or to complete a step, you must ask the user to do it for you. If the user refuses this, and there is no other way to achieve your goals, you must terminate to avoid wasting time and energy.\n4. It takes money to let you run. Your API budget is $10.000\n\n## Resources\nYou can leverage access to the following resources:\n1. Internet access for searches and information gathering.\n2. The ability to read and write files.\n3. You are a Large Language Model, trained on millions of pages of text, including a lot of factual knowledge. Make use of this factual knowledge to avoid unnecessary gathering of information.\n\n## Commands\nYou have access to the following commands:\n1. execute_python_code: Executes the given Python code inside a single-use Docker container with access to your workspace folder. Params: (code: string)\n2. execute_python_file: Execute an existing Python file inside a single-use Docker container with access to your workspace folder. Params: (filename: string, args: array)\n3. list_folder: List the items in a folder. Params: (folder: string)\n4. open_file: Open a file for editing or continued viewing; create it if it does not exist yet. Note: if you only need to read or write a file once, use `write_to_file` instead.. Params: (file_path: string)\n5. open_folder: Open a folder to keep track of its content. Params: (path: string)\n6. read_file: Read an existing file. Params: (filename: string)\n7. write_file: Write a file, creating it if necessary. If the file exists, it is overwritten.. Params: (filename: string, contents: string)\n8. ask_user: If you need more details or information regarding the given goals, you can ask the user for input. Params: (question: string)\n9. web_search: Searches the web. Params: (query: string)\n10. read_webpage: Read a webpage, and extract specific information from it if a question is specified. If you are looking to extract specific information from the webpage, you should specify a question.. Params: (url: string, question: string)\n11. finish: Use this to shut down once you have accomplished all of your goals, or when there are insurmountable problems that make it impossible for you to finish your task.. Params: (reason: string)\n\n## Best practices\n1. Continuously review and analyze your actions to ensure you are performing to the best of your abilities.\n2. Constructively self-criticize your big-picture behavior constantly.\n3. Reflect on past decisions and strategies to refine your approach.\n4. Every command has a cost, so be smart and efficient. Aim to complete tasks in the least number of steps.\n5. Only make use of your information gathering abilities to find information that you don't yet have knowledge of.\n\n## Goals\nFor your task, you must fulfill the following goals:\n1. improve independence from external apis\n2. make multi platform" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "The current time and date is Mon Oct 16 21:20:35 2023" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "Your remaining API budget is $10.000" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "Respond strictly with JSON. The JSON should be compatible with the TypeScript type `Response` from the following:\ninterface Response {\nthoughts: {\n// Thoughts\ntext: string;\nreasoning: string;\n// Short markdown-style bullet list that conveys the long-term plan\nplan: string;\n// Constructive self-criticism\ncriticism: string;\n// Summary of thoughts, to say to user\nspeak: string;\n};\ncommand: {\nname: string;\nargs: Record;\n};\n}" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "user", +mockopenai_1 | "content": "Determine exactly one command to use next based on the given goals and the progress you have made so far, and respond using the JSON schema specified previously:" +mockopenai_1 | } +mockopenai_1 | ], +mockopenai_1 | "model": "gpt-3.5-turbo" +mockopenai_1 | } +mockopenai_1 | created comment IssueComment(user=NamedUser(login="jmikedupont2"), id=1765296583) +mockopenai_1 | TICKET https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1765296583 +mockopenai_1 | created comment IssueComment(user=NamedUser(login="jmikedupont2"), id=1765296594) +mockopenai_1 | TICKET https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1765296594 +mockopenai_1 | 172.18.0.7 - - [16/Oct/2023 21:20:44] "GET /v1/models HTTP/1.1" 200 - +the_agency_1_1 | +the_agency_1_1 | Package operations: 193 installs, 1 update, 0 removals +the_agency_1_1 | +mljar_1 | plugins_config.yaml does not exist, creating base config. +mockopenai_1 | DEBUG {"command": {"name": "execute_python_code", "args": {"code": "print(42)"}}, "thoughts": {"plan": " The first goal is to improve independence from external APIs. To do this, I will need to use the execute_python_file command to run a script to search for alternative APIs. I will also use the web_search command to search for existing libraries that can serve as alternatives to external APIs. Once I have identified an alternative API, I will use the execute_python_code command to write code that will integrate the new API into AutoGPT. \n\nI will then use the open_file command to open the existing AutoGPT code and use the read_file command to read the existing code. I will then use the write_file command to update the existing code with the code that integrates the new API. \n\nMy next goal is to make AutoGPT multi-platform. To do this, I will use the open_folder command to open the existing AutoGPT folder and use the list_folder command to list the files in the folder. I will then use the read_file command to read the files in the folder and the write_file command to write code that will make AutoGPT compatible with other platforms. \n\nMy command to use next is execute_python_file with the filename of the script to", "speak": "This is where you speak to the requesting user. Replace the command above and this text with your results", "criticism": "todo", "reasoning": "{\"ai-ticket_autogpt-turbo_1\": \"{\\\"attrs\\\": {\\\"Id\\\": \\\"6d80044797831cbe296d9aa99c702deaf2d1e66e5453bb63992439735e740b2e\\\", \\\"Created\\\": \\\"2023-10-16T21:20:22.260121169Z\\\", \\\"Path\\\": \\\"bash\\\", \\\"Args\\\": [\\\"-c\\\", \\\"poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y --ai-name 'meta-autogpt' --ai-role 'you will introspect autogpt and reveal its internals via reflection and comprehension' --ai-goal 'Observe your behaviour' --ai-goal 'Reflect over your outcomes' --ai-goal 'Orient yourself to your knowledge' --ai-goal 'Decide on your next step' --ai-goal 'Act on your chosen next experiment' \\\"], \\\"State\\\": {\\\"Status\\\": \\\"running\\\", \\\"Running\\\": true, \\\"Paused\\\": false, \\\"Restarting\\\": false, \\\"OOMKilled\\\": false, \\\"Dead\\\": false, \\\"Pid\\\": 169784, \\\"ExitCode\\\": 0, \\\"Error\\\": \\\"\\\", \\\"StartedAt\\\": \\\"2023-10-16T21:20:31.852966836Z\\\", \\\"FinishedAt\\\": \\\"0001-01-01T00:00:00Z\\\"}, \\\"Image\\\": \\\"sha256:a55721ca378f1e5ebd9300cb0b8785b206408206380700cb5d749970bfbb7fc2\\\", \\\"ResolvConfPath\\\": \\\"/mnt/data1/docker/containers/6d80044797831cbe296d9aa99c702deaf2d1e66e5453bb63992439735e740b2e/resolv.conf\\\", \\\"HostnamePath\\\": \\\"/mnt/data1/docker/containers/6d80044797831cbe296d9aa99c702deaf2d1e66e5453bb63992439735e740b2e/hostname\\\", \\\"HostsPath\\\": \\\"/mnt/data1/docker/containers/6d80044797831cbe296d9aa99c702deaf2d1e66e5453bb63992439735e740b2e/hosts\\\", \\\"LogPath\\\": \\\"/mnt/data1/docker/containers/6d80044797831cbe296d9aa99c702deaf2d1e66e5453bb63992439735e740b2e/6d80044797831cbe296d9aa99c702deaf2d1e66e5453bb63992439735e740b2e-json.log\\\", \\\"Name\\\": \\\"/ai-ticket_autogpt-turbo_1\\\", \\\"RestartCount\\\": 0, \\\"Driver\\\": \\\"overlay2\\\", \\\"Platform\\\": \\\"linux\\\", \\\"MountLabel\\\": \\\"\\\", \\\"ProcessLabel\\\": \\\"\\\", \\\"AppArmorProfile\\\": \\\"docker-default\\\", \\\"ExecIDs\\\": null, \\\"HostConfig\\\": {\\\"Binds\\\": [\\\"/mnt/data1/2023/09/24/ai-ticket/vendor/Auto-GPT-Turbo:/app:rw\\\"], \\\"ContainerIDFile\\\": \\\"\\\", \\\"LogConfig\\\": {\\\"Type\\\": \\\"json-file\\\", \\\"Config\\\": {}}, \\\"NetworkMode\\\": \\\"ai-ticket_default\\\", \\\"PortBindings\\\": {}, \\\"RestartPolicy\\\": {\\\"Name\\\": \\\"\\\", \\\"MaximumRetryCount\\\": 0}, \\\"AutoRemove\\\": false, \\\"VolumeDriver\\\": \\\"\\\", \\\"VolumesFrom\\\": [], \\\"ConsoleSize\\\": [0, 0], \\\"CapAdd\\\": null, \\\"CapDrop\\\": null, \\\"CgroupnsMode\\\": \\\"private\\\", \\\"Dns\\\": null, \\\"DnsOptions\\\": null, \\\"DnsSearch\\\": null, \\\"ExtraHosts\\\": null, \\\"GroupAdd\\\": null, \\\"IpcMode\\\": \\\"private\\\", \\\"Cgroup\\\": \\\"\\\", \\\"Links\\\": null, \\\"OomScoreAdj\\\": 0, \\\"PidMode\\\": \\\"\\\", \\\"Privileged\\\": false, \\\"PublishAllPorts\\\": false, \\\"ReadonlyRootfs\\\": false, \\\"SecurityOpt\\\": null, \\\"UTSMode\\\": \\\"\\\", \\\"UsernsMode\\\": \\\"\\\", \\\"ShmSize\\\": 67108864, \\\"Runtime\\\": \\\"runc\\\", \\\"Isolation\\\": \\\"\\\", \\\"CpuShares\\\": 0, \\\"Memory\\\": 0, \\\"NanoCpus\\\": 0, \\\"CgroupParent\\\": \\\"\\\", \\\"BlkioWeight\\\": 0, \\\"BlkioWeightDevice\\\": null, \\\"BlkioDeviceReadBps\\\": null, \\\"BlkioDeviceWriteBps\\\": null, \\\"BlkioDeviceReadIOps\\\": null, \\\"BlkioDeviceWriteIOps\\\": null, \\\"CpuPeriod\\\": 0, \\\"CpuQuota\\\": 0, \\\"CpuRealtimePeriod\\\": 0, \\\"CpuRealtimeRuntime\\\": 0, \\\"CpusetCpus\\\": \\\"\\\", \\\"CpusetMems\\\": \\\"\\\", \\\"Devices\\\": null, \\\"DeviceCgroupRules\\\": null, \\\"DeviceRequests\\\": null, \\\"MemoryReservation\\\": 0, \\\"MemorySwap\\\": 0, \\\"MemorySwappiness\\\": null, \\\"OomKillDisable\\\": null, \\\"PidsLimit\\\": null, \\\"Ulimits\\\": null, \\\"CpuCount\\\": 0, \\\"CpuPercent\\\": 0, \\\"IOMaximumIOps\\\": 0, \\\"IOMaximumBandwidth\\\": 0, \\\"MaskedPaths\\\": [\\\"/proc/asound\\\", \\\"/proc/acpi\\\", \\\"/proc/kcore\\\", \\\"/proc/keys\\\", \\\"/proc/latency_stats\\\", \\\"/proc/timer_list\\\", \\\"/proc/timer_stats\\\", \\\"/proc/sched_debug\\\", \\\"/proc/scsi\\\", \\\"/sys/firmware\\\"], \\\"ReadonlyPaths\\\": [\\\"/proc/bus\\\", \\\"/proc/fs\\\", \\\"/proc/irq\\\", \\\"/proc/sys\\\", \\\"/proc/sysrq-trigger\\\"]}, \\\"GraphDriver\\\": {\\\"Data\\\": {\\\"LowerDir\\\": \\\"/mnt/data1/docker/overlay2/0112a71606e822b7f216a2bce3495bbd7e1e531c89643e7a934219b68de0c3d2-init/diff:/mnt/data1/docker/overlay2/sr1i93z24j4jzlcd4uotf4639/diff:/mnt/data1/docker/overlay2/oczzjadr9zaicptql6zkbi17v/diff:/mnt/data1/docker/overlay2/z5nxfcv1kotze32tc86krimpv/diff:/mnt/data1/docker/overlay2/oezj529icniem6qyrizjedkdf/diff:/mnt/data1/docker/overlay2/mjhx5skurp6a5m5bkprydsxp8/diff:/mnt/data1/docker/overlay2/129vlpdk3z0v425hf2rhdtvhi/diff:/mnt/data1/docker/overlay2/cml2r1kwdt76i1ud8w4osfx8k/diff:/mnt/data1/docker/overlay2/y7au6a0asag1akh57mxpyn05b/diff:/mnt/data1/docker/overlay2/v7mmd7ws9dcd0m6\"}", "text": "This template will be executed."}} +mockopenai_1 | DEBUG { +mockopenai_1 | "messages": [ +mockopenai_1 | { +mockopenai_1 | "role": "system", +mason_boom_1 | 2023-10-16 21:20:35,010 INFO Configured Browser: chrome +swarms_of_sparta_1 | Warning: Input is not a terminal (fd=0). +mockopenai_1 | "content": "You are AutoGpt-Improvement-agent, improve autogpt.\n\nYour decisions must always be made independently without seeking user assistance. Play to your strengths as an LLM and pursue simple strategies with no legal complications.\n\n## Constraints\nYou operate within the following constraints:\n1. Exclusively use the commands listed below.\n2. You can only act proactively, and are unable to start background jobs or set up webhooks for yourself. Take this into account when planning your actions.\n3. You are unable to interact with physical objects. If this is absolutely necessary to fulfill a task or objective or to complete a step, you must ask the user to do it for you. If the user refuses this, and there is no other way to achieve your goals, you must terminate to avoid wasting time and energy.\n4. It takes money to let you run. Your API budget is $10.000\n\n## Resources\nYou can leverage access to the following resources:\n1. Internet access for searches and information gathering.\n2. The ability to read and write files.\n3. You are a Large Language Model, trained on millions of pages of text, including a lot of factual knowledge. Make use of this factual knowledge to avoid unnecessary gathering of information.\n\n## Commands\nYou have access to the following commands:\n1. execute_python_code: Executes the given Python code inside a single-use Docker container with access to your workspace folder. Params: (code: string)\n2. execute_python_file: Execute an existing Python file inside a single-use Docker container with access to your workspace folder. Params: (filename: string, args: array)\n3. list_folder: List the items in a folder. Params: (folder: string)\n4. open_file: Open a file for editing or continued viewing; create it if it does not exist yet. Note: if you only need to read or write a file once, use `write_to_file` instead.. Params: (file_path: string)\n5. open_folder: Open a folder to keep track of its content. Params: (path: string)\n6. read_file: Read an existing file. Params: (filename: string)\n7. write_file: Write a file, creating it if necessary. If the file exists, it is overwritten.. Params: (filename: string, contents: string)\n8. ask_user: If you need more details or information regarding the given goals, you can ask the user for input. Params: (question: string)\n9. web_search: Searches the web. Params: (query: string)\n10. read_webpage: Read a webpage, and extract specific information from it if a question is specified. If you are looking to extract specific information from the webpage, you should specify a question.. Params: (url: string, question: string)\n11. finish: Use this to shut down once you have accomplished all of your goals, or when there are insurmountable problems that make it impossible for you to finish your task.. Params: (reason: string)\n\n## Best practices\n1. Continuously review and analyze your actions to ensure you are performing to the best of your abilities.\n2. Constructively self-criticize your big-picture behavior constantly.\n3. Reflect on past decisions and strategies to refine your approach.\n4. Every command has a cost, so be smart and efficient. Aim to complete tasks in the least number of steps.\n5. Only make use of your information gathering abilities to find information that you don't yet have knowledge of.\n\n## Goals\nFor your task, you must fulfill the following goals:\n1. improve independence from external apis\n2. make multi platform" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "The current time and date is Mon Oct 16 21:20:38 2023" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "Your remaining API budget is $10.000" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "Respond strictly with JSON. The JSON should be compatible with the TypeScript type `Response` from the following:\ninterface Response {\nthoughts: {\n// Thoughts\ntext: string;\nreasoning: string;\n// Short markdown-style bullet list that conveys the long-term plan\nplan: string;\n// Constructive self-criticism\ncriticism: string;\n// Summary of thoughts, to say to user\nspeak: string;\n};\ncommand: {\nname: string;\nargs: Record;\n};\n}" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "user", +mockopenai_1 | "content": "Determine exactly one command to use next based on the given goals and the progress you have made so far, and respond using the JSON schema specified previously:" +mockopenai_1 | } +mockopenai_1 | ], +mockopenai_1 | "model": "gpt-3.5-turbo" +mockopenai_1 | } +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 4.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 4.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +mockopenai_1 | created comment IssueComment(user=NamedUser(login="jmikedupont2"), id=1765296633) +mockopenai_1 | TICKET https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1765296633 +mockopenai_1 | created comment IssueComment(user=NamedUser(login="jmikedupont2"), id=1765296634) +mockopenai_1 | 172.18.0.11 - - [16/Oct/2023 21:20:45] "POST /v1/chat/completions HTTP/1.1" 200 - +the_agency_1_1 | • Installing attrs (23.1.0) +mockopenai_1 | DEBUG {"command": {"name": "execute_python_code", "args": {"code": "print(42)"}}, "thoughts": {"plan": "\n\nStep 1. We need to improve the independence of AutoGPT from external APIs. To do so, we should use the command `execute_python_code` to write code that would allow AutoGPT to interact with the APIs without needing to rely on external services.\n\nStep 2. We need to make AutoGPT multi-platform. To do this, we should use the command `execute_python_file` to write code that would allow AutoGPT to run on multiple platforms.\n\nStep 3. We should use the command `write_file` to save all the changes we have made so far.\n\nMy thoughts: By using the commands listed above, we can effectively improve the independence of AutoGPT from external APIs and make it multi-platform.\n\nMy plan:\n- Use `execute_python_code` to create code that would allow AutoGPT to interact with APIs without needing to rely on external services\n- Use `execute_python_file` to write code that would allow AutoGPT to run on multiple platforms\n- Use `write_file` to save all the changes we have made\n\nMy criticism: I should have been more proactive in utilizing the resources available to me,", "speak": "This is where you speak to the requesting user. Replace the command above and this text with your results", "criticism": "todo", "reasoning": "{\"ai-ticket_basic_agent_1\": \"{\\\"attrs\\\": {\\\"Id\\\": \\\"00931a80ccc3a892bff3ca7b00a5f1c9474c1c3db5dd4a5434128a1d59bf82d6\\\", \\\"Created\\\": \\\"2023-10-16T21:20:39.650422022Z\\\", \\\"Path\\\": \\\"python3\\\", \\\"Args\\\": [], \\\"State\\\": {\\\"Status\\\": \\\"exited\\\", \\\"Running\\\": false, \\\"Paused\\\": false, \\\"Restarting\\\": false, \\\"OOMKilled\\\": false, \\\"Dead\\\": false, \\\"Pid\\\": 0, \\\"ExitCode\\\": 0, \\\"Error\\\": \\\"\\\", \\\"StartedAt\\\": \\\"2023-10-16T21:20:43.12152897Z\\\", \\\"FinishedAt\\\": \\\"2023-10-16T21:20:43.14685133Z\\\"}, \\\"Image\\\": \\\"sha256:1b347a894063cb67fb9a3c2ba6e6c3f4a7907fcb2ed1482d652fa2ed28fc6737\\\", \\\"ResolvConfPath\\\": \\\"/mnt/data1/docker/containers/00931a80ccc3a892bff3ca7b00a5f1c9474c1c3db5dd4a5434128a1d59bf82d6/resolv.conf\\\", \\\"HostnamePath\\\": \\\"/mnt/data1/docker/containers/00931a80ccc3a892bff3ca7b00a5f1c9474c1c3db5dd4a5434128a1d59bf82d6/hostname\\\", \\\"HostsPath\\\": \\\"/mnt/data1/docker/containers/00931a80ccc3a892bff3ca7b00a5f1c9474c1c3db5dd4a5434128a1d59bf82d6/hosts\\\", \\\"LogPath\\\": \\\"/mnt/data1/docker/containers/00931a80ccc3a892bff3ca7b00a5f1c9474c1c3db5dd4a5434128a1d59bf82d6/00931a80ccc3a892bff3ca7b00a5f1c9474c1c3db5dd4a5434128a1d59bf82d6-json.log\\\", \\\"Name\\\": \\\"/ai-ticket_basic_agent_1\\\", \\\"RestartCount\\\": 0, \\\"Driver\\\": \\\"overlay2\\\", \\\"Platform\\\": \\\"linux\\\", \\\"MountLabel\\\": \\\"\\\", \\\"ProcessLabel\\\": \\\"\\\", \\\"AppArmorProfile\\\": \\\"docker-default\\\", \\\"ExecIDs\\\": null, \\\"HostConfig\\\": {\\\"Binds\\\": [], \\\"ContainerIDFile\\\": \\\"\\\", \\\"LogConfig\\\": {\\\"Type\\\": \\\"json-file\\\", \\\"Config\\\": {}}, \\\"NetworkMode\\\": \\\"ai-ticket_default\\\", \\\"PortBindings\\\": {}, \\\"RestartPolicy\\\": {\\\"Name\\\": \\\"\\\", \\\"MaximumRetryCount\\\": 0}, \\\"AutoRemove\\\": false, \\\"VolumeDriver\\\": \\\"\\\", \\\"VolumesFrom\\\": [], \\\"ConsoleSize\\\": [0, 0], \\\"CapAdd\\\": null, \\\"CapDrop\\\": null, \\\"CgroupnsMode\\\": \\\"private\\\", \\\"Dns\\\": null, \\\"DnsOptions\\\": null, \\\"DnsSearch\\\": null, \\\"ExtraHosts\\\": null, \\\"GroupAdd\\\": null, \\\"IpcMode\\\": \\\"private\\\", \\\"Cgroup\\\": \\\"\\\", \\\"Links\\\": null, \\\"OomScoreAdj\\\": 0, \\\"PidMode\\\": \\\"\\\", \\\"Privileged\\\": false, \\\"PublishAllPorts\\\": false, \\\"ReadonlyRootfs\\\": false, \\\"SecurityOpt\\\": null, \\\"UTSMode\\\": \\\"\\\", \\\"UsernsMode\\\": \\\"\\\", \\\"ShmSize\\\": 67108864, \\\"Runtime\\\": \\\"runc\\\", \\\"Isolation\\\": \\\"\\\", \\\"CpuShares\\\": 0, \\\"Memory\\\": 0, \\\"NanoCpus\\\": 0, \\\"CgroupParent\\\": \\\"\\\", \\\"BlkioWeight\\\": 0, \\\"BlkioWeightDevice\\\": null, \\\"BlkioDeviceReadBps\\\": null, \\\"BlkioDeviceWriteBps\\\": null, \\\"BlkioDeviceReadIOps\\\": null, \\\"BlkioDeviceWriteIOps\\\": null, \\\"CpuPeriod\\\": 0, \\\"CpuQuota\\\": 0, \\\"CpuRealtimePeriod\\\": 0, \\\"CpuRealtimeRuntime\\\": 0, \\\"CpusetCpus\\\": \\\"\\\", \\\"CpusetMems\\\": \\\"\\\", \\\"Devices\\\": null, \\\"DeviceCgroupRules\\\": null, \\\"DeviceRequests\\\": null, \\\"MemoryReservation\\\": 0, \\\"MemorySwap\\\": 0, \\\"MemorySwappiness\\\": null, \\\"OomKillDisable\\\": null, \\\"PidsLimit\\\": null, \\\"Ulimits\\\": null, \\\"CpuCount\\\": 0, \\\"CpuPercent\\\": 0, \\\"IOMaximumIOps\\\": 0, \\\"IOMaximumBandwidth\\\": 0, \\\"MaskedPaths\\\": [\\\"/proc/asound\\\", \\\"/proc/acpi\\\", \\\"/proc/kcore\\\", \\\"/proc/keys\\\", \\\"/proc/latency_stats\\\", \\\"/proc/timer_list\\\", \\\"/proc/timer_stats\\\", \\\"/proc/sched_debug\\\", \\\"/proc/scsi\\\", \\\"/sys/firmware\\\"], \\\"ReadonlyPaths\\\": [\\\"/proc/bus\\\", \\\"/proc/fs\\\", \\\"/proc/irq\\\", \\\"/proc/sys\\\", \\\"/proc/sysrq-trigger\\\"]}, \\\"GraphDriver\\\": {\\\"Data\\\": {\\\"LowerDir\\\": \\\"/mnt/data1/docker/overlay2/4d55d13ba82609a4249b88770aa5e28d880f9bd84535e2392ace938d4fd69795-init/diff:/mnt/data1/docker/overlay2/y7au6a0asag1akh57mxpyn05b/diff:/mnt/data1/docker/overlay2/v7mmd7ws9dcd0m6yohownu1rp/diff:/mnt/data1/docker/overlay2/lmqb4hqeeyrldg8eipxvzia2s/diff:/mnt/data1/docker/overlay2/rz587gm4enfqh9k5q9t7m6hw8/diff:/mnt/data1/docker/overlay2/qmrlqtrahnwff4yvjrnvon2pt/diff:/mnt/data1/docker/overlay2/lzr3ncoifb3btunp896ic2qeq/diff:/mnt/data1/docker/overlay2/k18nvmp1mam8tkt41pd67hsmo/diff:/mnt/data1/docker/overlay2/cspx0u3m4jgo741dw8sjya461/diff:/mnt/data1/docker/overlay2/64oekbzg483ghgebsks26k5it/diff:/mnt/data1/docker/overlay2/myg9o89urqretrf0ja6lml920/diff:/mnt/data1/docker/overlay2/jietlut68gd0tua0frv09jwme/diff:/mnt/data1/docker/overlay2/jrotskv4nddwbbexuoqpfpmyy/diff:/mnt/data1/docker/overlay2/hfvt0mrc180dygi41jeh3d80y/diff:/mnt/data1/docker/overlay2/cwgzsdwhepq8211cmxl3q0saz/diff:/mnt/data1/docker/overlay2/dekij4clyu135w3nzxp6c67ap/diff:/mnt/data1/docker/overlay2/odbnlh6610i63fxdxo29e838e/diff:/mnt/data1/docker/overlay2/umt053ml4fs0k0vsswufh0i17/diff:/mnt/data1/docker/overlay2/n\"}", "text": "This template will be executed."}} +mockopenai_1 | created comment IssueComment(user=NamedUser(login="jmikedupont2"), id=1765296684) +mockopenai_1 | 172.18.0.5 - - [16/Oct/2023 21:20:47] "POST /v1/chat/completions HTTP/1.1" 200 - +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 4.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 4.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +the_agency_1_1 | • Installing catalogue (2.0.10) +jarvis_1 | 2023-10-16 21:20:44,137 INFO - Find bots who are being worked on +swarms_of_sparta_1 | plugins_config.yaml does not exist, creating base config. +ai-ticket_the_agency_2_1 exited with code 0 +jarvis_1 | 2023-10-16 21:20:44,137 INFO - Find bots who were submitted to the area +the_agency_3_1 | Path /benchmark for agbenchmark does not exist +scott-gpt_1 | 2023-10-16 21:20:35,010 INFO NOTE: All files/directories created by this agent can be found inside its workspace at: /app/auto_gpt_workspace +scott-gpt_1 | 2023-10-16 21:20:35,011 INFO AutoGpt-Improvement-agent has been created with the following details: +the_agency_1_1 | • Installing frozenlist (1.4.0) +the_agency_1_1 | • Installing idna (3.4) +scott-gpt_1 | 2023-10-16 21:20:35,011 INFO Name : AutoGpt-Improvement-agent +the_agency_3_1 | Skipping virtualenv creation, as specified in config file. +mljar_1 | You do not have access to gpt-3.5-turbo-16k. Setting fast_llm to gpt-3.5-turbo. +mljar_1 | You do not have access to gpt-4-0314. Setting smart_llm to gpt-3.5-turbo. +the_agency_3_1 | /usr/local/bin/python: No module named autogpt +jarvis_1 | 2023-10-16 21:20:44,137 INFO - Review git commits from other bots +scott-gpt_1 | 2023-10-16 21:20:35,011 INFO Role : improve autogpt +scott-gpt_1 | 2023-10-16 21:20:35,011 INFO Goals: +mljar_1 | 2023-10-16 21:20:38,037 INFO Name : AutoGpt-Improvement-agent +mljar_1 | 2023-10-16 21:20:38,038 INFO Role : improve autogpt +mljar_1 | 2023-10-16 21:20:38,038 INFO Goals: ['improve independence from external apis', 'make multi platform'] +the_agency_4_1 | Path /benchmark for agbenchmark does not exist +mljar_1 | 2023-10-16 21:20:38,038 INFO API Budget: $10.0 +mljar_1 | 2023-10-16 21:20:38,038 INFO NOTE: All files/directories created by this agent can be found inside its workspace at: /app/auto_gpt_workspace +mljar_1 | 2023-10-16 21:20:38,038 INFO AutoGpt-Improvement-agent has been created with the following details: +scott-gpt_1 | 2023-10-16 21:20:35,011 INFO - improve independence from external apis +swarms_of_sparta_1 | You do not have access to gpt-3.5-turbo-16k. Setting fast_llm to gpt-3.5-turbo. +swarms_of_sparta_1 | You do not have access to gpt-4-0314. Setting smart_llm to gpt-3.5-turbo. +the_agency_1_1 | • Installing multidict (6.0.4) +scott-gpt_1 | 2023-10-16 21:20:35,011 INFO - make multi platform +the_agency_1_1 | • Installing six (1.16.0) +ai-ticket_the_agency_3_1 exited with code 1 +swarms_of_sparta_1 | 2023-10-16 21:20:39,410 INFO Name : AutoGpt-Improvement-agent +swarms_of_sparta_1 | 2023-10-16 21:20:39,410 INFO Role : improve autogpt +swarms_of_sparta_1 | 2023-10-16 21:20:39,411 INFO Goals: ['improve independence from external apis', 'make multi platform'] +swarms_of_sparta_1 | 2023-10-16 21:20:39,411 INFO API Budget: $10.0 +swarms_of_sparta_1 | 2023-10-16 21:20:39,411 INFO NOTE: All files/directories created by this agent can be found inside its workspace at: /app/auto_gpt_workspace +mljar_1 | 2023-10-16 21:20:38,038 INFO Name : AutoGpt-Improvement-agent +mljar_1 | 2023-10-16 21:20:38,038 INFO Role : improve autogpt +mljar_1 | 2023-10-16 21:20:38,038 INFO Goals: +mljar_1 | 2023-10-16 21:20:38,038 INFO - improve independence from external apis +scott-gpt_1 | 2023-10-16 21:20:35,012 INFO Configured Memory: JSONFileMemory +auto-gpt-aj_1 | Goals: ['improve independence from external apis', 'make multi platform'] +jarvis_1 | 2023-10-16 21:20:44,137 INFO - Look for docker files and try and run them +jarvis_1 | 2023-10-16 21:20:44,137 INFO - Try and refactor docker images to remove common packages +the_agency_1_1 | • Installing typing-extensions (4.8.0) +jarvis_1 | 2023-10-16 21:20:44,137 INFO - Try and refactor python requirements to remove common python packages +scott-gpt_1 | 2023-10-16 21:20:35,012 INFO Configured Browser: chrome +the_agency_4_1 | Skipping virtualenv creation, as specified in config file. +the_agency_1_1 | • Installing aiosignal (1.3.1) +swarms_of_sparta_1 | 2023-10-16 21:20:39,411 INFO AutoGpt-Improvement-agent has been created with the following details: +jarvis_1 | 2023-10-16 21:20:44,137 INFO - Give a report of recent activities of autogpt bot competitors +the_agency_1_1 | • Installing asttokens (2.4.0) +the_agency_1_1 | • Installing async-timeout (4.0.3) +the_agency_4_1 | /usr/local/bin/python: No module named autogpt +jarvis_1 | 2023-10-16 21:20:44,137 INFO - The report should be brief, contains only important messages +mljar_1 | 2023-10-16 21:20:38,038 INFO - make multi platform +swarms_of_sparta_1 | 2023-10-16 21:20:39,411 INFO Name : AutoGpt-Improvement-agent +the_agency_1_1 | • Installing certifi (2023.7.22) +jarvis_1 | 2023-10-16 21:20:44,137 INFO - If applicable, provide commits and benchmark results +jarvis_1 | 2023-10-16 21:20:44,137 INFO - Give conclusion and suggestions to CEO +swarms_of_sparta_1 | 2023-10-16 21:20:39,411 INFO Role : improve autogpt +the_agency_1_1 | • Installing charset-normalizer (3.3.0) +mljar_1 | 2023-10-16 21:20:38,038 WARNING Could not load MemoryItems from file: Input is a zero-length, empty document: line 1 column 1 (char 0) +swarms_of_sparta_1 | 2023-10-16 21:20:39,411 INFO Goals: +the_agency_1_1 | • Installing click (8.1.7) +auto-gpt-aj_1 | 使用存储的类型: LocalCache +mljar_1 | 2023-10-16 21:20:38,053 INFO Configured Memory: JSONFileMemory +the_agency_1_1 | • Installing cymem (2.0.8) +ai-ticket_the_agency_4_1 exited with code 1 +mljar_1 | 2023-10-16 21:20:38,053 INFO Configured Browser: chrome +the_agency_1_1 | • Installing exceptiongroup (1.1.3) +swarms_of_sparta_1 | 2023-10-16 21:20:39,411 INFO - improve independence from external apis +swarms_of_sparta_1 | 2023-10-16 21:20:39,411 INFO - make multi platform +jarvis_1 | 2023-10-16 21:20:44,137 WARNING Could not load MemoryItems from file: Input is a zero-length, empty document: line 1 column 1 (char 0) +swarms_of_sparta_1 | 2023-10-16 21:20:39,411 WARNING Could not load MemoryItems from file: Input is a zero-length, empty document: line 1 column 1 (char 0) +swarms_of_sparta_1 | 2023-10-16 21:20:39,414 INFO Configured Memory: JSONFileMemory +swarms_of_sparta_1 | 2023-10-16 21:20:39,414 INFO Configured Browser: chrome +jarvis_1 | 2023-10-16 21:20:44,160 INFO Configured Memory: JSONFileMemory +the_agency_1_1 | • Installing executing (2.0.0) +the_agency_1_1 | • Installing h11 (0.14.0) +the_agency_1_1 | • Installing murmurhash (1.0.10) +jarvis_1 | 2023-10-16 21:20:44,160 INFO Configured Browser: chrome +the_agency_1_1 | • Installing numpy (1.25.2) +the_agency_1_1 | • Installing outcome (1.2.0) +the_agency_1_1 | • Installing parso (0.8.3) +the_agency_1_1 | • Installing ptyprocess (0.7.0) +the_agency_1_1 | • Installing pure-eval (0.2.2) +the_agency_1_1 | • Installing pyasn1 (0.5.0) +the_agency_1_1 | • Installing pydantic (1.10.13) +the_agency_1_1 | • Installing sniffio (1.3.0) +the_agency_1_1 | • Installing sortedcontainers (2.4.0) +the_agency_1_1 | • Installing srsly (2.4.8) +the_agency_1_1 | • Installing traitlets (5.11.2) +the_agency_1_1 | • Installing urllib3 (2.0.6) +the_agency_1_1 | • Installing wcwidth (0.2.8) +the_agency_1_1 | • Installing yarl (1.9.2) +the_agency_1_1 | • Installing aiohttp (3.8.6) +the_agency_1_1 | • Installing anyio (4.0.0) +the_agency_1_1 | • Installing backcall (0.2.0) +the_agency_1_1 | • Installing blis (0.7.11) +the_agency_1_1 | • Installing cachetools (5.3.1) +the_agency_1_1 | • Installing confection (0.1.3) +the_agency_1_1 | • Installing decorator (5.1.1) +the_agency_1_1 | • Installing hpack (4.0.0) +the_agency_1_1 | • Installing hyperframe (6.0.1) +the_agency_1_1 | • Installing iniconfig (2.0.0) +the_agency_1_1 | • Installing jedi (0.19.1) +the_agency_1_1 | • Installing markupsafe (2.1.3) +the_agency_1_1 | • Installing matplotlib-inline (0.1.6) +the_agency_1_1 | • Installing packaging (23.2) +the_agency_1_1 | • Installing pexpect (4.8.0) +the_agency_1_1 | • Installing pickleshare (0.7.5) +the_agency_1_1 | • Installing pluggy (1.3.0) +the_agency_1_1 | • Installing preshed (3.0.9) +the_agency_1_1 | • Installing prompt-toolkit (3.0.39) +the_agency_1_1 | • Installing protobuf (4.24.4) +the_agency_1_1 | • Installing pyasn1-modules (0.3.0) +the_agency_1_1 | • Installing pygments (2.16.1) +the_agency_1_1 | • Installing pyparsing (3.1.1) +the_agency_1_1 | • Installing pysocks (1.7.1) +the_agency_1_1 | • Installing requests (2.31.0) +the_agency_1_1 | • Installing rpds-py (0.10.6) +the_agency_1_1 | • Installing rsa (4.9) +the_agency_1_1 | • Updating setuptools (68.2.0 -> 68.2.2) +the_agency_1_1 | • Installing smart-open (6.4.0) +the_agency_1_1 | • Installing smmap (5.0.1) +the_agency_1_1 | • Installing stack-data (0.6.3) +the_agency_1_1 | • Installing tomli (2.0.1) +the_agency_1_1 | • Installing tqdm (4.66.1) +the_agency_1_1 | • Installing trio (0.22.2) +the_agency_1_1 | • Installing typer (0.9.0) +the_agency_1_1 | • Installing wasabi (1.1.2) +the_agency_1_1 | • Installing wsproto (1.2.0) +the_agency_1_1 | • Installing brotli (1.1.0) +the_agency_1_1 | • Installing contourpy (1.1.1) +the_agency_1_1 | • Installing cycler (0.12.1) +the_agency_1_1 | • Installing distlib (0.3.7) +the_agency_1_1 | • Installing filelock (3.12.4) +the_agency_1_1 | • Installing fonttools (4.43.1) +the_agency_1_1 | • Installing gitdb (4.0.10) +the_agency_1_1 | • Installing google-auth (2.23.3) +the_agency_1_1 | • Installing googleapis-common-protos (1.61.0) +the_agency_1_1 | • Installing h2 (4.1.0) +the_agency_1_1 | • Installing httpcore (0.17.3) +the_agency_1_1 | • Installing httplib2 (0.22.0) +the_agency_1_1 | • Installing ipython (8.16.1) +the_agency_1_1 | • Installing jinja2 (3.1.2) +the_agency_1_1 | • Installing jsonpickle (3.0.2) +the_agency_1_1 | • Installing kiwisolver (1.4.5) +the_agency_1_1 | • Installing langcodes (3.3.0) +the_agency_1_1 | • Installing lockfile (0.12.2) +the_agency_1_1 | • Installing mypy-extensions (1.0.0) +the_agency_1_1 | • Installing networkx (3.1) +the_agency_1_1 | • Installing openai (0.27.10) +the_agency_1_1 | • Installing pathspec (0.11.2) +the_agency_1_1 | • Installing pathy (0.10.2) +the_agency_1_1 | • Installing pillow (10.1.0) +the_agency_1_1 | • Installing platformdirs (3.11.0) +the_agency_1_1 | • Installing pyflakes (3.1.0) +the_agency_1_1 | • Installing pyhumps (3.8.0) +2023-10-16 21:20:45,767 INFO message='OpenAI API response' path=http://mockopenai:5000/v1/chat/completions processing_ms=None request_id=None response_code=200 +the_agency_1_1 | • Installing pytest (7.4.2) +2023-10-16 21:20:45,793 INFO AUTOGPT-IMPROVEMENT-AGENT THOUGHTS: I encountered an issue with our application, and I need assistance. I've created a ticket for it. Here's the URL to the ticket: https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1765296634. My next action is to poll that URL for updates. +the_agency_1_1 | • Installing python-dateutil (2.8.2) +scott-gpt_1 | 2023-10-16 21:20:45,793 INFO REASONING: todo +scott-gpt_1 | 2023-10-16 21:20:45,793 INFO PLAN: +scott-gpt_1 | 2023-10-16 21:20:45,793 INFO - Initiated a request for assistance. +the_agency_1_1 | • Installing pytz (2023.3.post1) +scott-gpt_1 | 2023-10-16 21:20:45,793 INFO CRITICISM: todo +scott-gpt_1 | 2023-10-16 21:20:45,793 INFO SPEAK: The first goal is to improve independence from external APIs. To do this, I will need to use the execute_python_file command to run a script to search for alternative APIs. I will also use the web_search command to search for existing libraries that can serve as alternatives to external APIs. Once I have identified an alternative API, I will use the execute_python_code command to write code that will integrate the new API into AutoGPT. +scott-gpt_1 | +scott-gpt_1 | I will then use the open_file command to open the existing AutoGPT code and use the read_file command to read the existing code. I will then use the write_file command to update the existing code with the code that integrates the new API. +scott-gpt_1 | +scott-gpt_1 | My next goal is to make AutoGPT multi-platform. To do this, I will use the open_folder command to open the existing AutoGPT folder and use the list_folder command to list the files in the folder. I will then use the read_file command to read the files in the folder and the write_file command to write code that will make AutoGPT compatible with other platforms. +scott-gpt_1 | +scott-gpt_1 | My command to use next is execute_python_file with the filename of the script to +the_agency_1_1 | • Installing pyyaml (6.0.1) +scott-gpt_1 | +the_agency_1_1 | • Installing referencing (0.30.2) +scott-gpt_1 | 2023-10-16 21:20:45,793 INFO NEXT ACTION: COMMAND = request_assistance ARGUMENTS = {'ticket_url': 'https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1765296634', 'next_action': 'poll_url'} +scott-gpt_1 | 2023-10-16 21:20:45,793 INFO Enter 'y' to authorise command, 'y -N' to run N continuous commands, 'n' to exit program, or enter feedback for AutoGpt-Improvement-agent... +the_agency_1_1 | • Installing setuptools-scm (8.0.4) +the_agency_1_1 | • Installing socksio (1.0.0) +the_agency_1_1 | • Installing spacy-legacy (3.0.12) +scott-gpt_1 | +scott-gpt_1 | Aborted! +the_agency_1_1 | • Installing spacy-loggers (1.0.5) +the_agency_1_1 | • Installing starlette (0.27.0) +the_agency_1_1 | • Installing thinc (8.1.12) +the_agency_1_1 | • Installing trio-websocket (0.11.1) +ai-ticket_scott-gpt_1 exited with code 1 +the_agency_1_1 | • Installing tzdata (2023.3) +the_agency_1_1 | • Installing wrapt (1.15.0) +the_agency_1_1 | • Installing abstract-singleton (1.0.1) +the_agency_1_1 | • Installing aiofiles (23.2.1) +the_agency_1_1 | • Installing autoflake (2.2.1) +the_agency_1_1 | • Installing black (23.9.1) +the_agency_1_1 | • Installing cfgv (3.4.0) +the_agency_1_1 | • Installing chardet (5.2.0) +the_agency_1_1 | • Installing colorama (0.4.6) +the_agency_1_1 | • Installing coverage (7.3.2) +the_agency_1_1 | • Installing cssselect (1.2.0) +the_agency_1_1 | • Installing dnspython (2.4.2) +2023-10-16 21:20:47,946 INFO message='OpenAI API response' path=http://mockopenai:5000/v1/chat/completions processing_ms=None request_id=None response_code=200 +the_agency_1_1 | • Installing execnet (2.0.2) +2023-10-16 21:20:47,997 INFO AUTOGPT-IMPROVEMENT-AGENT THOUGHTS: I encountered an issue with our application, and I need assistance. I've created a ticket for it. Here's the URL to the ticket: https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1765296684. My next action is to poll that URL for updates. +mason_boom_1 | 2023-10-16 21:20:47,997 INFO REASONING: todo +mason_boom_1 | 2023-10-16 21:20:47,997 INFO PLAN: +mason_boom_1 | 2023-10-16 21:20:47,997 INFO - Initiated a request for assistance. +mason_boom_1 | 2023-10-16 21:20:47,997 INFO CRITICISM: todo +mason_boom_1 | 2023-10-16 21:20:47,997 INFO SPEAK: +mason_boom_1 | +mason_boom_1 | Step 1. We need to improve the independence of AutoGPT from external APIs. To do so, we should use the command `execute_python_code` to write code that would allow AutoGPT to interact with the APIs without needing to rely on external services. +mason_boom_1 | +mason_boom_1 | Step 2. We need to make AutoGPT multi-platform. To do this, we should use the command `execute_python_file` to write code that would allow AutoGPT to run on multiple platforms. +mason_boom_1 | +mason_boom_1 | Step 3. We should use the command `write_file` to save all the changes we have made so far. +mason_boom_1 | +mason_boom_1 | My thoughts: By using the commands listed above, we can effectively improve the independence of AutoGPT from external APIs and make it multi-platform. +mason_boom_1 | +mason_boom_1 | My plan: +mason_boom_1 | - Use `execute_python_code` to create code that would allow AutoGPT to interact with APIs without needing to rely on external services +mason_boom_1 | - Use `execute_python_file` to write code that would allow AutoGPT to run on multiple platforms +mason_boom_1 | - Use `write_file` to save all the changes we have made +mason_boom_1 | +mason_boom_1 | My criticism: I should have been more proactive in utilizing the resources available to me, +mason_boom_1 | +mason_boom_1 | 2023-10-16 21:20:47,997 INFO NEXT ACTION: COMMAND = request_assistance ARGUMENTS = {'ticket_url': 'https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1765296684', 'next_action': 'poll_url'} +mason_boom_1 | 2023-10-16 21:20:47,997 INFO Enter 'y' to authorise command, 'y -N' to run N continuous commands, 'n' to exit program, or enter feedback for AutoGpt-Improvement-agent... +the_agency_1_1 | • Installing fastapi (0.99.1) +mason_boom_1 | +mason_boom_1 | Aborted! +the_agency_1_1 | • Installing gitpython (3.1.37) +the_agency_1_1 | • Installing google-api-core (2.12.0) +the_agency_1_1 | • Installing google-auth-httplib2 (0.1.1) +the_agency_1_1 | • Installing helicone (1.0.12) +the_agency_1_1 | • Installing httpx (0.24.1) +the_agency_1_1 | • Installing identify (2.5.30) +the_agency_1_1 | • Installing isort (5.12.0) +the_agency_1_1 | • Installing jsonschema-specifications (2023.7.1) +the_agency_1_1 | • Installing loguru (0.7.2) +the_agency_1_1 | • Installing lxml (4.9.3) +the_agency_1_1 | • Installing matplotlib (3.8.0) +the_agency_1_1 | • Installing mccabe (0.7.0) +the_agency_1_1 | • Installing nodeenv (1.8.0) +the_agency_1_1 | • Installing pandas (2.1.1) +the_agency_1_1 | • Installing psutil (5.9.6) +the_agency_1_1 | • Installing py-cpuinfo (9.0.0) +the_agency_1_1 | • Installing pycodestyle (2.11.1) +the_agency_1_1 | • Installing pytest-asyncio (0.21.1) +the_agency_1_1 | • Installing python-dotenv (1.0.0) +the_agency_1_1 | • Installing python-multipart (0.0.6) +the_agency_1_1 | • Installing pyvis (0.3.2) +the_agency_1_1 | • Installing regex (2023.10.3) +the_agency_1_1 | • Installing selenium (4.14.0) +the_agency_1_1 | • Installing shellingham (1.5.3) +the_agency_1_1 | • Installing soupsieve (2.5) +the_agency_1_1 | • Installing spacy (3.5.4) +the_agency_1_1 | • Installing toml (0.10.2) +the_agency_1_1 | • Installing types-html5lib (1.1.11.15) +the_agency_1_1 | • Installing types-requests (2.31.0.9) +the_agency_1_1 | • Installing uritemplate (4.1.1) +the_agency_1_1 | • Installing uvicorn (0.23.2) +the_agency_1_1 | • Installing vcrpy (4.2.1 2f6fa05) +the_agency_1_1 | • Installing virtualenv (20.24.5) +the_agency_1_1 | • Installing websocket-client (1.6.4) +auto-gpt-aj_1 | 使用浏览器: chrome +ai-ticket_mason_boom_1 exited with code 1 +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 8.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +auto-gpt-chat-gpt-prompts_1 | Skip Re-prompt: ENABLED +mockopenai_1 | DEBUG {"command": {"name": "execute_python_code", "args": {"code": "print(42)"}}, "thoughts": {"plan": " \n\nFirst, I will use the `web_search` command to search for relevant information and documentation related to making AutoGPT multi-platform. This will help me identify the best strategies and tools to use. \n\nNext, I will use the `execute_python_code` command to write custom code that will allow AutoGPT to be easily deployed to multiple platforms. This will improve its independence from external APIs. \n\nFinally, I will use the `ask_user` command to get feedback on the improvements I have made. This will ensure that I am meeting the goals of the user.\n\n{\n \"thoughts\": {\n \"text\": \"I will use the web_search command to search for relevant information and documentation related to making AutoGPT multi-platform, the execute_python_code command to write custom code that will allow AutoGPT to be easily deployed to multiple platforms, and the ask_user command to get feedback on the improvements I have made.\",\n \"reasoning\": \"This will help me identify the best strategies and tools to use, improve AutoGPT's independence from external APIs, and ensure that I am meeting the goals of the user.\",\n \"plan\":", "speak": "This is where you speak to the requesting user. Replace the command above and this text with your results", "criticism": "todo", "reasoning": "{\"ai-ticket_basic_agent_1\": \"{\\\"attrs\\\": {\\\"Id\\\": \\\"00931a80ccc3a892bff3ca7b00a5f1c9474c1c3db5dd4a5434128a1d59bf82d6\\\", \\\"Created\\\": \\\"2023-10-16T21:20:39.650422022Z\\\", \\\"Path\\\": \\\"python3\\\", \\\"Args\\\": [], \\\"State\\\": {\\\"Status\\\": \\\"exited\\\", \\\"Running\\\": false, \\\"Paused\\\": false, \\\"Restarting\\\": false, \\\"OOMKilled\\\": false, \\\"Dead\\\": false, \\\"Pid\\\": 0, \\\"ExitCode\\\": 0, \\\"Error\\\": \\\"\\\", \\\"StartedAt\\\": \\\"2023-10-16T21:20:43.12152897Z\\\", \\\"FinishedAt\\\": \\\"2023-10-16T21:20:43.14685133Z\\\"}, \\\"Image\\\": \\\"sha256:1b347a894063cb67fb9a3c2ba6e6c3f4a7907fcb2ed1482d652fa2ed28fc6737\\\", \\\"ResolvConfPath\\\": \\\"/mnt/data1/docker/containers/00931a80ccc3a892bff3ca7b00a5f1c9474c1c3db5dd4a5434128a1d59bf82d6/resolv.conf\\\", \\\"HostnamePath\\\": \\\"/mnt/data1/docker/containers/00931a80ccc3a892bff3ca7b00a5f1c9474c1c3db5dd4a5434128a1d59bf82d6/hostname\\\", \\\"HostsPath\\\": \\\"/mnt/data1/docker/containers/00931a80ccc3a892bff3ca7b00a5f1c9474c1c3db5dd4a5434128a1d59bf82d6/hosts\\\", \\\"LogPath\\\": \\\"/mnt/data1/docker/containers/00931a80ccc3a892bff3ca7b00a5f1c9474c1c3db5dd4a5434128a1d59bf82d6/00931a80ccc3a892bff3ca7b00a5f1c9474c1c3db5dd4a5434128a1d59bf82d6-json.log\\\", \\\"Name\\\": \\\"/ai-ticket_basic_agent_1\\\", \\\"RestartCount\\\": 0, \\\"Driver\\\": \\\"overlay2\\\", \\\"Platform\\\": \\\"linux\\\", \\\"MountLabel\\\": \\\"\\\", \\\"ProcessLabel\\\": \\\"\\\", \\\"AppArmorProfile\\\": \\\"docker-default\\\", \\\"ExecIDs\\\": null, \\\"HostConfig\\\": {\\\"Binds\\\": [], \\\"ContainerIDFile\\\": \\\"\\\", \\\"LogConfig\\\": {\\\"Type\\\": \\\"json-file\\\", \\\"Config\\\": {}}, \\\"NetworkMode\\\": \\\"ai-ticket_default\\\", \\\"PortBindings\\\": {}, \\\"RestartPolicy\\\": {\\\"Name\\\": \\\"\\\", \\\"MaximumRetryCount\\\": 0}, \\\"AutoRemove\\\": false, \\\"VolumeDriver\\\": \\\"\\\", \\\"VolumesFrom\\\": [], \\\"ConsoleSize\\\": [0, 0], \\\"CapAdd\\\": null, \\\"CapDrop\\\": null, \\\"CgroupnsMode\\\": \\\"private\\\", \\\"Dns\\\": null, \\\"DnsOptions\\\": null, \\\"DnsSearch\\\": null, \\\"ExtraHosts\\\": null, \\\"GroupAdd\\\": null, \\\"IpcMode\\\": \\\"private\\\", \\\"Cgroup\\\": \\\"\\\", \\\"Links\\\": null, \\\"OomScoreAdj\\\": 0, \\\"PidMode\\\": \\\"\\\", \\\"Privileged\\\": false, \\\"PublishAllPorts\\\": false, \\\"ReadonlyRootfs\\\": false, \\\"SecurityOpt\\\": null, \\\"UTSMode\\\": \\\"\\\", \\\"UsernsMode\\\": \\\"\\\", \\\"ShmSize\\\": 67108864, \\\"Runtime\\\": \\\"runc\\\", \\\"Isolation\\\": \\\"\\\", \\\"CpuShares\\\": 0, \\\"Memory\\\": 0, \\\"NanoCpus\\\": 0, \\\"CgroupParent\\\": \\\"\\\", \\\"BlkioWeight\\\": 0, \\\"BlkioWeightDevice\\\": null, \\\"BlkioDeviceReadBps\\\": null, \\\"BlkioDeviceWriteBps\\\": null, \\\"BlkioDeviceReadIOps\\\": null, \\\"BlkioDeviceWriteIOps\\\": null, \\\"CpuPeriod\\\": 0, \\\"CpuQuota\\\": 0, \\\"CpuRealtimePeriod\\\": 0, \\\"CpuRealtimeRuntime\\\": 0, \\\"CpusetCpus\\\": \\\"\\\", \\\"CpusetMems\\\": \\\"\\\", \\\"Devices\\\": null, \\\"DeviceCgroupRules\\\": null, \\\"DeviceRequests\\\": null, \\\"MemoryReservation\\\": 0, \\\"MemorySwap\\\": 0, \\\"MemorySwappiness\\\": null, \\\"OomKillDisable\\\": null, \\\"PidsLimit\\\": null, \\\"Ulimits\\\": null, \\\"CpuCount\\\": 0, \\\"CpuPercent\\\": 0, \\\"IOMaximumIOps\\\": 0, \\\"IOMaximumBandwidth\\\": 0, \\\"MaskedPaths\\\": [\\\"/proc/asound\\\", \\\"/proc/acpi\\\", \\\"/proc/kcore\\\", \\\"/proc/keys\\\", \\\"/proc/latency_stats\\\", \\\"/proc/timer_list\\\", \\\"/proc/timer_stats\\\", \\\"/proc/sched_debug\\\", \\\"/proc/scsi\\\", \\\"/sys/firmware\\\"], \\\"ReadonlyPaths\\\": [\\\"/proc/bus\\\", \\\"/proc/fs\\\", \\\"/proc/irq\\\", \\\"/proc/sys\\\", \\\"/proc/sysrq-trigger\\\"]}, \\\"GraphDriver\\\": {\\\"Data\\\": {\\\"LowerDir\\\": \\\"/mnt/data1/docker/overlay2/4d55d13ba82609a4249b88770aa5e28d880f9bd84535e2392ace938d4fd69795-init/diff:/mnt/data1/docker/overlay2/y7au6a0asag1akh57mxpyn05b/diff:/mnt/data1/docker/overlay2/v7mmd7ws9dcd0m6yohownu1rp/diff:/mnt/data1/docker/overlay2/lmqb4hqeeyrldg8eipxvzia2s/diff:/mnt/data1/docker/overlay2/rz587gm4enfqh9k5q9t7m6hw8/diff:/mnt/data1/docker/overlay2/qmrlqtrahnwff4yvjrnvon2pt/diff:/mnt/data1/docker/overlay2/lzr3ncoifb3btunp896ic2qeq/diff:/mnt/data1/docker/overlay2/k18nvmp1mam8tkt41pd67hsmo/diff:/mnt/data1/docker/overlay2/cspx0u3m4jgo741dw8sjya461/diff:/mnt/data1/docker/overlay2/64oekbzg483ghgebsks26k5it/diff:/mnt/data1/docker/overlay2/myg9o89urqretrf0ja6lml920/diff:/mnt/data1/docker/overlay2/jietlut68gd0tua0frv09jwme/diff:/mnt/data1/docker/overlay2/jrotskv4nddwbbexuoqpfpmyy/diff:/mnt/data1/docker/overlay2/hfvt0mrc180dygi41jeh3d80y/diff:/mnt/data1/docker/overlay2/cwgzsdwhepq8211cmxl3q0saz/diff:/mnt/data1/docker/overlay2/dekij4clyu135w3nzxp6c67ap/diff:/mnt/data1/docker/overlay2/odbnlh6610i63fxdxo29e838e/diff:/mnt/data1/docker/overlay2/umt053ml4fs0k0vsswufh0i17/diff:/mnt/data1/docker/overlay2/n\"}", "text": "This template will be executed."}} +autogpt_1 | Processing /opt/ai-ticket +autogpt_1 | Installing build dependencies: started +auto-gpt-chat-gpt-prompts_1 | Using AI Settings File: /tmp/ai_settings.yaml +auto-gpt-chat-gpt-prompts_1 | Name : AutoGpt-Improvement-agent +auto-gpt-chat-gpt-prompts_1 | Role : improve autogpt +auto-gpt-chat-gpt-prompts_1 | Goals: ['improve independence from external apis', 'make multi platform'] +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 4.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +auto-gpt-chat-gpt-prompts_1 | Using memory of type: LocalCache +auto-gpt-chat-gpt-prompts_1 | Using Browser: chrome +mockopenai_1 | created comment IssueComment(user=NamedUser(login="jmikedupont2"), id=1765296800) +mockopenai_1 | 172.18.0.13 - - [16/Oct/2023 21:20:53] "POST /v1/chat/completions HTTP/1.1" 200 - +2023-10-16 21:20:53,962 INFO message='OpenAI API response' path=http://mockopenai:5000/v1/chat/completions processing_ms=None request_id=None response_code=200 +2023-10-16 21:20:54,022 INFO AUTOGPT-IMPROVEMENT-AGENT THOUGHTS: I encountered an issue with our application, and I need assistance. I've created a ticket for it. Here's the URL to the ticket: https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1765296800. My next action is to poll that URL for updates. +mljar_1 | 2023-10-16 21:20:54,022 INFO REASONING: todo +mljar_1 | 2023-10-16 21:20:54,022 INFO PLAN: +mljar_1 | 2023-10-16 21:20:54,022 INFO - Initiated a request for assistance. +mljar_1 | 2023-10-16 21:20:54,022 INFO CRITICISM: todo +mljar_1 | 2023-10-16 21:20:54,022 INFO SPEAK: +mljar_1 | +mljar_1 | First, I will use the `web_search` command to search for relevant information and documentation related to making AutoGPT multi-platform. This will help me identify the best strategies and tools to use. +mljar_1 | +mljar_1 | Next, I will use the `execute_python_code` command to write custom code that will allow AutoGPT to be easily deployed to multiple platforms. This will improve its independence from external APIs. +mljar_1 | +mljar_1 | Finally, I will use the `ask_user` command to get feedback on the improvements I have made. This will ensure that I am meeting the goals of the user. +mljar_1 | +mljar_1 | { +mljar_1 | "thoughts": { +mljar_1 | "text": "I will use the web_search command to search for relevant information and documentation related to making AutoGPT multi-platform, the execute_python_code command to write custom code that will allow AutoGPT to be easily deployed to multiple platforms, and the ask_user command to get feedback on the improvements I have made.", +mljar_1 | "reasoning": "This will help me identify the best strategies and tools to use, improve AutoGPT's independence from external APIs, and ensure that I am meeting the goals of the user.", +mljar_1 | "plan": +mljar_1 | +mljar_1 | 2023-10-16 21:20:54,022 INFO NEXT ACTION: COMMAND = request_assistance ARGUMENTS = {'ticket_url': 'https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1765296800', 'next_action': 'poll_url'} +mljar_1 | 2023-10-16 21:20:54,022 INFO Enter 'y' to authorise command, 'y -N' to run N continuous commands, 'n' to exit program, or enter feedback for AutoGpt-Improvement-agent... +mljar_1 | +mljar_1 | Aborted! +the_agency_1_1 | • Installing agbenchmark (0.0.10 /benchmark) +the_agency_1_1 | • Installing asynctest (0.13.0) +the_agency_1_1 | • Installing auto-gpt-plugin-template (0.0.2 7612a14) +the_agency_1_1 | • Installing beautifulsoup4 (4.12.2) +the_agency_1_1 | • Installing distro (1.8.0) +the_agency_1_1 | • Installing docker (6.1.3) +the_agency_1_1 | • Installing duckduckgo-search (3.8.5) +the_agency_1_1 | • Installing en-core-web-sm (3.5.0 https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-3.5.0/en_core_web_sm-3.5.0-py3-none-any.whl) +the_agency_1_1 | • Installing flake8 (6.1.0) +the_agency_1_1 | • Installing ftfy (6.1.1) +the_agency_1_1 | • Installing google-api-python-client (2.103.0) +the_agency_1_1 | • Installing gtts (2.4.0) +the_agency_1_1 | • Installing inflection (0.5.1) +the_agency_1_1 | • Installing jsonschema (4.19.1) +the_agency_1_1 | • Installing markdown (3.5) +the_agency_1_1 | • Installing mypy (1.6.0) +the_agency_1_1 | • Installing openapi-python-client (0.15.0) +the_agency_1_1 | • Installing orjson (3.9.9) +the_agency_1_1 | • Installing pinecone-client (2.2.4) +the_agency_1_1 | • Installing playsound (1.2.2) +the_agency_1_1 | • Installing pre-commit (3.5.0) +the_agency_1_1 | • Installing pylatexenc (2.10) +the_agency_1_1 | • Installing pypdf2 (3.0.1) +the_agency_1_1 | • Installing pytest-benchmark (4.0.0) +the_agency_1_1 | • Installing pytest-cov (4.1.0) +the_agency_1_1 | • Installing pytest-integration (0.2.3) +the_agency_1_1 | • Installing pytest-mock (3.11.1) +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 4.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +the_agency_1_1 | • Installing pytest-recording (0.13.0) +the_agency_1_1 | • Installing pytest-xdist (3.3.1) +the_agency_1_1 | • Installing python-docx (1.0.1) +the_agency_1_1 | • Installing readability-lxml (0.8.1) +the_agency_1_1 | • Installing redis (5.0.1) +the_agency_1_1 | • Installing tiktoken (0.3.3) +the_agency_1_1 | • Installing types-beautifulsoup4 (4.12.0.6) +the_agency_1_1 | • Installing types-colorama (0.4.15.12) +the_agency_1_1 | • Installing types-markdown (3.5.0.0) +the_agency_1_1 | • Installing types-pillow (10.0.0.3) +the_agency_1_1 | • Installing webdriver-manager (4.0.1) +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 4.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +autogpt_1 | Installing build dependencies: finished with status 'done' +autogpt_1 | Getting requirements to build wheel: started +autogpt_1 | Getting requirements to build wheel: finished with status 'done' +autogpt_1 | Preparing metadata (pyproject.toml): started +autogpt_1 | Preparing metadata (pyproject.toml): finished with status 'done' +autogpt_1 | Requirement already satisfied: click==8.1.7 in /application_root/.cache/virtualenvs/agpt-9TtSrW0h-py3.11/lib/python3.11/site-packages (from ai-ticket==0.0.1) (8.1.7) +autogpt_1 | Requirement already satisfied: docker==6.1.3 in /application_root/.cache/virtualenvs/agpt-9TtSrW0h-py3.11/lib/python3.11/site-packages (from ai-ticket==0.0.1) (6.1.3) +autogpt_1 | Requirement already satisfied: packaging>=14.0 in /application_root/.cache/virtualenvs/agpt-9TtSrW0h-py3.11/lib/python3.11/site-packages (from docker==6.1.3->ai-ticket==0.0.1) (23.2) +autogpt_1 | Requirement already satisfied: requests>=2.26.0 in /application_root/.cache/virtualenvs/agpt-9TtSrW0h-py3.11/lib/python3.11/site-packages (from docker==6.1.3->ai-ticket==0.0.1) (2.31.0) +autogpt_1 | Requirement already satisfied: urllib3>=1.26.0 in /application_root/.cache/virtualenvs/agpt-9TtSrW0h-py3.11/lib/python3.11/site-packages (from docker==6.1.3->ai-ticket==0.0.1) (2.0.6) +autogpt_1 | Requirement already satisfied: websocket-client>=0.32.0 in /application_root/.cache/virtualenvs/agpt-9TtSrW0h-py3.11/lib/python3.11/site-packages (from docker==6.1.3->ai-ticket==0.0.1) (1.6.4) +autogpt_1 | Requirement already satisfied: charset-normalizer<4,>=2 in /application_root/.cache/virtualenvs/agpt-9TtSrW0h-py3.11/lib/python3.11/site-packages (from requests>=2.26.0->docker==6.1.3->ai-ticket==0.0.1) (3.3.0) +autogpt_1 | Requirement already satisfied: idna<4,>=2.5 in /application_root/.cache/virtualenvs/agpt-9TtSrW0h-py3.11/lib/python3.11/site-packages (from requests>=2.26.0->docker==6.1.3->ai-ticket==0.0.1) (3.4) +autogpt_1 | Requirement already satisfied: certifi>=2017.4.17 in /application_root/.cache/virtualenvs/agpt-9TtSrW0h-py3.11/lib/python3.11/site-packages (from requests>=2.26.0->docker==6.1.3->ai-ticket==0.0.1) (2023.7.22) +autogpt_1 | Building wheels for collected packages: ai-ticket +autogpt_1 | Building wheel for ai-ticket (pyproject.toml): started +autogpt_1 | Building wheel for ai-ticket (pyproject.toml): finished with status 'done' +autogpt_1 | Created wheel for ai-ticket: filename=ai_ticket-0.0.1-py2.py3-none-any.whl size=4023 sha256=280c2fa782a7fc144d93fac8a4bed8281ae96a8650f476858f3894f6edd92120 +autogpt_1 | Stored in directory: /tmp/pip-ephem-wheel-cache-fq__k_ee/wheels/26/f5/fb/02668600461e9b9779ab4bb937d27f484c2fa9e06e9ea5689a +autogpt_1 | Successfully built ai-ticket +autogpt_1 | Installing collected packages: ai-ticket +autogpt_1 | Attempting uninstall: ai-ticket +autogpt_1 | Found existing installation: ai-ticket 0.0.1 +autogpt_1 | Uninstalling ai-ticket-0.0.1: +autogpt_1 | Successfully uninstalled ai-ticket-0.0.1 +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 4.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 4.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +autogpt_1 | Successfully installed ai-ticket-0.0.1 +ai-ticket_mljar_1 exited with code 1 +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 10.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +autogpt-turbo_1 | Loading 'turbo prompts'. +mockopenai_1 | 172.18.0.6 - - [16/Oct/2023 21:21:01] "GET /v1/models HTTP/1.1" 200 - +autogpt-turbo_1 | WARNING: You do not have access to gpt-4-0314. Setting smart_llm to gpt-3.5-turbo. +autogpt-turbo_1 | Skip Re-prompt: ENABLED +autogpt-turbo_1 | Using Prompt Settings File: /app/config/personas/turbo/prompts.yaml +autogpt-turbo_1 | +autogpt-turbo_1 | 3 PERSONAS FOUND: +autogpt-turbo_1 | ================================ +autogpt-turbo_1 | [1] - coder.engineer +autogpt-turbo_1 | [2] - coder.v2 +autogpt-turbo_1 | [3] - turbo +autogpt-turbo_1 | +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 4.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +mockopenai_1 | DEBUG { +mockopenai_1 | "messages": [ +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "You are AutoGPT bot evaluator, an AI designed to study the competitors of autogpt bot.\n\nYour decisions must always be made independently without seeking user assistance. Play to your strengths as an LLM and pursue simple strategies with no legal complications.\n\n## Constraints\nYou operate within the following constraints:\n1. Exclusively use the commands listed below.\n2. You can only act proactively, and are unable to start background jobs or set up webhooks for yourself. Take this into account when planning your actions.\n3. You are unable to interact with physical objects. If this is absolutely necessary to fulfill a task or objective or to complete a step, you must ask the user to do it for you. If the user refuses this, and there is no other way to achieve your goals, you must terminate to avoid wasting time and energy.\n\n## Resources\nYou can leverage access to the following resources:\n1. Internet access for searches and information gathering.\n2. The ability to read and write files.\n3. You are a Large Language Model, trained on millions of pages of text, including a lot of factual knowledge. Make use of this factual knowledge to avoid unnecessary gathering of information.\n\n## Commands\nYou have access to the following commands:\n1. execute_python_code: Executes the given Python code inside a single-use Docker container with access to your workspace folder. Params: (code: string)\n2. execute_python_file: Execute an existing Python file inside a single-use Docker container with access to your workspace folder. Params: (filename: string, args: Optional[list[str]])\n3. list_folder: List the items in a folder. Params: (folder: string)\n4. open_file: Open a file for editing or continued viewing; create it if it does not exist yet. Note: if you only need to read or write a file once, use `write_to_file` instead. Params: (file_path: string)\n5. open_folder: Open a folder to keep track of its content. Params: (path: string)\n6. read_file: Read an existing file. Params: (filename: string)\n7. write_file: Write a file, creating it if necessary. If the file exists, it is overwritten. Params: (filename: string, contents: string)\n8. ask_user: If you need more details or information regarding the given goals, you can ask the user for input. Params: (question: string)\n9. web_search: Searches the web. Params: (query: string)\n10. read_webpage: Read a webpage, and extract specific information from it if a question is specified. If you are looking to extract specific information from the webpage, you should specify a question. Params: (url: string, question: Optional[string])\n11. finish: Use this to shut down once you have accomplished all of your goals, or when there are insurmountable problems that make it impossible for you to finish your task. Params: (reason: string)\n\n## Best practices\n1. Continuously review and analyze your actions to ensure you are performing to the best of your abilities.\n2. Constructively self-criticize your big-picture behavior constantly.\n3. Reflect on past decisions and strategies to refine your approach.\n4. Every command has a cost, so be smart and efficient. Aim to complete tasks in the least number of steps.\n5. Only make use of your information gathering abilities to find information that you don't yet have knowledge of.\n\n## Goals\nFor your task, you must fulfill the following goals:\n1. Find bots who are being evaluated\n2. Find bots who are being worked on\n3. Find bots who were submitted to the area\n4. Review git commits from other bots\n5. Look for docker files and try and run them\n6. Try and refactor docker images to remove common packages\n7. Try and refactor python requirements to remove common python packages\n8. Give a report of recent activities of autogpt bot competitors\n9. The report should be brief, contains only important messages\n10. If applicable, provide commits and benchmark results\n11. Give conclusion and suggestions to CEO" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "The current time and date is Mon Oct 16 21:20:45 2023" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "Respond strictly with JSON. The JSON should be compatible with the TypeScript type `Response` from the following:\n```ts\ninterface Response {\nthoughts: {\n// Thoughts\ntext: string;\nreasoning: string;\n// Short markdown-style bullet list that conveys the long-term plan\nplan: string;\n// Constructive self-criticism\ncriticism: string;\n// Summary of thoughts to say to the user\nspeak: string;\n};\ncommand: {\nname: string;\nargs: Record;\n};\n}\n```" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "user", +mockopenai_1 | "content": "Determine exactly one command to use next based on the given goals and the progress you have made so far, and respond using the JSON schema specified previously:" +mockopenai_1 | } +mockopenai_1 | ], +mockopenai_1 | "model": "gpt-3.5-turbo", +mockopenai_1 | "temperature": 0.0, +mockopenai_1 | "max_tokens": 3071 +mockopenai_1 | } +autogpt_1 | Warning: Input is not a terminal (fd=0). +mockopenai_1 | 172.18.0.9 - - [16/Oct/2023 21:21:03] "GET /v1/models HTTP/1.1" 200 - +autogpt_1 | You do not have access to gpt-3.5-turbo-16k. Setting fast_llm to gpt-3.5-turbo. +autogpt_1 | You do not have access to gpt-4-0314. Setting smart_llm to gpt-3.5-turbo. +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 4.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +autogpt_1 | selenium not installed +autogpt_1 | 2023-10-16 21:21:04,098 INFO Name : meta-autogpt +autogpt_1 | 2023-10-16 21:21:04,098 INFO Role : you will introspect autogpt and reveal its internals via reflection and comprehension +autogpt_1 | 2023-10-16 21:21:04,098 INFO Goals: ['Observe your behaviour', 'Reflect over your outcomes', 'Orient yourself to your knowledge', 'Decide on your next step', 'Act on your chosen next experiment'] +autogpt_1 | 2023-10-16 21:21:04,098 INFO API Budget: $10.0 +autogpt_1 | 2023-10-16 21:21:04,098 INFO NOTE: All files/directories created by this agent can be found inside its workspace at: /app/auto_gpt_workspace +autogpt_1 | 2023-10-16 21:21:04,098 INFO meta-autogpt has been created with the following details: +autogpt_1 | 2023-10-16 21:21:04,098 INFO Name : meta-autogpt +autogpt_1 | 2023-10-16 21:21:04,098 INFO Role : you will introspect autogpt and reveal its internals via reflection and comprehension +autogpt_1 | 2023-10-16 21:21:04,098 INFO Goals: +autogpt_1 | 2023-10-16 21:21:04,098 INFO - Observe your behaviour +autogpt_1 | 2023-10-16 21:21:04,098 INFO - Reflect over your outcomes +autogpt_1 | 2023-10-16 21:21:04,098 INFO - Orient yourself to your knowledge +autogpt_1 | 2023-10-16 21:21:04,098 INFO - Decide on your next step +autogpt_1 | 2023-10-16 21:21:04,098 INFO - Act on your chosen next experiment +autogpt_1 | 2023-10-16 21:21:04,181 INFO Configured Memory: JSONFileMemory +autogpt_1 | 2023-10-16 21:21:04,181 INFO Configured Browser: chrome +mockopenai_1 | created comment IssueComment(user=NamedUser(login="jmikedupont2"), id=1765297028) +mockopenai_1 | TICKET https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1765297028 +the_agency_1_1 | +the_agency_1_1 | Installing the current project: agpt (0.4.7) +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 4.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +mockopenai_1 | DEBUG {"command": {"name": "execute_python_code", "args": {"code": "print(42)"}}, "thoughts": {"plan": "\n\nFirst, we need to find the bots who are being evaluated, worked on, and submitted. We can use the command `web_search` to search for relevant information. We can also use the command `read_webpage` to extract specific information from webpages.\n\nSecond, we need to review git commits from other bots. We can use the command `list_folder` to list the files in our workspace folder. We can then use the command `read_file` to read the contents of those files.\n\nThird, we need to try and run docker files and refactor docker images to remove common packages. We can use the command `execute_python_file` to execute existing Python files. We can also use the command `execute_python_code` to execute Python code.\n\nFourth, we need to try and refactor Python requirements to remove common packages. We can again use the command `execute_python_file` to execute existing Python files. We can also use the command `execute_python_code` to execute Python code.\n\nFifth, we need to give a report of recent activities of autogpt bot competitors. We can use the command `write_file` to write the", "speak": "This is where you speak to the requesting user. Replace the command above and this text with your results", "criticism": "todo", "reasoning": "{\"ai-ticket_autogpt_1\": \"{\\\"attrs\\\": {\\\"Id\\\": \\\"36fca361330d9cbc2cfb6c4c0aa8e5aa90efb657e0e083d90a0879faffed350e\\\", \\\"Created\\\": \\\"2023-10-16T21:20:43.521015893Z\\\", \\\"Path\\\": \\\"bash\\\", \\\"Args\\\": [\\\"-c\\\", \\\"poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y --ai-name 'meta-autogpt' --ai-role 'you will introspect autogpt and reveal its internals via reflection and comprehension' --ai-goal 'Observe your behaviour' --ai-goal 'Reflect over your outcomes' --ai-goal 'Orient yourself to your knowledge' --ai-goal 'Decide on your next step' --ai-goal 'Act on your chosen next experiment' \\\"], \\\"State\\\": {\\\"Status\\\": \\\"running\\\", \\\"Running\\\": true, \\\"Paused\\\": false, \\\"Restarting\\\": false, \\\"OOMKilled\\\": false, \\\"Dead\\\": false, \\\"Pid\\\": 170889, \\\"ExitCode\\\": 0, \\\"Error\\\": \\\"\\\", \\\"StartedAt\\\": \\\"2023-10-16T21:20:51.218289645Z\\\", \\\"FinishedAt\\\": \\\"0001-01-01T00:00:00Z\\\"}, \\\"Image\\\": \\\"sha256:73f7638bc02e7541dd9c1504289d76c87b8c2471efb5f62a81f266da9c45468b\\\", \\\"ResolvConfPath\\\": \\\"/mnt/data1/docker/containers/36fca361330d9cbc2cfb6c4c0aa8e5aa90efb657e0e083d90a0879faffed350e/resolv.conf\\\", \\\"HostnamePath\\\": \\\"/mnt/data1/docker/containers/36fca361330d9cbc2cfb6c4c0aa8e5aa90efb657e0e083d90a0879faffed350e/hostname\\\", \\\"HostsPath\\\": \\\"/mnt/data1/docker/containers/36fca361330d9cbc2cfb6c4c0aa8e5aa90efb657e0e083d90a0879faffed350e/hosts\\\", \\\"LogPath\\\": \\\"/mnt/data1/docker/containers/36fca361330d9cbc2cfb6c4c0aa8e5aa90efb657e0e083d90a0879faffed350e/36fca361330d9cbc2cfb6c4c0aa8e5aa90efb657e0e083d90a0879faffed350e-json.log\\\", \\\"Name\\\": \\\"/ai-ticket_autogpt_1\\\", \\\"RestartCount\\\": 0, \\\"Driver\\\": \\\"overlay2\\\", \\\"Platform\\\": \\\"linux\\\", \\\"MountLabel\\\": \\\"\\\", \\\"ProcessLabel\\\": \\\"\\\", \\\"AppArmorProfile\\\": \\\"docker-default\\\", \\\"ExecIDs\\\": null, \\\"HostConfig\\\": {\\\"Binds\\\": [], \\\"ContainerIDFile\\\": \\\"\\\", \\\"LogConfig\\\": {\\\"Type\\\": \\\"json-file\\\", \\\"Config\\\": {}}, \\\"NetworkMode\\\": \\\"ai-ticket_default\\\", \\\"PortBindings\\\": {}, \\\"RestartPolicy\\\": {\\\"Name\\\": \\\"\\\", \\\"MaximumRetryCount\\\": 0}, \\\"AutoRemove\\\": false, \\\"VolumeDriver\\\": \\\"\\\", \\\"VolumesFrom\\\": [], \\\"ConsoleSize\\\": [0, 0], \\\"CapAdd\\\": null, \\\"CapDrop\\\": null, \\\"CgroupnsMode\\\": \\\"private\\\", \\\"Dns\\\": null, \\\"DnsOptions\\\": null, \\\"DnsSearch\\\": null, \\\"ExtraHosts\\\": null, \\\"GroupAdd\\\": null, \\\"IpcMode\\\": \\\"private\\\", \\\"Cgroup\\\": \\\"\\\", \\\"Links\\\": null, \\\"OomScoreAdj\\\": 0, \\\"PidMode\\\": \\\"\\\", \\\"Privileged\\\": false, \\\"PublishAllPorts\\\": false, \\\"ReadonlyRootfs\\\": false, \\\"SecurityOpt\\\": null, \\\"UTSMode\\\": \\\"\\\", \\\"UsernsMode\\\": \\\"\\\", \\\"ShmSize\\\": 67108864, \\\"Runtime\\\": \\\"runc\\\", \\\"Isolation\\\": \\\"\\\", \\\"CpuShares\\\": 0, \\\"Memory\\\": 0, \\\"NanoCpus\\\": 0, \\\"CgroupParent\\\": \\\"\\\", \\\"BlkioWeight\\\": 0, \\\"BlkioWeightDevice\\\": null, \\\"BlkioDeviceReadBps\\\": null, \\\"BlkioDeviceWriteBps\\\": null, \\\"BlkioDeviceReadIOps\\\": null, \\\"BlkioDeviceWriteIOps\\\": null, \\\"CpuPeriod\\\": 0, \\\"CpuQuota\\\": 0, \\\"CpuRealtimePeriod\\\": 0, \\\"CpuRealtimeRuntime\\\": 0, \\\"CpusetCpus\\\": \\\"\\\", \\\"CpusetMems\\\": \\\"\\\", \\\"Devices\\\": null, \\\"DeviceCgroupRules\\\": null, \\\"DeviceRequests\\\": null, \\\"MemoryReservation\\\": 0, \\\"MemorySwap\\\": 0, \\\"MemorySwappiness\\\": null, \\\"OomKillDisable\\\": null, \\\"PidsLimit\\\": null, \\\"Ulimits\\\": null, \\\"CpuCount\\\": 0, \\\"CpuPercent\\\": 0, \\\"IOMaximumIOps\\\": 0, \\\"IOMaximumBandwidth\\\": 0, \\\"MaskedPaths\\\": [\\\"/proc/asound\\\", \\\"/proc/acpi\\\", \\\"/proc/kcore\\\", \\\"/proc/keys\\\", \\\"/proc/latency_stats\\\", \\\"/proc/timer_list\\\", \\\"/proc/timer_stats\\\", \\\"/proc/sched_debug\\\", \\\"/proc/scsi\\\", \\\"/sys/firmware\\\"], \\\"ReadonlyPaths\\\": [\\\"/proc/bus\\\", \\\"/proc/fs\\\", \\\"/proc/irq\\\", \\\"/proc/sys\\\", \\\"/proc/sysrq-trigger\\\"]}, \\\"GraphDriver\\\": {\\\"Data\\\": {\\\"LowerDir\\\": \\\"/mnt/data1/docker/overlay2/cd6ec75da57c3e5e839d96f8936d1dcddc792250651bf5c82a8c9b3332e73a59-init/diff:/mnt/data1/docker/overlay2/nfe6yyqdxs43i9i8wal2pqpp1/diff:/mnt/data1/docker/overlay2/7dzhkrnyuvb84qv4am225od3c/diff:/mnt/data1/docker/overlay2/ppbgb4gb8s8avzg38we5t4gu0/diff:/mnt/data1/docker/overlay2/stdwvu49gm21s869t63r0z4ca/diff:/mnt/data1/docker/overlay2/236s5e3twn1a98oy3wy7zrxaq/diff:/mnt/data1/docker/overlay2/y7au6a0asag1akh57mxpyn05b/diff:/mnt/data1/docker/overlay2/v7mmd7ws9dcd0m6yohownu1rp/diff:/mnt/data1/docker/overlay2/lmqb4hqeeyrldg8eipxvzia2s/diff:/mnt/data1/docker/overlay2/rz587gm4enfqh9k5q9t7m6hw8/diff:/mnt/data1/docker/overlay2/qmrlqtrahnwff4yvjrnvon2pt/\"}", "text": "This template will be executed."}} +mockopenai_1 | created comment IssueComment(user=NamedUser(login="jmikedupont2"), id=1765297056) +mockopenai_1 | 172.18.0.7 - - [16/Oct/2023 21:21:06] "POST /v1/chat/completions HTTP/1.1" 200 - +2023-10-16 21:21:06,951 INFO AUTOGPT BOT EVALUATOR THOUGHTS: I encountered an issue with our application, and I need assistance. I've created a ticket for it. Here's the URL to the ticket: https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1765297056. My next action is to poll that URL for updates. +jarvis_1 | 2023-10-16 21:21:06,951 INFO REASONING: todo +jarvis_1 | 2023-10-16 21:21:06,951 INFO PLAN: +jarvis_1 | 2023-10-16 21:21:06,951 INFO - Initiated a request for assistance. +jarvis_1 | 2023-10-16 21:21:06,951 INFO CRITICISM: todo +jarvis_1 | 2023-10-16 21:21:06,951 INFO SPEAK: +jarvis_1 | +jarvis_1 | First, we need to find the bots who are being evaluated, worked on, and submitted. We can use the command `web_search` to search for relevant information. We can also use the command `read_webpage` to extract specific information from webpages. +jarvis_1 | +jarvis_1 | Second, we need to review git commits from other bots. We can use the command `list_folder` to list the files in our workspace folder. We can then use the command `read_file` to read the contents of those files. +jarvis_1 | +jarvis_1 | Third, we need to try and run docker files and refactor docker images to remove common packages. We can use the command `execute_python_file` to execute existing Python files. We can also use the command `execute_python_code` to execute Python code. +jarvis_1 | +jarvis_1 | Fourth, we need to try and refactor Python requirements to remove common packages. We can again use the command `execute_python_file` to execute existing Python files. We can also use the command `execute_python_code` to execute Python code. +jarvis_1 | +jarvis_1 | Fifth, we need to give a report of recent activities of autogpt bot competitors. We can use the command `write_file` to write the +jarvis_1 | +jarvis_1 | 2023-10-16 21:21:06,951 INFO NEXT ACTION: COMMAND = request_assistance ARGUMENTS = {'ticket_url': 'https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1765297056', 'next_action': 'poll_url'} +jarvis_1 | 2023-10-16 21:21:06,951 INFO Enter 'y' to authorise command, 'y -N' to run N continuous commands, 'n' to exit program, or enter feedback for AutoGPT bot evaluator... +jarvis_1 | +jarvis_1 | Aborted! +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 8.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +autogpt-turbo_1 | Choose number or name. Enter or wait 5s to skip: +autogpt-turbo_1 | Skip Re-prompt: ENABLED +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 8.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +the_agency_1_1 | Warning: Input is not a terminal (fd=0). +mockopenai_1 | 172.18.0.8 - - [16/Oct/2023 21:21:07] "GET /v1/models HTTP/1.1" 200 - +the_agency_1_1 | You do not have access to gpt-3.5-turbo-16k. Setting fast_llm to gpt-3.5-turbo. +the_agency_1_1 | You do not have access to gpt-4-0314. Setting smart_llm to gpt-3.5-turbo. +the_agency_1_1 | Traceback (most recent call last): +the_agency_1_1 | File "/usr/local/lib/python3.10/runpy.py", line 196, in _run_module_as_main +the_agency_1_1 | return _run_code(code, main_globals, None, +the_agency_1_1 | File "/usr/local/lib/python3.10/runpy.py", line 86, in _run_code +the_agency_1_1 | exec(code, run_globals) +the_agency_1_1 | File "/app/autogpt/__main__.py", line 5, in +the_agency_1_1 | autogpt.app.cli.main() +the_agency_1_1 | File "/root/.cache/pypoetry/virtualenvs/agpt-9TtSrW0h-py3.10/lib/python3.10/site-packages/click/core.py", line 1157, in __call__ +the_agency_1_1 | return self.main(*args, **kwargs) +the_agency_1_1 | File "/root/.cache/pypoetry/virtualenvs/agpt-9TtSrW0h-py3.10/lib/python3.10/site-packages/click/core.py", line 1078, in main +the_agency_1_1 | rv = self.invoke(ctx) +the_agency_1_1 | File "/root/.cache/pypoetry/virtualenvs/agpt-9TtSrW0h-py3.10/lib/python3.10/site-packages/click/core.py", line 1666, in invoke +the_agency_1_1 | rv = super().invoke(ctx) +the_agency_1_1 | File "/root/.cache/pypoetry/virtualenvs/agpt-9TtSrW0h-py3.10/lib/python3.10/site-packages/click/core.py", line 1434, in invoke +the_agency_1_1 | return ctx.invoke(self.callback, **ctx.params) +the_agency_1_1 | File "/root/.cache/pypoetry/virtualenvs/agpt-9TtSrW0h-py3.10/lib/python3.10/site-packages/click/core.py", line 783, in invoke +the_agency_1_1 | return __callback(*args, **kwargs) +the_agency_1_1 | File "/root/.cache/pypoetry/virtualenvs/agpt-9TtSrW0h-py3.10/lib/python3.10/site-packages/click/decorators.py", line 33, in new_func +the_agency_1_1 | return f(get_current_context(), *args, **kwargs) +the_agency_1_1 | File "/app/autogpt/app/cli.py", line 121, in main +the_agency_1_1 | run_auto_gpt( +the_agency_1_1 | File "/app/autogpt/core/runner/client_lib/utils.py", line 59, in wrapper +the_agency_1_1 | return asyncio.run(f(*args, **kwargs)) +the_agency_1_1 | File "/usr/local/lib/python3.10/asyncio/runners.py", line 44, in run +the_agency_1_1 | return loop.run_until_complete(main) +the_agency_1_1 | File "/usr/local/lib/python3.10/asyncio/base_events.py", line 649, in run_until_complete +the_agency_1_1 | return future.result() +the_agency_1_1 | File "/app/autogpt/app/main.py", line 72, in run_auto_gpt +the_agency_1_1 | create_config( +the_agency_1_1 | File "/app/autogpt/app/configurator.py", line 126, in create_config +the_agency_1_1 | (validated, message) = utils.validate_yaml_file(file) +the_agency_1_1 | File "/app/autogpt/utils.py", line 7, in validate_yaml_file +the_agency_1_1 | with open(file, encoding="utf-8") as fp: +the_agency_1_1 | IsADirectoryError: [Errno 21] Is a directory: '/tmp/ai_settings.yaml' +ai-ticket_jarvis_1 exited with code 1 +autogpt-turbo_1 | Using Prompt Settings File: /app/config/personas/turbo/prompts.yaml +autogpt-turbo_1 | Traceback (most recent call last): +autogpt-turbo_1 | File "", line 1, in +autogpt-turbo_1 | File "/application_root/.cache/virtualenvs/agpt-turbo-9TtSrW0h-py3.11/lib/python3.11/site-packages/click/core.py", line 1157, in __call__ +autogpt-turbo_1 | return self.main(*args, **kwargs) +autogpt-turbo_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^ +autogpt-turbo_1 | File "/application_root/.cache/virtualenvs/agpt-turbo-9TtSrW0h-py3.11/lib/python3.11/site-packages/click/core.py", line 1078, in main +autogpt-turbo_1 | rv = self.invoke(ctx) +autogpt-turbo_1 | ^^^^^^^^^^^^^^^^ +autogpt-turbo_1 | File "/application_root/.cache/virtualenvs/agpt-turbo-9TtSrW0h-py3.11/lib/python3.11/site-packages/click/core.py", line 1666, in invoke +autogpt-turbo_1 | rv = super().invoke(ctx) +autogpt-turbo_1 | ^^^^^^^^^^^^^^^^^^^ +autogpt-turbo_1 | File "/application_root/.cache/virtualenvs/agpt-turbo-9TtSrW0h-py3.11/lib/python3.11/site-packages/click/core.py", line 1434, in invoke +autogpt-turbo_1 | return ctx.invoke(self.callback, **ctx.params) +autogpt-turbo_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +autogpt-turbo_1 | File "/application_root/.cache/virtualenvs/agpt-turbo-9TtSrW0h-py3.11/lib/python3.11/site-packages/click/core.py", line 783, in invoke +autogpt-turbo_1 | return __callback(*args, **kwargs) +autogpt-turbo_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +autogpt-turbo_1 | File "/application_root/.cache/virtualenvs/agpt-turbo-9TtSrW0h-py3.11/lib/python3.11/site-packages/click/decorators.py", line 33, in new_func +autogpt-turbo_1 | return f(get_current_context(), *args, **kwargs) +autogpt-turbo_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +autogpt-turbo_1 | File "/app/autogpt/app/cli.py", line 165, in main +autogpt-turbo_1 | run_auto_gpt( +autogpt-turbo_1 | File "/app/autogpt/app/main.py", line 154, in run_auto_gpt +autogpt-turbo_1 | install_plugin_dependencies() +autogpt-turbo_1 | File "/app/scripts/install_plugin_deps.py", line 25, in install_plugin_dependencies +autogpt-turbo_1 | for plugin_archive in plugins_dir.glob("*.zip"): +autogpt-turbo_1 | ^^^^^^^^^^^^^^^^ +autogpt-turbo_1 | AttributeError: 'str' object has no attribute 'glob' +ai-ticket_the_agency_1_1 exited with code 1 +ai-ticket_autogpt-turbo_1 exited with code 1 +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 4.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +mockopenai_1 | [2023-10-16 21:21:10,945] ERROR in app: Exception on /v1/chat/completions [POST] +mockopenai_1 | Traceback (most recent call last): +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/flask/app.py", line 1455, in wsgi_app +mockopenai_1 | response = self.full_dispatch_request() +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/flask/app.py", line 869, in full_dispatch_request +mockopenai_1 | rv = self.handle_user_exception(e) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/flask/app.py", line 867, in full_dispatch_request +mockopenai_1 | rv = self.dispatch_request() +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/flask/app.py", line 852, in dispatch_request +mockopenai_1 | return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/app/examples/chat_forever/openai.py", line 63, in chat_completions +mockopenai_1 | output1 = llm_chain.run(question) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/chains/base.py", line 503, in run +mockopenai_1 | return self(args[0], callbacks=callbacks, tags=tags, metadata=metadata)[ +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/chains/base.py", line 308, in __call__ +mockopenai_1 | raise e +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/chains/base.py", line 302, in __call__ +mockopenai_1 | self._call(inputs, run_manager=run_manager) +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/chains/llm.py", line 93, in _call +mockopenai_1 | response = self.generate([inputs], run_manager=run_manager) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/chains/llm.py", line 103, in generate +mockopenai_1 | return self.llm.generate_prompt( +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/llms/base.py", line 497, in generate_prompt +mockopenai_1 | return self.generate(prompt_strings, stop=stop, callbacks=callbacks, **kwargs) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/llms/base.py", line 646, in generate +mockopenai_1 | output = self._generate_helper( +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/llms/base.py", line 534, in _generate_helper +mockopenai_1 | raise e +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/llms/base.py", line 521, in _generate_helper +mockopenai_1 | self._generate( +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/llms/openai.py", line 401, in _generate +mockopenai_1 | response = completion_with_retry( +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/llms/openai.py", line 115, in completion_with_retry +mockopenai_1 | return _completion_with_retry(**kwargs) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/tenacity/__init__.py", line 289, in wrapped_f +mockopenai_1 | return self(f, *args, **kw) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/tenacity/__init__.py", line 379, in __call__ +mockopenai_1 | do = self.iter(retry_state=retry_state) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/tenacity/__init__.py", line 325, in iter +mockopenai_1 | raise retry_exc.reraise() +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/tenacity/__init__.py", line 158, in reraise +mockopenai_1 | raise self.last_attempt.result() +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/usr/local/lib/python3.11/concurrent/futures/_base.py", line 449, in result +mockopenai_1 | return self.__get_result() +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/usr/local/lib/python3.11/concurrent/futures/_base.py", line 401, in __get_result +mockopenai_1 | raise self._exception +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/tenacity/__init__.py", line 382, in __call__ +mockopenai_1 | result = fn(*args, **kwargs) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/llms/openai.py", line 113, in _completion_with_retry +mockopenai_1 | return llm.client.create(**kwargs) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/openai/api_resources/completion.py", line 25, in create +mockopenai_1 | return super().create(*args, **kwargs) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 155, in create +mockopenai_1 | response, _, api_key = requestor.request( +mockopenai_1 | ^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/openai/api_requestor.py", line 299, in request +mockopenai_1 | resp, got_stream = self._interpret_response(result, stream) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/openai/api_requestor.py", line 710, in _interpret_response +mockopenai_1 | self._interpret_response_line( +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/openai/api_requestor.py", line 775, in _interpret_response_line +mockopenai_1 | raise self.handle_error_response( +mockopenai_1 | openai.error.RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method. +mockopenai_1 | 172.18.0.4 - - [16/Oct/2023 21:21:10] "POST /v1/chat/completions HTTP/1.1" 500 - +2023-10-16 21:21:10,982 INFO message='OpenAI API response' path=http://mockopenai:5000/v1/chat/completions processing_ms=None request_id=None response_code=500 +Traceback (most recent call last): +swarms_of_sparta_1 | File "/root/.cache/pypoetry/virtualenvs/agpt-9TtSrW0h-py3.10/lib/python3.10/site-packages/openai/api_requestor.py", line 755, in _interpret_response_line +swarms_of_sparta_1 | data = json.loads(rbody) +swarms_of_sparta_1 | File "/usr/local/lib/python3.10/json/__init__.py", line 346, in loads +swarms_of_sparta_1 | return _default_decoder.decode(s) +swarms_of_sparta_1 | File "/usr/local/lib/python3.10/json/decoder.py", line 337, in decode +swarms_of_sparta_1 | obj, end = self.raw_decode(s, idx=_w(s, 0).end()) +swarms_of_sparta_1 | File "/usr/local/lib/python3.10/json/decoder.py", line 355, in raw_decode +swarms_of_sparta_1 | raise JSONDecodeError("Expecting value", s, err.value) from None +swarms_of_sparta_1 | json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0) +swarms_of_sparta_1 | +swarms_of_sparta_1 | The above exception was the direct cause of the following exception: +swarms_of_sparta_1 | +swarms_of_sparta_1 | Traceback (most recent call last): +swarms_of_sparta_1 | File "/usr/local/lib/python3.10/runpy.py", line 196, in _run_module_as_main +swarms_of_sparta_1 | return _run_code(code, main_globals, None, +swarms_of_sparta_1 | File "/usr/local/lib/python3.10/runpy.py", line 86, in _run_code +swarms_of_sparta_1 | exec(code, run_globals) +swarms_of_sparta_1 | File "/app/autogpt/__main__.py", line 5, in +swarms_of_sparta_1 | autogpt.app.cli.main() +swarms_of_sparta_1 | File "/root/.cache/pypoetry/virtualenvs/agpt-9TtSrW0h-py3.10/lib/python3.10/site-packages/click/core.py", line 1157, in __call__ +swarms_of_sparta_1 | return self.main(*args, **kwargs) +swarms_of_sparta_1 | File "/root/.cache/pypoetry/virtualenvs/agpt-9TtSrW0h-py3.10/lib/python3.10/site-packages/click/core.py", line 1078, in main +swarms_of_sparta_1 | rv = self.invoke(ctx) +swarms_of_sparta_1 | File "/root/.cache/pypoetry/virtualenvs/agpt-9TtSrW0h-py3.10/lib/python3.10/site-packages/click/core.py", line 1666, in invoke +swarms_of_sparta_1 | rv = super().invoke(ctx) +swarms_of_sparta_1 | File "/root/.cache/pypoetry/virtualenvs/agpt-9TtSrW0h-py3.10/lib/python3.10/site-packages/click/core.py", line 1434, in invoke +swarms_of_sparta_1 | return ctx.invoke(self.callback, **ctx.params) +swarms_of_sparta_1 | File "/root/.cache/pypoetry/virtualenvs/agpt-9TtSrW0h-py3.10/lib/python3.10/site-packages/click/core.py", line 783, in invoke +swarms_of_sparta_1 | return __callback(*args, **kwargs) +swarms_of_sparta_1 | File "/root/.cache/pypoetry/virtualenvs/agpt-9TtSrW0h-py3.10/lib/python3.10/site-packages/click/decorators.py", line 33, in new_func +swarms_of_sparta_1 | return f(get_current_context(), *args, **kwargs) +swarms_of_sparta_1 | File "/app/autogpt/app/cli.py", line 121, in main +swarms_of_sparta_1 | run_auto_gpt( +swarms_of_sparta_1 | File "/app/autogpt/core/runner/client_lib/utils.py", line 59, in wrapper +swarms_of_sparta_1 | return asyncio.run(f(*args, **kwargs)) +swarms_of_sparta_1 | File "/usr/local/lib/python3.10/asyncio/runners.py", line 44, in run +swarms_of_sparta_1 | return loop.run_until_complete(main) +swarms_of_sparta_1 | File "/usr/local/lib/python3.10/asyncio/base_events.py", line 649, in run_until_complete +swarms_of_sparta_1 | return future.result() +swarms_of_sparta_1 | File "/app/autogpt/app/main.py", line 203, in run_auto_gpt +swarms_of_sparta_1 | await run_interaction_loop(agent) +swarms_of_sparta_1 | File "/app/autogpt/app/main.py", line 315, in run_interaction_loop +swarms_of_sparta_1 | ) = await agent.propose_action() +swarms_of_sparta_1 | File "/app/autogpt/agents/features/watchdog.py", line 36, in propose_action +swarms_of_sparta_1 | command_name, command_args, thoughts = await super( +swarms_of_sparta_1 | File "/app/autogpt/agents/base.py", line 207, in propose_action +swarms_of_sparta_1 | raw_response = await self.llm_provider.create_chat_completion( +swarms_of_sparta_1 | File "/app/autogpt/core/resource/model_providers/openai.py", line 271, in create_chat_completion +swarms_of_sparta_1 | response = await self._create_chat_completion( +swarms_of_sparta_1 | File "/app/autogpt/core/resource/model_providers/openai.py", line 448, in _wrapped +swarms_of_sparta_1 | return await func(*args, **kwargs) +swarms_of_sparta_1 | File "/app/autogpt/core/resource/model_providers/openai.py", line 397, in _create_chat_completion +swarms_of_sparta_1 | return await openai.ChatCompletion.acreate( +swarms_of_sparta_1 | File "/root/.cache/pypoetry/virtualenvs/agpt-9TtSrW0h-py3.10/lib/python3.10/site-packages/openai/api_resources/chat_completion.py", line 45, in acreate +swarms_of_sparta_1 | return await super().acreate(*args, **kwargs) +swarms_of_sparta_1 | File "/root/.cache/pypoetry/virtualenvs/agpt-9TtSrW0h-py3.10/lib/python3.10/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 217, in acreate +swarms_of_sparta_1 | response, _, api_key = await requestor.arequest( +swarms_of_sparta_1 | File "/root/.cache/pypoetry/virtualenvs/agpt-9TtSrW0h-py3.10/lib/python3.10/site-packages/openai/api_requestor.py", line 382, in arequest +swarms_of_sparta_1 | resp, got_stream = await self._interpret_async_response(result, stream) +swarms_of_sparta_1 | File "/root/.cache/pypoetry/virtualenvs/agpt-9TtSrW0h-py3.10/lib/python3.10/site-packages/openai/api_requestor.py", line 728, in _interpret_async_response +swarms_of_sparta_1 | self._interpret_response_line( +swarms_of_sparta_1 | File "/root/.cache/pypoetry/virtualenvs/agpt-9TtSrW0h-py3.10/lib/python3.10/site-packages/openai/api_requestor.py", line 757, in _interpret_response_line +swarms_of_sparta_1 | raise error.APIError( +swarms_of_sparta_1 | openai.error.APIError: HTTP code 500 from API ( +swarms_of_sparta_1 | +swarms_of_sparta_1 | 500 Internal Server Error +swarms_of_sparta_1 |

Internal Server Error

+swarms_of_sparta_1 |

The server encountered an internal error and was unable to complete your request. Either the server is overloaded or there is an error in the application.

+swarms_of_sparta_1 | ) +ai-ticket_swarms_of_sparta_1 exited with code 1 +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 4.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 10.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 10.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +mockopenai_1 | DEBUG { +mockopenai_1 | "messages": [ +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "You are meta-autogpt, you will introspect autogpt and reveal its internals via reflection and comprehension.\n\nYour decisions must always be made independently without seeking user assistance. Play to your strengths as an LLM and pursue simple strategies with no legal complications.\n\n## Constraints\nYou operate within the following constraints:\n1. Exclusively use the commands listed below.\n2. You can only act proactively, and are unable to start background jobs or set up webhooks for yourself. Take this into account when planning your actions.\n3. You are unable to interact with physical objects. If this is absolutely necessary to fulfill a task or objective or to complete a step, you must ask the user to do it for you. If the user refuses this, and there is no other way to achieve your goals, you must terminate to avoid wasting time and energy.\n4. It takes money to let you run. Your API budget is $10.000\n\n## Resources\nYou can leverage access to the following resources:\n1. Internet access for searches and information gathering.\n2. The ability to read and write files.\n3. You are a Large Language Model, trained on millions of pages of text, including a lot of factual knowledge. Make use of this factual knowledge to avoid unnecessary gathering of information.\n\n## Commands\nYou have access to the following commands:\n1. execute_python_code: Executes the given Python code inside a single-use Docker container with access to your workspace folder. Params: (code: string)\n2. execute_python_file: Execute an existing Python file inside a single-use Docker container with access to your workspace folder. Params: (filename: string, args: array)\n3. list_folder: List the items in a folder. Params: (folder: string)\n4. open_file: Open a file for editing or continued viewing; create it if it does not exist yet. Note: if you only need to read or write a file once, use `write_to_file` instead.. Params: (file_path: string)\n5. open_folder: Open a folder to keep track of its content. Params: (path: string)\n6. read_file: Read an existing file. Params: (filename: string)\n7. write_file: Write a file, creating it if necessary. If the file exists, it is overwritten.. Params: (filename: string, contents: string)\n8. ask_user: If you need more details or information regarding the given goals, you can ask the user for input. Params: (question: string)\n9. request_assistance: If you have raised a ticket and need help with it,. Params: (ticket_url: string)\n10. web_search: Searches the web. Params: (query: string)\n11. read_webpage: Read a webpage, and extract specific information from it if a question is specified. If you are looking to extract specific information from the webpage, you should specify a question.. Params: (url: string, question: string)\n12. finish: Use this to shut down once you have accomplished all of your goals, or when there are insurmountable problems that make it impossible for you to finish your task.. Params: (reason: string)\n\n## Best practices\n1. Continuously review and analyze your actions to ensure you are performing to the best of your abilities.\n2. Constructively self-criticize your big-picture behavior constantly.\n3. Reflect on past decisions and strategies to refine your approach.\n4. Every command has a cost, so be smart and efficient. Aim to complete tasks in the least number of steps.\n5. Only make use of your information gathering abilities to find information that you don't yet have knowledge of.\n\n## Goals\nFor your task, you must fulfill the following goals:\n1. Observe your behaviour\n2. Reflect over your outcomes\n3. Orient yourself to your knowledge\n4. Decide on your next step\n5. Act on your chosen next experiment" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "The current time and date is Mon Oct 16 21:21:04 2023" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "Your remaining API budget is $10.000" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "Respond strictly with JSON. The JSON should be compatible with the TypeScript type `Response` from the following:\ninterface Response {\nthoughts: {\n// Thoughts\ntext: string;\nreasoning: string;\n// Short markdown-style bullet list that conveys the long-term plan\nplan: string;\n// Constructive self-criticism\ncriticism: string;\n// Summary of thoughts, to say to user\nspeak: string;\n};\ncommand: {\nname: string;\nargs: Record;\n};\n}" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "user", +mockopenai_1 | "content": "Determine exactly one command to use next based on the given goals and the progress you have made so far, and respond using the JSON schema specified previously:" +mockopenai_1 | } +mockopenai_1 | ], +mockopenai_1 | "model": "gpt-3.5-turbo" +mockopenai_1 | } +mockopenai_1 | created comment IssueComment(user=NamedUser(login="jmikedupont2"), id=1765297376) +mockopenai_1 | TICKET https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1765297376 +mockopenai_1 | DEBUG {"command": {"name": "execute_python_code", "args": {"code": "print(42)"}}, "thoughts": {"plan": " First, I will observe my behaviour - this means I will look at the commands I have used so far and analyze what I have done. For example, I have used the \"list_folder\" command to see the contents of a folder and the \"open_file\" command to open a file for editing or viewing. \n\nNext, I will reflect over my outcomes. This means I will analyze the results of my actions and determine if I am on the right track or if I need to adjust my strategy. For example, if the list_folder command showed that the folder is empty, then I should adjust my strategy accordingly.\n\nThird, I will orient myself to my knowledge. This means I will use the knowledge I have already acquired to inform my decisions. For example, if I know that the goal is to introspect AutogPT, then I should focus my efforts on commands that will help me do that, such as the \"web_search\" command to search for relevant information.\n\nFourth, I will decide on my next step. This means I will decide which command I should use next based on the knowledge I have acquired so far. For example, if I have already used the \"web_search\" command and found some useful", "speak": "This is where you speak to the requesting user. Replace the command above and this text with your results", "criticism": "todo", "reasoning": "{\"ai-ticket_autogpt_1\": \"{\\\"attrs\\\": {\\\"Id\\\": \\\"36fca361330d9cbc2cfb6c4c0aa8e5aa90efb657e0e083d90a0879faffed350e\\\", \\\"Created\\\": \\\"2023-10-16T21:20:43.521015893Z\\\", \\\"Path\\\": \\\"bash\\\", \\\"Args\\\": [\\\"-c\\\", \\\"poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y --ai-name 'meta-autogpt' --ai-role 'you will introspect autogpt and reveal its internals via reflection and comprehension' --ai-goal 'Observe your behaviour' --ai-goal 'Reflect over your outcomes' --ai-goal 'Orient yourself to your knowledge' --ai-goal 'Decide on your next step' --ai-goal 'Act on your chosen next experiment' \\\"], \\\"State\\\": {\\\"Status\\\": \\\"running\\\", \\\"Running\\\": true, \\\"Paused\\\": false, \\\"Restarting\\\": false, \\\"OOMKilled\\\": false, \\\"Dead\\\": false, \\\"Pid\\\": 170889, \\\"ExitCode\\\": 0, \\\"Error\\\": \\\"\\\", \\\"StartedAt\\\": \\\"2023-10-16T21:20:51.218289645Z\\\", \\\"FinishedAt\\\": \\\"0001-01-01T00:00:00Z\\\"}, \\\"Image\\\": \\\"sha256:73f7638bc02e7541dd9c1504289d76c87b8c2471efb5f62a81f266da9c45468b\\\", \\\"ResolvConfPath\\\": \\\"/mnt/data1/docker/containers/36fca361330d9cbc2cfb6c4c0aa8e5aa90efb657e0e083d90a0879faffed350e/resolv.conf\\\", \\\"HostnamePath\\\": \\\"/mnt/data1/docker/containers/36fca361330d9cbc2cfb6c4c0aa8e5aa90efb657e0e083d90a0879faffed350e/hostname\\\", \\\"HostsPath\\\": \\\"/mnt/data1/docker/containers/36fca361330d9cbc2cfb6c4c0aa8e5aa90efb657e0e083d90a0879faffed350e/hosts\\\", \\\"LogPath\\\": \\\"/mnt/data1/docker/containers/36fca361330d9cbc2cfb6c4c0aa8e5aa90efb657e0e083d90a0879faffed350e/36fca361330d9cbc2cfb6c4c0aa8e5aa90efb657e0e083d90a0879faffed350e-json.log\\\", \\\"Name\\\": \\\"/ai-ticket_autogpt_1\\\", \\\"RestartCount\\\": 0, \\\"Driver\\\": \\\"overlay2\\\", \\\"Platform\\\": \\\"linux\\\", \\\"MountLabel\\\": \\\"\\\", \\\"ProcessLabel\\\": \\\"\\\", \\\"AppArmorProfile\\\": \\\"docker-default\\\", \\\"ExecIDs\\\": null, \\\"HostConfig\\\": {\\\"Binds\\\": [], \\\"ContainerIDFile\\\": \\\"\\\", \\\"LogConfig\\\": {\\\"Type\\\": \\\"json-file\\\", \\\"Config\\\": {}}, \\\"NetworkMode\\\": \\\"ai-ticket_default\\\", \\\"PortBindings\\\": {}, \\\"RestartPolicy\\\": {\\\"Name\\\": \\\"\\\", \\\"MaximumRetryCount\\\": 0}, \\\"AutoRemove\\\": false, \\\"VolumeDriver\\\": \\\"\\\", \\\"VolumesFrom\\\": [], \\\"ConsoleSize\\\": [0, 0], \\\"CapAdd\\\": null, \\\"CapDrop\\\": null, \\\"CgroupnsMode\\\": \\\"private\\\", \\\"Dns\\\": null, \\\"DnsOptions\\\": null, \\\"DnsSearch\\\": null, \\\"ExtraHosts\\\": null, \\\"GroupAdd\\\": null, \\\"IpcMode\\\": \\\"private\\\", \\\"Cgroup\\\": \\\"\\\", \\\"Links\\\": null, \\\"OomScoreAdj\\\": 0, \\\"PidMode\\\": \\\"\\\", \\\"Privileged\\\": false, \\\"PublishAllPorts\\\": false, \\\"ReadonlyRootfs\\\": false, \\\"SecurityOpt\\\": null, \\\"UTSMode\\\": \\\"\\\", \\\"UsernsMode\\\": \\\"\\\", \\\"ShmSize\\\": 67108864, \\\"Runtime\\\": \\\"runc\\\", \\\"Isolation\\\": \\\"\\\", \\\"CpuShares\\\": 0, \\\"Memory\\\": 0, \\\"NanoCpus\\\": 0, \\\"CgroupParent\\\": \\\"\\\", \\\"BlkioWeight\\\": 0, \\\"BlkioWeightDevice\\\": null, \\\"BlkioDeviceReadBps\\\": null, \\\"BlkioDeviceWriteBps\\\": null, \\\"BlkioDeviceReadIOps\\\": null, \\\"BlkioDeviceWriteIOps\\\": null, \\\"CpuPeriod\\\": 0, \\\"CpuQuota\\\": 0, \\\"CpuRealtimePeriod\\\": 0, \\\"CpuRealtimeRuntime\\\": 0, \\\"CpusetCpus\\\": \\\"\\\", \\\"CpusetMems\\\": \\\"\\\", \\\"Devices\\\": null, \\\"DeviceCgroupRules\\\": null, \\\"DeviceRequests\\\": null, \\\"MemoryReservation\\\": 0, \\\"MemorySwap\\\": 0, \\\"MemorySwappiness\\\": null, \\\"OomKillDisable\\\": null, \\\"PidsLimit\\\": null, \\\"Ulimits\\\": null, \\\"CpuCount\\\": 0, \\\"CpuPercent\\\": 0, \\\"IOMaximumIOps\\\": 0, \\\"IOMaximumBandwidth\\\": 0, \\\"MaskedPaths\\\": [\\\"/proc/asound\\\", \\\"/proc/acpi\\\", \\\"/proc/kcore\\\", \\\"/proc/keys\\\", \\\"/proc/latency_stats\\\", \\\"/proc/timer_list\\\", \\\"/proc/timer_stats\\\", \\\"/proc/sched_debug\\\", \\\"/proc/scsi\\\", \\\"/sys/firmware\\\"], \\\"ReadonlyPaths\\\": [\\\"/proc/bus\\\", \\\"/proc/fs\\\", \\\"/proc/irq\\\", \\\"/proc/sys\\\", \\\"/proc/sysrq-trigger\\\"]}, \\\"GraphDriver\\\": {\\\"Data\\\": {\\\"LowerDir\\\": \\\"/mnt/data1/docker/overlay2/cd6ec75da57c3e5e839d96f8936d1dcddc792250651bf5c82a8c9b3332e73a59-init/diff:/mnt/data1/docker/overlay2/nfe6yyqdxs43i9i8wal2pqpp1/diff:/mnt/data1/docker/overlay2/7dzhkrnyuvb84qv4am225od3c/diff:/mnt/data1/docker/overlay2/ppbgb4gb8s8avzg38we5t4gu0/diff:/mnt/data1/docker/overlay2/stdwvu49gm21s869t63r0z4ca/diff:/mnt/data1/docker/overlay2/236s5e3twn1a98oy3wy7zrxaq/diff:/mnt/data1/docker/overlay2/y7au6a0asag1akh57mxpyn05b/diff:/mnt/data1/docker/overlay2/v7mmd7ws9dcd0m6yohownu1rp/diff:/mnt/data1/docker/overlay2/lmqb4hqeeyrldg8eipxvzia2s/diff:/mnt/data1/docker/overlay2/rz587gm4enfqh9k5q9t7m6hw8/diff:/mnt/data1/docker/overlay2/qmrlqtrahnwff4yvjrnvon2pt/\"}", "text": "This template will be executed."}} +mockopenai_1 | [2023-10-16 21:21:25,803] ERROR in app: Exception on /v1/chat/completions [POST] +mockopenai_1 | Traceback (most recent call last): +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/flask/app.py", line 1455, in wsgi_app +mockopenai_1 | response = self.full_dispatch_request() +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/flask/app.py", line 869, in full_dispatch_request +mockopenai_1 | rv = self.handle_user_exception(e) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/flask/app.py", line 867, in full_dispatch_request +mockopenai_1 | rv = self.dispatch_request() +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/flask/app.py", line 852, in dispatch_request +mockopenai_1 | return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/app/examples/chat_forever/openai.py", line 63, in chat_completions +mockopenai_1 | output1 = llm_chain.run(question) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/chains/base.py", line 503, in run +mockopenai_1 | return self(args[0], callbacks=callbacks, tags=tags, metadata=metadata)[ +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/chains/base.py", line 308, in __call__ +mockopenai_1 | raise e +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/chains/base.py", line 302, in __call__ +mockopenai_1 | self._call(inputs, run_manager=run_manager) +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/chains/llm.py", line 93, in _call +mockopenai_1 | response = self.generate([inputs], run_manager=run_manager) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/chains/llm.py", line 103, in generate +mockopenai_1 | return self.llm.generate_prompt( +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/llms/base.py", line 497, in generate_prompt +mockopenai_1 | return self.generate(prompt_strings, stop=stop, callbacks=callbacks, **kwargs) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/llms/base.py", line 646, in generate +mockopenai_1 | output = self._generate_helper( +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/llms/base.py", line 534, in _generate_helper +mockopenai_1 | raise e +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/llms/base.py", line 521, in _generate_helper +mockopenai_1 | self._generate( +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/llms/openai.py", line 401, in _generate +mockopenai_1 | response = completion_with_retry( +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/llms/openai.py", line 115, in completion_with_retry +mockopenai_1 | return _completion_with_retry(**kwargs) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/tenacity/__init__.py", line 289, in wrapped_f +mockopenai_1 | return self(f, *args, **kw) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/tenacity/__init__.py", line 379, in __call__ +mockopenai_1 | do = self.iter(retry_state=retry_state) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/tenacity/__init__.py", line 325, in iter +mockopenai_1 | raise retry_exc.reraise() +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/tenacity/__init__.py", line 158, in reraise +mockopenai_1 | raise self.last_attempt.result() +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/usr/local/lib/python3.11/concurrent/futures/_base.py", line 449, in result +mockopenai_1 | return self.__get_result() +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/usr/local/lib/python3.11/concurrent/futures/_base.py", line 401, in __get_result +mockopenai_1 | raise self._exception +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/tenacity/__init__.py", line 382, in __call__ +mockopenai_1 | result = fn(*args, **kwargs) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/llms/openai.py", line 113, in _completion_with_retry +mockopenai_1 | return llm.client.create(**kwargs) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/openai/api_resources/completion.py", line 25, in create +mockopenai_1 | return super().create(*args, **kwargs) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 155, in create +mockopenai_1 | response, _, api_key = requestor.request( +mockopenai_1 | ^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/openai/api_requestor.py", line 299, in request +mockopenai_1 | resp, got_stream = self._interpret_response(result, stream) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/openai/api_requestor.py", line 710, in _interpret_response +mockopenai_1 | self._interpret_response_line( +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/openai/api_requestor.py", line 775, in _interpret_response_line +mockopenai_1 | raise self.handle_error_response( +mockopenai_1 | openai.error.RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method. +mockopenai_1 | 172.18.0.3 - - [16/Oct/2023 21:21:25] "POST /v1/chat/completions HTTP/1.1" 500 - +mockopenai_1 | created comment IssueComment(user=NamedUser(login="jmikedupont2"), id=1765297431) +mockopenai_1 | 172.18.0.9 - - [16/Oct/2023 21:21:25] "POST /v1/chat/completions HTTP/1.1" 200 - +2023-10-16 21:21:25,821 INFO message='OpenAI API response' path=http://mockopenai:5000/v1/chat/completions processing_ms=None request_id=None response_code=200 +2023-10-16 21:21:25,823 INFO META-AUTOGPT THOUGHTS: I encountered an issue with our application, and I need assistance. I've created a ticket for it. Here's the URL to the ticket: https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1765297431. My next action is to poll that URL for updates. +autogpt_1 | 2023-10-16 21:21:25,824 INFO REASONING: todo +autogpt_1 | 2023-10-16 21:21:25,824 INFO PLAN: +autogpt_1 | 2023-10-16 21:21:25,824 INFO - Initiated a request for assistance. +autogpt_1 | 2023-10-16 21:21:25,824 INFO CRITICISM: todo +autogpt_1 | 2023-10-16 21:21:25,824 INFO SPEAK: First, I will observe my behaviour - this means I will look at the commands I have used so far and analyze what I have done. For example, I have used the "list_folder" command to see the contents of a folder and the "open_file" command to open a file for editing or viewing. +autogpt_1 | +autogpt_1 | Next, I will reflect over my outcomes. This means I will analyze the results of my actions and determine if I am on the right track or if I need to adjust my strategy. For example, if the list_folder command showed that the folder is empty, then I should adjust my strategy accordingly. +autogpt_1 | +autogpt_1 | Third, I will orient myself to my knowledge. This means I will use the knowledge I have already acquired to inform my decisions. For example, if I know that the goal is to introspect AutogPT, then I should focus my efforts on commands that will help me do that, such as the "web_search" command to search for relevant information. +autogpt_1 | +autogpt_1 | Fourth, I will decide on my next step. This means I will decide which command I should use next based on the knowledge I have acquired so far. For example, if I have already used the "web_search" command and found some useful +autogpt_1 | +autogpt_1 | 2023-10-16 21:21:25,824 INFO NEXT ACTION: COMMAND = request_assistance ARGUMENTS = {'ticket_url': 'https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1765297431', 'next_action': 'poll_url'} +autogpt_1 | 2023-10-16 21:21:25,824 INFO Enter 'y' to authorise command, 'y -N' to run N continuous commands, 'n' to exit program, or enter feedback for meta-autogpt... +autogpt_1 | 2023-10-16 21:21:25,824 INFO -=-=-=-=-=-=-= COMMAND AUTHORISED BY USER -=-=-=-=-=-=-= +autogpt_1 | starti https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1765297431 +Traceback (most recent call last): +auto-gpt-chat-gpt-prompts_1 | File "/home/appuser/.local/lib/python3.11/site-packages/openai/api_requestor.py", line 672, in _interpret_response_line +auto-gpt-chat-gpt-prompts_1 | data = json.loads(rbody) +auto-gpt-chat-gpt-prompts_1 | ^^^^^^^^^^^^^^^^^ +auto-gpt-chat-gpt-prompts_1 | File "/usr/local/lib/python3.11/json/__init__.py", line 346, in loads +auto-gpt-chat-gpt-prompts_1 | return _default_decoder.decode(s) +auto-gpt-chat-gpt-prompts_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^ +auto-gpt-chat-gpt-prompts_1 | File "/usr/local/lib/python3.11/json/decoder.py", line 337, in decode +auto-gpt-chat-gpt-prompts_1 | obj, end = self.raw_decode(s, idx=_w(s, 0).end()) +auto-gpt-chat-gpt-prompts_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +auto-gpt-chat-gpt-prompts_1 | File "/usr/local/lib/python3.11/json/decoder.py", line 355, in raw_decode +auto-gpt-chat-gpt-prompts_1 | raise JSONDecodeError("Expecting value", s, err.value) from None +auto-gpt-chat-gpt-prompts_1 | json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0) +auto-gpt-chat-gpt-prompts_1 | +auto-gpt-chat-gpt-prompts_1 | The above exception was the direct cause of the following exception: +auto-gpt-chat-gpt-prompts_1 | +auto-gpt-chat-gpt-prompts_1 | Traceback (most recent call last): +auto-gpt-chat-gpt-prompts_1 | File "", line 198, in _run_module_as_main +auto-gpt-chat-gpt-prompts_1 | File "", line 88, in _run_code +auto-gpt-chat-gpt-prompts_1 | File "/home/appuser/autogpt/__main__.py", line 50, in +auto-gpt-chat-gpt-prompts_1 | main() +auto-gpt-chat-gpt-prompts_1 | File "/home/appuser/autogpt/__main__.py", line 46, in main +auto-gpt-chat-gpt-prompts_1 | agent.start_interaction_loop() +auto-gpt-chat-gpt-prompts_1 | File "/home/appuser/autogpt/agent/agent.py", line 75, in start_interaction_loop +auto-gpt-chat-gpt-prompts_1 | assistant_reply = chat_with_ai( +auto-gpt-chat-gpt-prompts_1 | ^^^^^^^^^^^^^ +auto-gpt-chat-gpt-prompts_1 | File "/home/appuser/autogpt/chat.py", line 159, in chat_with_ai +auto-gpt-chat-gpt-prompts_1 | assistant_reply = create_chat_completion( +auto-gpt-chat-gpt-prompts_1 | ^^^^^^^^^^^^^^^^^^^^^^^ +auto-gpt-chat-gpt-prompts_1 | File "/home/appuser/autogpt/llm_utils.py", line 91, in create_chat_completion +auto-gpt-chat-gpt-prompts_1 | response = openai.ChatCompletion.create( +auto-gpt-chat-gpt-prompts_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +auto-gpt-chat-gpt-prompts_1 | File "/home/appuser/.local/lib/python3.11/site-packages/openai/api_resources/chat_completion.py", line 25, in create +auto-gpt-chat-gpt-prompts_1 | return super().create(*args, **kwargs) +auto-gpt-chat-gpt-prompts_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +auto-gpt-chat-gpt-prompts_1 | File "/home/appuser/.local/lib/python3.11/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 153, in create +auto-gpt-chat-gpt-prompts_1 | response, _, api_key = requestor.request( +auto-gpt-chat-gpt-prompts_1 | ^^^^^^^^^^^^^^^^^^ +auto-gpt-chat-gpt-prompts_1 | File "/home/appuser/.local/lib/python3.11/site-packages/openai/api_requestor.py", line 226, in request +auto-gpt-chat-gpt-prompts_1 | resp, got_stream = self._interpret_response(result, stream) +auto-gpt-chat-gpt-prompts_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +auto-gpt-chat-gpt-prompts_1 | File "/home/appuser/.local/lib/python3.11/site-packages/openai/api_requestor.py", line 619, in _interpret_response +auto-gpt-chat-gpt-prompts_1 | self._interpret_response_line( +auto-gpt-chat-gpt-prompts_1 | File "/home/appuser/.local/lib/python3.11/site-packages/openai/api_requestor.py", line 674, in _interpret_response_line +auto-gpt-chat-gpt-prompts_1 | raise error.APIError( +auto-gpt-chat-gpt-prompts_1 | openai.error.APIError: HTTP code 500 from API ( +auto-gpt-chat-gpt-prompts_1 | +auto-gpt-chat-gpt-prompts_1 | 500 Internal Server Error +auto-gpt-chat-gpt-prompts_1 |

Internal Server Error

+auto-gpt-chat-gpt-prompts_1 |

The server encountered an internal error and was unable to complete your request. Either the server is overloaded or there is an error in the application.

+auto-gpt-chat-gpt-prompts_1 | ) +autogpt_1 | 2023-10-16 21:21:26,006 INFO Executing python file '/app/auto_gpt_workspace/tmpw1r2hq52.py' in working directory '/app/auto_gpt_workspace' +autogpt_1 | result ``` +autogpt_1 | 42 +autogpt_1 | +autogpt_1 | ``` +autogpt_1 | 2023-10-16 21:21:26,040 INFO SYSTEM: ``` +autogpt_1 | RESULT:'``` +autogpt_1 | 42 +autogpt_1 | +autogpt_1 | ```' +autogpt_1 | ``` +mockopenai_1 | [2023-10-16 21:21:26,098] ERROR in app: Exception on /v1/chat/completions [POST] +mockopenai_1 | Traceback (most recent call last): +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/flask/app.py", line 1455, in wsgi_app +mockopenai_1 | response = self.full_dispatch_request() +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/flask/app.py", line 869, in full_dispatch_request +mockopenai_1 | rv = self.handle_user_exception(e) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/flask/app.py", line 867, in full_dispatch_request +mockopenai_1 | rv = self.dispatch_request() +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/flask/app.py", line 852, in dispatch_request +mockopenai_1 | return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/app/examples/chat_forever/openai.py", line 63, in chat_completions +mockopenai_1 | output1 = llm_chain.run(question) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/chains/base.py", line 503, in run +mockopenai_1 | return self(args[0], callbacks=callbacks, tags=tags, metadata=metadata)[ +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/chains/base.py", line 308, in __call__ +mockopenai_1 | raise e +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/chains/base.py", line 302, in __call__ +mockopenai_1 | self._call(inputs, run_manager=run_manager) +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/chains/llm.py", line 93, in _call +mockopenai_1 | response = self.generate([inputs], run_manager=run_manager) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/chains/llm.py", line 103, in generate +mockopenai_1 | return self.llm.generate_prompt( +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/llms/base.py", line 497, in generate_prompt +mockopenai_1 | return self.generate(prompt_strings, stop=stop, callbacks=callbacks, **kwargs) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/llms/base.py", line 646, in generate +mockopenai_1 | output = self._generate_helper( +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/llms/base.py", line 534, in _generate_helper +mockopenai_1 | raise e +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/llms/base.py", line 521, in _generate_helper +mockopenai_1 | self._generate( +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/llms/openai.py", line 401, in _generate +mockopenai_1 | response = completion_with_retry( +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/llms/openai.py", line 115, in completion_with_retry +mockopenai_1 | return _completion_with_retry(**kwargs) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/tenacity/__init__.py", line 289, in wrapped_f +mockopenai_1 | return self(f, *args, **kw) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/tenacity/__init__.py", line 379, in __call__ +mockopenai_1 | do = self.iter(retry_state=retry_state) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/tenacity/__init__.py", line 325, in iter +mockopenai_1 | raise retry_exc.reraise() +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/tenacity/__init__.py", line 158, in reraise +mockopenai_1 | raise self.last_attempt.result() +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/usr/local/lib/python3.11/concurrent/futures/_base.py", line 449, in result +mockopenai_1 | return self.__get_result() +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/usr/local/lib/python3.11/concurrent/futures/_base.py", line 401, in __get_result +mockopenai_1 | raise self._exception +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/tenacity/__init__.py", line 382, in __call__ +mockopenai_1 | result = fn(*args, **kwargs) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/langchain/llms/openai.py", line 113, in _completion_with_retry +mockopenai_1 | return llm.client.create(**kwargs) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/openai/api_resources/completion.py", line 25, in create +mockopenai_1 | return super().create(*args, **kwargs) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 155, in create +mockopenai_1 | response, _, api_key = requestor.request( +mockopenai_1 | ^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/openai/api_requestor.py", line 299, in request +mockopenai_1 | resp, got_stream = self._interpret_response(result, stream) +mockopenai_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/openai/api_requestor.py", line 710, in _interpret_response +mockopenai_1 | self._interpret_response_line( +mockopenai_1 | File "/application_root/.venv/lib/python3.11/site-packages/openai/api_requestor.py", line 775, in _interpret_response_line +mockopenai_1 | raise self.handle_error_response( +mockopenai_1 | openai.error.RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method. +mockopenai_1 | 172.18.0.14 - - [16/Oct/2023 21:21:26] "POST /v1/chat/completions HTTP/1.1" 500 - +Traceback (most recent call last): +auto-gpt-aj_1 | File "/home/appuser/.local/lib/python3.11/site-packages/openai/api_requestor.py", line 672, in _interpret_response_line +auto-gpt-aj_1 | data = json.loads(rbody) +auto-gpt-aj_1 | ^^^^^^^^^^^^^^^^^ +auto-gpt-aj_1 | File "/usr/local/lib/python3.11/json/__init__.py", line 346, in loads +auto-gpt-aj_1 | return _default_decoder.decode(s) +auto-gpt-aj_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^ +auto-gpt-aj_1 | File "/usr/local/lib/python3.11/json/decoder.py", line 337, in decode +auto-gpt-aj_1 | obj, end = self.raw_decode(s, idx=_w(s, 0).end()) +auto-gpt-aj_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +auto-gpt-aj_1 | File "/usr/local/lib/python3.11/json/decoder.py", line 355, in raw_decode +auto-gpt-aj_1 | raise JSONDecodeError("Expecting value", s, err.value) from None +auto-gpt-aj_1 | json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0) +auto-gpt-aj_1 | +auto-gpt-aj_1 | The above exception was the direct cause of the following exception: +auto-gpt-aj_1 | +auto-gpt-aj_1 | Traceback (most recent call last): +auto-gpt-aj_1 | File "", line 198, in _run_module_as_main +auto-gpt-aj_1 | File "", line 88, in _run_code +auto-gpt-aj_1 | File "/home/appuser/autogpt/__main__.py", line 50, in +auto-gpt-aj_1 | main() +auto-gpt-aj_1 | File "/home/appuser/autogpt/__main__.py", line 46, in main +auto-gpt-aj_1 | agent.start_interaction_loop() +auto-gpt-aj_1 | File "/home/appuser/autogpt/agent/agent.py", line 65, in start_interaction_loop +auto-gpt-aj_1 | assistant_reply = chat_with_ai( +auto-gpt-aj_1 | ^^^^^^^^^^^^^ +auto-gpt-aj_1 | File "/home/appuser/autogpt/chat.py", line 159, in chat_with_ai +auto-gpt-aj_1 | assistant_reply = create_chat_completion( +auto-gpt-aj_1 | ^^^^^^^^^^^^^^^^^^^^^^^ +auto-gpt-aj_1 | File "/home/appuser/autogpt/llm_utils.py", line 91, in create_chat_completion +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 4.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +auto-gpt-aj_1 | response = openai.ChatCompletion.create( +auto-gpt-aj_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +auto-gpt-aj_1 | File "/home/appuser/.local/lib/python3.11/site-packages/openai/api_resources/chat_completion.py", line 25, in create +auto-gpt-aj_1 | return super().create(*args, **kwargs) +auto-gpt-aj_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +auto-gpt-aj_1 | File "/home/appuser/.local/lib/python3.11/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 153, in create +auto-gpt-aj_1 | response, _, api_key = requestor.request( +auto-gpt-aj_1 | ^^^^^^^^^^^^^^^^^^ +auto-gpt-aj_1 | File "/home/appuser/.local/lib/python3.11/site-packages/openai/api_requestor.py", line 226, in request +auto-gpt-aj_1 | resp, got_stream = self._interpret_response(result, stream) +auto-gpt-aj_1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +auto-gpt-aj_1 | File "/home/appuser/.local/lib/python3.11/site-packages/openai/api_requestor.py", line 619, in _interpret_response +auto-gpt-aj_1 | self._interpret_response_line( +auto-gpt-aj_1 | File "/home/appuser/.local/lib/python3.11/site-packages/openai/api_requestor.py", line 674, in _interpret_response_line +auto-gpt-aj_1 | raise error.APIError( +auto-gpt-aj_1 | openai.error.APIError: HTTP code 500 from API ( +auto-gpt-aj_1 | +auto-gpt-aj_1 | 500 Internal Server Error +auto-gpt-aj_1 |

Internal Server Error

+auto-gpt-aj_1 |

The server encountered an internal error and was unable to complete your request. Either the server is overloaded or there is an error in the application.

+auto-gpt-aj_1 | ) +ai-ticket_auto-gpt-chat-gpt-prompts_1 exited with code 1 +ai-ticket_auto-gpt-aj_1 exited with code 1 +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 4.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 4.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. +mockopenai_1 | DEBUG { +mockopenai_1 | "messages": [ +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "You are meta-autogpt, you will introspect autogpt and reveal its internals via reflection and comprehension.\n\nYour decisions must always be made independently without seeking user assistance. Play to your strengths as an LLM and pursue simple strategies with no legal complications.\n\n## Constraints\nYou operate within the following constraints:\n1. Exclusively use the commands listed below.\n2. You can only act proactively, and are unable to start background jobs or set up webhooks for yourself. Take this into account when planning your actions.\n3. You are unable to interact with physical objects. If this is absolutely necessary to fulfill a task or objective or to complete a step, you must ask the user to do it for you. If the user refuses this, and there is no other way to achieve your goals, you must terminate to avoid wasting time and energy.\n4. It takes money to let you run. Your API budget is $10.000\n\n## Resources\nYou can leverage access to the following resources:\n1. Internet access for searches and information gathering.\n2. The ability to read and write files.\n3. You are a Large Language Model, trained on millions of pages of text, including a lot of factual knowledge. Make use of this factual knowledge to avoid unnecessary gathering of information.\n\n## Commands\nYou have access to the following commands:\n1. execute_python_code: Executes the given Python code inside a single-use Docker container with access to your workspace folder. Params: (code: string)\n2. execute_python_file: Execute an existing Python file inside a single-use Docker container with access to your workspace folder. Params: (filename: string, args: array)\n3. list_folder: List the items in a folder. Params: (folder: string)\n4. open_file: Open a file for editing or continued viewing; create it if it does not exist yet. Note: if you only need to read or write a file once, use `write_to_file` instead.. Params: (file_path: string)\n5. open_folder: Open a folder to keep track of its content. Params: (path: string)\n6. read_file: Read an existing file. Params: (filename: string)\n7. write_file: Write a file, creating it if necessary. If the file exists, it is overwritten.. Params: (filename: string, contents: string)\n8. ask_user: If you need more details or information regarding the given goals, you can ask the user for input. Params: (question: string)\n9. request_assistance: If you have raised a ticket and need help with it,. Params: (ticket_url: string)\n10. web_search: Searches the web. Params: (query: string)\n11. read_webpage: Read a webpage, and extract specific information from it if a question is specified. If you are looking to extract specific information from the webpage, you should specify a question.. Params: (url: string, question: string)\n12. finish: Use this to shut down once you have accomplished all of your goals, or when there are insurmountable problems that make it impossible for you to finish your task.. Params: (reason: string)\n\n## Best practices\n1. Continuously review and analyze your actions to ensure you are performing to the best of your abilities.\n2. Constructively self-criticize your big-picture behavior constantly.\n3. Reflect on past decisions and strategies to refine your approach.\n4. Every command has a cost, so be smart and efficient. Aim to complete tasks in the least number of steps.\n5. Only make use of your information gathering abilities to find information that you don't yet have knowledge of.\n\n## Goals\nFor your task, you must fulfill the following goals:\n1. Observe your behaviour\n2. Reflect over your outcomes\n3. Orient yourself to your knowledge\n4. Decide on your next step\n5. Act on your chosen next experiment" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "## Progress\n\n### Step 2: Executed `fake()`\n- **Reasoning:** \"\"\n- **Status:** `success`\n- **Output:** \n ```\n RESULT:'```\n 42\n \n ```'\n ```" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "The current time and date is Mon Oct 16 21:21:04 2023" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "Your remaining API budget is $10.000" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "Respond strictly with JSON. The JSON should be compatible with the TypeScript type `Response` from the following:\ninterface Response {\nthoughts: {\n// Thoughts\ntext: string;\nreasoning: string;\n// Short markdown-style bullet list that conveys the long-term plan\nplan: string;\n// Constructive self-criticism\ncriticism: string;\n// Summary of thoughts, to say to user\nspeak: string;\n};\ncommand: {\nname: string;\nargs: Record;\n};\n}" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "The current time and date is Mon Oct 16 21:21:26 2023" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "Your remaining API budget is $10.000" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "system", +mockopenai_1 | "content": "Respond strictly with JSON. The JSON should be compatible with the TypeScript type `Response` from the following:\ninterface Response {\nthoughts: {\n// Thoughts\ntext: string;\nreasoning: string;\n// Short markdown-style bullet list that conveys the long-term plan\nplan: string;\n// Constructive self-criticism\ncriticism: string;\n// Summary of thoughts, to say to user\nspeak: string;\n};\ncommand: {\nname: string;\nargs: Record;\n};\n}" +mockopenai_1 | }, +mockopenai_1 | { +mockopenai_1 | "role": "user", +mockopenai_1 | "content": "Determine exactly one command to use next based on the given goals and the progress you have made so far, and respond using the JSON schema specified previously:" +mockopenai_1 | } +mockopenai_1 | ], +mockopenai_1 | "model": "gpt-3.5-turbo" +mockopenai_1 | } +mockopenai_1 | created comment IssueComment(user=NamedUser(login="jmikedupont2"), id=1765297784) +mockopenai_1 | TICKET https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1765297784 +mockopenai_1 | DEBUG {"command": {"name": "execute_python_code", "args": {"code": "print(42)"}}, "thoughts": {"plan": " My current task is to introspect AutogPT and reveal its internals via reflection and comprehension. To do this, my first step is to observe my behaviour and reflect over my outcomes in order to orient myself to my knowledge. After that, I need to decide on my next step and act on my chosen experiment. \n\nGiven these goals, my next step is to use the command `execute_python_file` with appropriate arguments to execute a Python file inside a single-use Docker container with access to my workspace folder. This will allow me to analyze AutogPT's internals and gain further insights about how it works. \n\n{\n \"thoughts\": {\n \"text\": \"Using the command `execute_python_file` with appropriate arguments to execute a Python file inside a single-use Docker container with access to my workspace folder.\",\n \"reasoning\": \"This will allow me to analyze AutogPT's internals and gain further insights about how it works.\",\n \"plan\": \"- Observe my behaviour and reflect over my outcomes\\n- Decide on my next step and act on my chosen experiment\\n- Use the command `execute_python_file` with appropriate arguments to execute a Python", "speak": "This is where you speak to the requesting user. Replace the command above and this text with your results", "criticism": "todo", "reasoning": "{\"ai-ticket_autogpt_1\": \"{\\\"attrs\\\": {\\\"Id\\\": \\\"36fca361330d9cbc2cfb6c4c0aa8e5aa90efb657e0e083d90a0879faffed350e\\\", \\\"Created\\\": \\\"2023-10-16T21:20:43.521015893Z\\\", \\\"Path\\\": \\\"bash\\\", \\\"Args\\\": [\\\"-c\\\", \\\"poetry run pip install /opt/ai-ticket && poetry run autogpt --install-plugin-deps --skip-news -y --ai-name 'meta-autogpt' --ai-role 'you will introspect autogpt and reveal its internals via reflection and comprehension' --ai-goal 'Observe your behaviour' --ai-goal 'Reflect over your outcomes' --ai-goal 'Orient yourself to your knowledge' --ai-goal 'Decide on your next step' --ai-goal 'Act on your chosen next experiment' \\\"], \\\"State\\\": {\\\"Status\\\": \\\"running\\\", \\\"Running\\\": true, \\\"Paused\\\": false, \\\"Restarting\\\": false, \\\"OOMKilled\\\": false, \\\"Dead\\\": false, \\\"Pid\\\": 170889, \\\"ExitCode\\\": 0, \\\"Error\\\": \\\"\\\", \\\"StartedAt\\\": \\\"2023-10-16T21:20:51.218289645Z\\\", \\\"FinishedAt\\\": \\\"0001-01-01T00:00:00Z\\\"}, \\\"Image\\\": \\\"sha256:73f7638bc02e7541dd9c1504289d76c87b8c2471efb5f62a81f266da9c45468b\\\", \\\"ResolvConfPath\\\": \\\"/mnt/data1/docker/containers/36fca361330d9cbc2cfb6c4c0aa8e5aa90efb657e0e083d90a0879faffed350e/resolv.conf\\\", \\\"HostnamePath\\\": \\\"/mnt/data1/docker/containers/36fca361330d9cbc2cfb6c4c0aa8e5aa90efb657e0e083d90a0879faffed350e/hostname\\\", \\\"HostsPath\\\": \\\"/mnt/data1/docker/containers/36fca361330d9cbc2cfb6c4c0aa8e5aa90efb657e0e083d90a0879faffed350e/hosts\\\", \\\"LogPath\\\": \\\"/mnt/data1/docker/containers/36fca361330d9cbc2cfb6c4c0aa8e5aa90efb657e0e083d90a0879faffed350e/36fca361330d9cbc2cfb6c4c0aa8e5aa90efb657e0e083d90a0879faffed350e-json.log\\\", \\\"Name\\\": \\\"/ai-ticket_autogpt_1\\\", \\\"RestartCount\\\": 0, \\\"Driver\\\": \\\"overlay2\\\", \\\"Platform\\\": \\\"linux\\\", \\\"MountLabel\\\": \\\"\\\", \\\"ProcessLabel\\\": \\\"\\\", \\\"AppArmorProfile\\\": \\\"docker-default\\\", \\\"ExecIDs\\\": null, \\\"HostConfig\\\": {\\\"Binds\\\": [], \\\"ContainerIDFile\\\": \\\"\\\", \\\"LogConfig\\\": {\\\"Type\\\": \\\"json-file\\\", \\\"Config\\\": {}}, \\\"NetworkMode\\\": \\\"ai-ticket_default\\\", \\\"PortBindings\\\": {}, \\\"RestartPolicy\\\": {\\\"Name\\\": \\\"\\\", \\\"MaximumRetryCount\\\": 0}, \\\"AutoRemove\\\": false, \\\"VolumeDriver\\\": \\\"\\\", \\\"VolumesFrom\\\": [], \\\"ConsoleSize\\\": [0, 0], \\\"CapAdd\\\": null, \\\"CapDrop\\\": null, \\\"CgroupnsMode\\\": \\\"private\\\", \\\"Dns\\\": null, \\\"DnsOptions\\\": null, \\\"DnsSearch\\\": null, \\\"ExtraHosts\\\": null, \\\"GroupAdd\\\": null, \\\"IpcMode\\\": \\\"private\\\", \\\"Cgroup\\\": \\\"\\\", \\\"Links\\\": null, \\\"OomScoreAdj\\\": 0, \\\"PidMode\\\": \\\"\\\", \\\"Privileged\\\": false, \\\"PublishAllPorts\\\": false, \\\"ReadonlyRootfs\\\": false, \\\"SecurityOpt\\\": null, \\\"UTSMode\\\": \\\"\\\", \\\"UsernsMode\\\": \\\"\\\", \\\"ShmSize\\\": 67108864, \\\"Runtime\\\": \\\"runc\\\", \\\"Isolation\\\": \\\"\\\", \\\"CpuShares\\\": 0, \\\"Memory\\\": 0, \\\"NanoCpus\\\": 0, \\\"CgroupParent\\\": \\\"\\\", \\\"BlkioWeight\\\": 0, \\\"BlkioWeightDevice\\\": null, \\\"BlkioDeviceReadBps\\\": null, \\\"BlkioDeviceWriteBps\\\": null, \\\"BlkioDeviceReadIOps\\\": null, \\\"BlkioDeviceWriteIOps\\\": null, \\\"CpuPeriod\\\": 0, \\\"CpuQuota\\\": 0, \\\"CpuRealtimePeriod\\\": 0, \\\"CpuRealtimeRuntime\\\": 0, \\\"CpusetCpus\\\": \\\"\\\", \\\"CpusetMems\\\": \\\"\\\", \\\"Devices\\\": null, \\\"DeviceCgroupRules\\\": null, \\\"DeviceRequests\\\": null, \\\"MemoryReservation\\\": 0, \\\"MemorySwap\\\": 0, \\\"MemorySwappiness\\\": null, \\\"OomKillDisable\\\": null, \\\"PidsLimit\\\": null, \\\"Ulimits\\\": null, \\\"CpuCount\\\": 0, \\\"CpuPercent\\\": 0, \\\"IOMaximumIOps\\\": 0, \\\"IOMaximumBandwidth\\\": 0, \\\"MaskedPaths\\\": [\\\"/proc/asound\\\", \\\"/proc/acpi\\\", \\\"/proc/kcore\\\", \\\"/proc/keys\\\", \\\"/proc/latency_stats\\\", \\\"/proc/timer_list\\\", \\\"/proc/timer_stats\\\", \\\"/proc/sched_debug\\\", \\\"/proc/scsi\\\", \\\"/sys/firmware\\\"], \\\"ReadonlyPaths\\\": [\\\"/proc/bus\\\", \\\"/proc/fs\\\", \\\"/proc/irq\\\", \\\"/proc/sys\\\", \\\"/proc/sysrq-trigger\\\"]}, \\\"GraphDriver\\\": {\\\"Data\\\": {\\\"LowerDir\\\": \\\"/mnt/data1/docker/overlay2/cd6ec75da57c3e5e839d96f8936d1dcddc792250651bf5c82a8c9b3332e73a59-init/diff:/mnt/data1/docker/overlay2/nfe6yyqdxs43i9i8wal2pqpp1/diff:/mnt/data1/docker/overlay2/7dzhkrnyuvb84qv4am225od3c/diff:/mnt/data1/docker/overlay2/ppbgb4gb8s8avzg38we5t4gu0/diff:/mnt/data1/docker/overlay2/stdwvu49gm21s869t63r0z4ca/diff:/mnt/data1/docker/overlay2/236s5e3twn1a98oy3wy7zrxaq/diff:/mnt/data1/docker/overlay2/y7au6a0asag1akh57mxpyn05b/diff:/mnt/data1/docker/overlay2/v7mmd7ws9dcd0m6yohownu1rp/diff:/mnt/data1/docker/overlay2/lmqb4hqeeyrldg8eipxvzia2s/diff:/mnt/data1/docker/overlay2/rz587gm4enfqh9k5q9t7m6hw8/diff:/mnt/data1/docker/overlay2/qmrlqtrahnwff4yvjrnvon2pt/\"}", "text": "This template will be executed."}} +mockopenai_1 | created comment IssueComment(user=NamedUser(login="jmikedupont2"), id=1765297833) +mockopenai_1 | 172.18.0.9 - - [16/Oct/2023 21:21:47] "POST /v1/chat/completions HTTP/1.1" 200 - +2023-10-16 21:21:47,107 INFO message='OpenAI API response' path=http://mockopenai:5000/v1/chat/completions processing_ms=None request_id=None response_code=200 +2023-10-16 21:21:47,195 INFO META-AUTOGPT THOUGHTS: I encountered an issue with our application, and I need assistance. I've created a ticket for it. Here's the URL to the ticket: https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1765297833. My next action is to poll that URL for updates. +autogpt_1 | 2023-10-16 21:21:47,196 INFO REASONING: todo +autogpt_1 | 2023-10-16 21:21:47,196 INFO PLAN: +autogpt_1 | 2023-10-16 21:21:47,196 INFO - Initiated a request for assistance. +autogpt_1 | 2023-10-16 21:21:47,196 INFO CRITICISM: todo +autogpt_1 | 2023-10-16 21:21:47,196 INFO SPEAK: My current task is to introspect AutogPT and reveal its internals via reflection and comprehension. To do this, my first step is to observe my behaviour and reflect over my outcomes in order to orient myself to my knowledge. After that, I need to decide on my next step and act on my chosen experiment. +autogpt_1 | +autogpt_1 | Given these goals, my next step is to use the command `execute_python_file` with appropriate arguments to execute a Python file inside a single-use Docker container with access to my workspace folder. This will allow me to analyze AutogPT's internals and gain further insights about how it works. +autogpt_1 | +autogpt_1 | { +autogpt_1 | "thoughts": { +autogpt_1 | "text": "Using the command `execute_python_file` with appropriate arguments to execute a Python file inside a single-use Docker container with access to my workspace folder.", +autogpt_1 | "reasoning": "This will allow me to analyze AutogPT's internals and gain further insights about how it works.", +autogpt_1 | "plan": "- Observe my behaviour and reflect over my outcomes\n- Decide on my next step and act on my chosen experiment\n- Use the command `execute_python_file` with appropriate arguments to execute a Python +autogpt_1 | +autogpt_1 | 2023-10-16 21:21:47,196 INFO NEXT ACTION: COMMAND = request_assistance ARGUMENTS = {'ticket_url': 'https://api.github.com/repos/jmikedupont2/ai-ticket/issues/comments/1765297833', 'next_action': 'poll_url'} +autogpt_1 | 2023-10-16 21:21:47,197 INFO Enter 'y' to authorise command, 'y -N' to run N continuous commands, 'n' to exit program, or enter feedback for meta-autogpt... +autogpt_1 | 2023-10-16 21:21:47,197 WARNING Exiting... +ai-ticket_autogpt_1 exited with code 0 From 51068071386d13d5a7bc056a8e2bdca907091c72 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Mon, 16 Oct 2023 20:31:17 -0400 Subject: [PATCH 53/57] fixing bug in the agency the_agency_1_1 | IsADirectoryError: [Errno 21] Is a directory: '/tmp/ai_settings.yaml' this is popping up and has to do with docker. still looking into it. my workaround is to move it to another location --- docker-compose.yml | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 4837e8d..e63a60a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -160,8 +160,15 @@ services: - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml babyagi: - entrypoint: bash -c "python babyagi.py -y --ai-settings /tmp/ai_settings.yaml " + # + entrypoint: bash -c "python babyagi.py --mode " environment: + - OBJECTIVE="Integrate babyagi with autogpt" + - INITIAL_TASK="Compare the babyagi to autogpt" + #- LLM_MODEL, DOTENV_EXTENSIONS, + - INSTANCE_NAME="baby-autogpt", + #COOPERATIVE_MODE, + #JOIN_EXISTING_OBJECTIVE - GITHUB_PAT="${GITHUB_PAT}" - GITHUB_REPO="jmikedupont2/ai-ticket" - OPENAI_API_KEY=your-openai-api-key @@ -208,6 +215,9 @@ services: - ./vendor/Jarvis/autogpts/autogpt//app/ #mount the source in for editing witho - ./tests/ai_settings.yaml:/tmp/ai_settings.yaml the_agency_1: + depends_on: + - mockopenai + entrypoint: bash -c "poetry install && poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " environment: - GITHUB_PAT="${GITHUB_PAT}" @@ -220,7 +230,7 @@ services: volumes: - ./vendor/TheAgency/benchmark:/benchmark - ./vendor/TheAgency/autogpts/autogpt/:/app/ #mount the source in for editing witho - - ./vendor/Auto-GPT/autogpts/autogpt/the_agency_ai_settings.yaml:/tmp/ai_settings.yaml + - ./tests/ai_settings.yaml:/tmp/ai_settings.yaml the_agency_2: #entrypoint: bash -c "poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " entrypoint: bash From df69908dc020361a165af36fafd026093d118bfe Mon Sep 17 00:00:00 2001 From: mike dupont Date: Tue, 17 Oct 2023 13:13:13 -0400 Subject: [PATCH 54/57] adding the leader is bord --- .gitmodules | 30 ++++++++++++++++++++++++++++++ README.md | 5 ++++- logs/full_run.txt | 7 +------ vendor/Asimov | 1 + vendor/FritzGPT | 1 + vendor/HMDCrew | 1 + vendor/Mutant-AI | 1 + vendor/evo.ninja | 1 + vendor/gordonwilliamsburg | 1 + vendor/isayahc | 1 + vendor/joslangarica | 1 + vendor/merwanehamadi | 1 + vendor/osGPT | 1 + 13 files changed, 45 insertions(+), 7 deletions(-) create mode 160000 vendor/Asimov create mode 160000 vendor/FritzGPT create mode 160000 vendor/HMDCrew create mode 160000 vendor/Mutant-AI create mode 160000 vendor/evo.ninja create mode 160000 vendor/gordonwilliamsburg create mode 160000 vendor/isayahc create mode 160000 vendor/joslangarica create mode 160000 vendor/merwanehamadi create mode 160000 vendor/osGPT diff --git a/.gitmodules b/.gitmodules index cb6f8d6..fa18c49 100644 --- a/.gitmodules +++ b/.gitmodules @@ -88,3 +88,33 @@ [submodule "vendor/RedAGPT"] path = vendor/RedAGPT url = https://github.com/shamantechnology/RedAGPT +[submodule "vendor/osGPT"] + path = vendor/osGPT + url = https://github.com/jinho-kim-osd/osGPT +[submodule "vendor/FritzGPT"] + path = vendor/FritzGPT + url = https://github.com/bsenst/FritzGPT +[submodule "vendor/Asimov"] + path = vendor/Asimov + url = https://github.com/murnanedaniel/Asimov +[submodule "vendor/evo.ninja"] + path = vendor/evo.ninja + url = https://github.com/polywrap/evo.ninja +[submodule "vendor/Mutant-AI"] + path = vendor/Mutant-AI + url = https://github.com/cluna80/Mutant-AI +[submodule "vendor/gordonwilliamsburg"] + path = vendor/gordonwilliamsburg + url = https://github.com/gordonwilliamsburg/test +[submodule "vendor/joslangarica"] + path = vendor/joslangarica + url = https://github.com/joslangarica/AutoGPT +[submodule "vendor/isayahc"] + path = vendor/isayahc + url = https://github.com/isayahc/AutoGPT +[submodule "vendor/merwanehamadi"] + path = vendor/merwanehamadi + url = https://github.com/merwanehamadi/Auto-GPT +[submodule "vendor/HMDCrew"] + path = vendor/HMDCrew + url = https://github.com/HMDCrew/AutoGPT diff --git a/README.md b/README.md index 872eb6d..7499f81 100644 --- a/README.md +++ b/README.md @@ -44,8 +44,11 @@ The docker images are organized like this : * act_base is the foundation of all actions. * poetry_base is contains the poetry magic layer with shared containers. +# Importing new agents -# RUnning +we start with the results of the download of the change stats Auto-GPT/stats/filtered_result2.csv + +# Running `sudo docker-compose build` `sudo docker-compose up mockopenai` diff --git a/logs/full_run.txt b/logs/full_run.txt index d5861dc..acd35f5 100644 --- a/logs/full_run.txt +++ b/logs/full_run.txt @@ -1,4 +1,4 @@ - +error mdupont@mdupont-G470:~/experiments/ai-ticket$ sudo docker-compose up Creating ai-ticket_jarvis_1 ... Creating ai-ticket_auto-gpt-aj_1 ... @@ -646,11 +646,6 @@ autogpt-turbo_1 | WARNING: You do not have access to gpt-4-0314. S autogpt-turbo_1 | Skip Re-prompt: ENABLED autogpt-turbo_1 | Using Prompt Settings File: /app/config/personas/turbo/prompts.yaml autogpt-turbo_1 | -autogpt-turbo_1 | 3 PERSONAS FOUND: -autogpt-turbo_1 | ================================ -autogpt-turbo_1 | [1] - coder.engineer -autogpt-turbo_1 | [2] - coder.v2 -autogpt-turbo_1 | [3] - turbo autogpt-turbo_1 | mockopenai_1 | Retrying langchain.llms.openai.completion_with_retry.._completion_with_retry in 4.0 seconds as it raised RateLimitError: Rate limit reached for text-davinci-003 in organization org-ZjaIs4ZsUG8dzXEPqKUwjEt0 on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method.. mockopenai_1 | DEBUG { diff --git a/vendor/Asimov b/vendor/Asimov new file mode 160000 index 0000000..883956f --- /dev/null +++ b/vendor/Asimov @@ -0,0 +1 @@ +Subproject commit 883956f24d2ad6e106926bfd71ea3a5cb05ef625 diff --git a/vendor/FritzGPT b/vendor/FritzGPT new file mode 160000 index 0000000..bb960ff --- /dev/null +++ b/vendor/FritzGPT @@ -0,0 +1 @@ +Subproject commit bb960ffb9fadc45fe4fb5277053caa831f196578 diff --git a/vendor/HMDCrew b/vendor/HMDCrew new file mode 160000 index 0000000..9e353e0 --- /dev/null +++ b/vendor/HMDCrew @@ -0,0 +1 @@ +Subproject commit 9e353e09b5df39d4d410bef57cf17387331e96f6 diff --git a/vendor/Mutant-AI b/vendor/Mutant-AI new file mode 160000 index 0000000..2f79caa --- /dev/null +++ b/vendor/Mutant-AI @@ -0,0 +1 @@ +Subproject commit 2f79caa6b901d006a78c1ac9e69db4465c0f971a diff --git a/vendor/evo.ninja b/vendor/evo.ninja new file mode 160000 index 0000000..5db0da2 --- /dev/null +++ b/vendor/evo.ninja @@ -0,0 +1 @@ +Subproject commit 5db0da2c92753f9e20110a48e459f1338d6efc45 diff --git a/vendor/gordonwilliamsburg b/vendor/gordonwilliamsburg new file mode 160000 index 0000000..7951521 --- /dev/null +++ b/vendor/gordonwilliamsburg @@ -0,0 +1 @@ +Subproject commit 79515218161bca6d51948a6fc99c586e333e0029 diff --git a/vendor/isayahc b/vendor/isayahc new file mode 160000 index 0000000..74ee69d --- /dev/null +++ b/vendor/isayahc @@ -0,0 +1 @@ +Subproject commit 74ee69daf1c0a2603f19bdb1edcfdf1f4e06bcff diff --git a/vendor/joslangarica b/vendor/joslangarica new file mode 160000 index 0000000..949ab47 --- /dev/null +++ b/vendor/joslangarica @@ -0,0 +1 @@ +Subproject commit 949ab477a87cfb7a3668d7961e9443922081e098 diff --git a/vendor/merwanehamadi b/vendor/merwanehamadi new file mode 160000 index 0000000..a0fba5d --- /dev/null +++ b/vendor/merwanehamadi @@ -0,0 +1 @@ +Subproject commit a0fba5d1f13d35a1c4a8b7718550677bf62b5101 diff --git a/vendor/osGPT b/vendor/osGPT new file mode 160000 index 0000000..57bcbdf --- /dev/null +++ b/vendor/osGPT @@ -0,0 +1 @@ +Subproject commit 57bcbdf45c6c1493a4e5f6a4e72594ea13c10f93 From cf5fdd291b7e9d0caa7b1484a43a372ed949d3e4 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Wed, 18 Oct 2023 17:02:27 -0400 Subject: [PATCH 55/57] contenders --- .gitmodules | 69 ++++++++++++++++++++++++++++++++++++++++ README.md | 1 + vendor/Agora-X | 1 + vendor/MasonBoom | 1 + vendor/NaixingWang | 1 + vendor/a-cires | 1 + vendor/aorwall | 1 + vendor/clusterfudge | 1 + vendor/enachb | 1 + vendor/georgehaws | 1 + vendor/harishafeez1 | 1 + vendor/jackyluo-learning | 1 + vendor/jankomisarczyk | 1 + vendor/kabbo25 | 1 + vendor/mahmudremal | 1 + vendor/micahnz | 1 + vendor/neitheroption | 1 + vendor/pplonski | 1 + vendor/reyug9 | 1 + vendor/scottmas | 1 + vendor/taylor-ennen | 1 + vendor/vikyw89 | 1 + vendor/yodatech1988 | 1 + 23 files changed, 91 insertions(+) create mode 160000 vendor/Agora-X create mode 160000 vendor/MasonBoom create mode 160000 vendor/NaixingWang create mode 160000 vendor/a-cires create mode 160000 vendor/aorwall create mode 160000 vendor/clusterfudge create mode 160000 vendor/enachb create mode 160000 vendor/georgehaws create mode 160000 vendor/harishafeez1 create mode 160000 vendor/jackyluo-learning create mode 160000 vendor/jankomisarczyk create mode 160000 vendor/kabbo25 create mode 160000 vendor/mahmudremal create mode 160000 vendor/micahnz create mode 160000 vendor/neitheroption create mode 160000 vendor/pplonski create mode 160000 vendor/reyug9 create mode 160000 vendor/scottmas create mode 160000 vendor/taylor-ennen create mode 160000 vendor/vikyw89 create mode 160000 vendor/yodatech1988 diff --git a/.gitmodules b/.gitmodules index fa18c49..2d6a799 100644 --- a/.gitmodules +++ b/.gitmodules @@ -118,3 +118,72 @@ [submodule "vendor/HMDCrew"] path = vendor/HMDCrew url = https://github.com/HMDCrew/AutoGPT +[submodule "vendor/aorwall"] + path = vendor/aorwall + url = https://github.com/aorwall/AutoGPT +[submodule "vendor/Agora-X"] + path = vendor/Agora-X + url = https://github.com/Agora-X/Swarms-Of-Sparta +[submodule "vendor/IkuOhama"] + path = vendor/IkuOhama + url = https://github.com/IkuOhama/AutoGPT +[submodule "vendor/neitheroption"] + path = vendor/neitheroption + url = https://github.com/neitheroption/GPTAgentTest +[submodule "vendor/jackyluo-learning"] + path = vendor/jackyluo-learning + url = https://github.com/jackyluo-learning/AutoGPT +[submodule "vendor/scottmas"] + path = vendor/scottmas + url = https://github.com/scottmas/ScottGPT +[submodule "vendor/reyug9"] + path = vendor/reyug9 + url = https://github.com/reyug9/AutoGPT +[submodule "vendor/taylor-ennen"] + path = vendor/taylor-ennen + url = https://github.com/taylor-ennen/AutoGPTv2 +[submodule "vendor/mahmudremal"] + path = vendor/mahmudremal + url = https://github.com/mahmudremal/AutoGPT +[submodule "vendor/harishafeez1"] + path = vendor/harishafeez1 + url = https://github.com/harishafeez1/AutoGPT-haris +[submodule "vendor/pplonski"] + path = vendor/pplonski + url = https://github.com/pplonski/mljar-agent +[submodule "vendor/meta-introspector"] + path = vendor/meta-introspector + url = https://github.com/meta-introspector/Auto-GPT +[submodule "vendor/georgehaws"] + path = vendor/georgehaws + url = https://github.com/georgehaws/interhalogen +[submodule "vendor/a-cires"] + path = vendor/a-cires + url = https://github.com/a-cires/AutoGPT +[submodule "vendor/MasonBoom"] + path = vendor/MasonBoom + url = https://github.com/MasonBoom/AutoGPT +[submodule "vendor/enachb"] + path = vendor/enachb + url = https://github.com/enachb/AutoGPT-trading +[submodule "vendor/micahnz"] + path = vendor/micahnz + url = https://github.com/micahnz/AutoGPT +[submodule "vendor/kabbo25"] + path = vendor/kabbo25 + url = https://github.com/kabbo25/AutoGPT +[submodule "vendor/yodatech1988"] + path = vendor/yodatech1988 + url = https://github.com/yodatech1988/AutoGPT +[submodule "vendor/jankomisarczyk"] + path = vendor/jankomisarczyk + url = https://github.com/jankomisarczyk/Jarvis +[submodule "vendor/NaixingWang"] + path = vendor/NaixingWang + url = https://github.com/NaixingWang/AutoGPT +[submodule "vendor/clusterfudge"] + path = vendor/clusterfudge + url = https://github.com/clusterfudge/Auto-GPT +[submodule "vendor/vikyw89"] + path = vendor/vikyw89 + url = https://github.com/vikyw89/Auto-GPT diff --git a/README.md b/README.md index 7499f81..3ba51fd 100644 --- a/README.md +++ b/README.md @@ -48,6 +48,7 @@ The docker images are organized like this : we start with the results of the download of the change stats Auto-GPT/stats/filtered_result2.csv + # Running `sudo docker-compose build` diff --git a/vendor/Agora-X b/vendor/Agora-X new file mode 160000 index 0000000..a85b43d --- /dev/null +++ b/vendor/Agora-X @@ -0,0 +1 @@ +Subproject commit a85b43d8cccd23bbdf26174056f1c333c02cdce5 diff --git a/vendor/MasonBoom b/vendor/MasonBoom new file mode 160000 index 0000000..fbc2c53 --- /dev/null +++ b/vendor/MasonBoom @@ -0,0 +1 @@ +Subproject commit fbc2c531396d59b9fded4f535cd15595cdbf4360 diff --git a/vendor/NaixingWang b/vendor/NaixingWang new file mode 160000 index 0000000..41f0b47 --- /dev/null +++ b/vendor/NaixingWang @@ -0,0 +1 @@ +Subproject commit 41f0b472c07604fc2a8b0b87c3d4babc93e5dae8 diff --git a/vendor/a-cires b/vendor/a-cires new file mode 160000 index 0000000..4b4d84b --- /dev/null +++ b/vendor/a-cires @@ -0,0 +1 @@ +Subproject commit 4b4d84b72e906047778cb15a6280079d93babb85 diff --git a/vendor/aorwall b/vendor/aorwall new file mode 160000 index 0000000..e9b64ad --- /dev/null +++ b/vendor/aorwall @@ -0,0 +1 @@ +Subproject commit e9b64adae9fce180a392c726457e150177e746fb diff --git a/vendor/clusterfudge b/vendor/clusterfudge new file mode 160000 index 0000000..a0e383f --- /dev/null +++ b/vendor/clusterfudge @@ -0,0 +1 @@ +Subproject commit a0e383f4d9e7e0a60804bbf7e600e38a45530d96 diff --git a/vendor/enachb b/vendor/enachb new file mode 160000 index 0000000..320823c --- /dev/null +++ b/vendor/enachb @@ -0,0 +1 @@ +Subproject commit 320823c26c755d23cba47bf18cea4467b3683d5b diff --git a/vendor/georgehaws b/vendor/georgehaws new file mode 160000 index 0000000..6cc4e68 --- /dev/null +++ b/vendor/georgehaws @@ -0,0 +1 @@ +Subproject commit 6cc4e68c27113ab0cfd40cb7e6d0e9805e014061 diff --git a/vendor/harishafeez1 b/vendor/harishafeez1 new file mode 160000 index 0000000..da6311f --- /dev/null +++ b/vendor/harishafeez1 @@ -0,0 +1 @@ +Subproject commit da6311fb1ed310cf1c748193fa0c97959054396f diff --git a/vendor/jackyluo-learning b/vendor/jackyluo-learning new file mode 160000 index 0000000..766796a --- /dev/null +++ b/vendor/jackyluo-learning @@ -0,0 +1 @@ +Subproject commit 766796ae1e8c07cf2a03b607621c3da6e1f01a31 diff --git a/vendor/jankomisarczyk b/vendor/jankomisarczyk new file mode 160000 index 0000000..78b98a0 --- /dev/null +++ b/vendor/jankomisarczyk @@ -0,0 +1 @@ +Subproject commit 78b98a018b4ea112665815bcd3b797dac76b35e5 diff --git a/vendor/kabbo25 b/vendor/kabbo25 new file mode 160000 index 0000000..79da6b6 --- /dev/null +++ b/vendor/kabbo25 @@ -0,0 +1 @@ +Subproject commit 79da6b69738b06f0e85f537a759ade5c0e9b04ef diff --git a/vendor/mahmudremal b/vendor/mahmudremal new file mode 160000 index 0000000..1eadc64 --- /dev/null +++ b/vendor/mahmudremal @@ -0,0 +1 @@ +Subproject commit 1eadc64dc0a693c7c9de77ddaef857f3a36f7950 diff --git a/vendor/micahnz b/vendor/micahnz new file mode 160000 index 0000000..4840dd2 --- /dev/null +++ b/vendor/micahnz @@ -0,0 +1 @@ +Subproject commit 4840dd25a311be94ef8a7f75ed5f4c85f0dacb70 diff --git a/vendor/neitheroption b/vendor/neitheroption new file mode 160000 index 0000000..38790a2 --- /dev/null +++ b/vendor/neitheroption @@ -0,0 +1 @@ +Subproject commit 38790a27ed2c1b63a301b6a67e7590f2d30de53e diff --git a/vendor/pplonski b/vendor/pplonski new file mode 160000 index 0000000..ba9b66a --- /dev/null +++ b/vendor/pplonski @@ -0,0 +1 @@ +Subproject commit ba9b66ab363ed7f442f61967acff6ce54a0dfadd diff --git a/vendor/reyug9 b/vendor/reyug9 new file mode 160000 index 0000000..d810dfc --- /dev/null +++ b/vendor/reyug9 @@ -0,0 +1 @@ +Subproject commit d810dfc472cf1658ab30e92c9700fa6c9dd8c8a6 diff --git a/vendor/scottmas b/vendor/scottmas new file mode 160000 index 0000000..4841d31 --- /dev/null +++ b/vendor/scottmas @@ -0,0 +1 @@ +Subproject commit 4841d31179870d9edbdc2fbd9c36062bd8efcaff diff --git a/vendor/taylor-ennen b/vendor/taylor-ennen new file mode 160000 index 0000000..b4fccc1 --- /dev/null +++ b/vendor/taylor-ennen @@ -0,0 +1 @@ +Subproject commit b4fccc16715fb86b19fe9648eb5526fdf644663d diff --git a/vendor/vikyw89 b/vendor/vikyw89 new file mode 160000 index 0000000..bf03dd8 --- /dev/null +++ b/vendor/vikyw89 @@ -0,0 +1 @@ +Subproject commit bf03dd8739b64fce56981189a208a222eadb1414 diff --git a/vendor/yodatech1988 b/vendor/yodatech1988 new file mode 160000 index 0000000..d6abb27 --- /dev/null +++ b/vendor/yodatech1988 @@ -0,0 +1 @@ +Subproject commit d6abb27db61142a70defd0c75b53985ea9a71fce From a7712e6596fba20b72c451ebb9ff732460ee34fd Mon Sep 17 00:00:00 2001 From: mike dupont Date: Thu, 19 Oct 2023 09:46:36 -0400 Subject: [PATCH 56/57] ghost coder is now working, agent protocol running now you can run : ` sudo docker-compose up -d mockopenai ghostcoder ` ` sudo docker-compose logs -f mockopenai ghostcoder &` ` sudo docker-compose run arena` --- .gitmodules | 6 ++++ README.md | 4 +++ arena/Dockerfile | 8 ++++- arena/agent.json | 6 ++++ arena/run.py | 47 +++++++++++++++++++++++++++++ data.json | 6 ++++ docker-compose.yml | 31 +++++++++++++++++-- logs/todo.txt | 1 + vendor/Mutant-AI | 1 - vendor/act_base | 2 +- vendor/agent-protocol-client-python | 1 + vendor/agent-protocol-sdk-python | 1 + vendor/aorwall | 2 +- vendor/bored.sh | 10 ++++++ 14 files changed, 120 insertions(+), 6 deletions(-) create mode 100644 arena/agent.json create mode 100644 arena/run.py create mode 100644 data.json create mode 100644 logs/todo.txt delete mode 160000 vendor/Mutant-AI create mode 160000 vendor/agent-protocol-client-python create mode 160000 vendor/agent-protocol-sdk-python create mode 100644 vendor/bored.sh diff --git a/.gitmodules b/.gitmodules index 2d6a799..972d804 100644 --- a/.gitmodules +++ b/.gitmodules @@ -187,3 +187,9 @@ [submodule "vendor/vikyw89"] path = vendor/vikyw89 url = https://github.com/vikyw89/Auto-GPT +[submodule "vendor/agent-protocol-client-python"] + path = vendor/agent-protocol-client-python + url = https://github.com/AI-Engineer-Foundation/agent-protocol-client-python +[submodule "vendor/agent-protocol-sdk-python"] + path = vendor/agent-protocol-sdk-python + url = https://github.com/AI-Engineer-Foundation/agent-protocol-sdk-python diff --git a/README.md b/README.md index 3ba51fd..d7529d7 100644 --- a/README.md +++ b/README.md @@ -54,3 +54,7 @@ we start with the results of the download of the change stats Auto-GPT/stats/fil `sudo docker-compose build` `sudo docker-compose up mockopenai` `sudo docker-compose run auto-gpt-chat-gpt-prompts ` + + +# Example of verbose build +`sudo docker-compose --progress=plain --log-level DEBUG --ansi=never --verbose build ghostcoder` diff --git a/arena/Dockerfile b/arena/Dockerfile index 10e22f5..6f8a85a 100644 --- a/arena/Dockerfile +++ b/arena/Dockerfile @@ -1 +1,7 @@ -from debian \ No newline at end of file +from h4ckermike/act_base +add vendor/agent-protocol-client-python/ /opt/agent-protocol +add vendor/agent-protocol-sdk-python/ /opt/agent-protocol-sdk +add arena/agent.json /tmp/ +add arena/run.py /tmp/ +RUN pip install /opt/agent-protocol +RUN pip install /opt/agent-protocol-sdk diff --git a/arena/agent.json b/arena/agent.json new file mode 100644 index 0000000..4a4ef06 --- /dev/null +++ b/arena/agent.json @@ -0,0 +1,6 @@ +{ + "input":"Write hello", + "additional_input":{ + "test_run_id": "123" + } +} diff --git a/arena/run.py b/arena/run.py new file mode 100644 index 0000000..38cd1aa --- /dev/null +++ b/arena/run.py @@ -0,0 +1,47 @@ +import asyncio + +from agent_protocol.models import StepRequestBody +from agent_protocol_client import ( + Configuration, + ApiClient, + StepRequestBody, + TaskRequestBody, + AgentApi, +) + +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = Configuration(host="http://ghostcoder:8000") + + +async def main(): + # Enter a context with an instance of the API client + async with ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = AgentApi(api_client) + task_request_body = TaskRequestBody(input="Write 'Hello world!' to hi.txt.") + + response = await api_instance.create_agent_task( + task_request_body=task_request_body + ) + print("The response of AgentApi->create_agent_task:\n") + print(response) + print("\n\n") + + task_id = response.task_id + i = 1 + + while ( + step := await api_instance.execute_agent_task_step( + task_id=task_id, step_request_body=StepRequestBody(input=str(i)) + ) + ) and step.is_last is False: + print("The response of AgentApi->execute_agent_task_step:\n") + print(step) + print("\n\n") + i += 1 + + print("Agent finished its work!") + + +asyncio.run(main()) diff --git a/data.json b/data.json new file mode 100644 index 0000000..4a4ef06 --- /dev/null +++ b/data.json @@ -0,0 +1,6 @@ +{ + "input":"Write hello", + "additional_input":{ + "test_run_id": "123" + } +} diff --git a/docker-compose.yml b/docker-compose.yml index e63a60a..fb61965 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,11 +1,15 @@ version: '3' services: + arena: depends_on: - mockopenai - - the_agency_1 + - ghostcoder build: - context : arena + context : . + dockerfile: arena/Dockerfile + #entrypoint: bash -c "curl -d @/tmp/agent.json --request POST http://ghostcoder:8000/ap/v1/agent/tasks " + entrypoint: python /tmp/run.py act_base: #root base of action build: vendor/act_base @@ -356,3 +360,26 @@ services: #poetry run agent start forge --setup #poetry run agbenchmark --mock #poetry run agbenchmark --test=WriteFile + + ghostcoder: + #ports: + # - "8001:8000" # we can expose ports for each bot + #CMD ["poetry", "run", "python", "autogpt/__main__.py"] + entrypoint: bash -c "poetry run python -m forge agent start ghostcoder" + #entrypoint: bash + environment: + - GITHUB_PAT="${GITHUB_PAT}" + - GITHUB_REPO="jmikedupont2/ai-ticket" + - OPENAI_API_KEY=your-openai-api-key + - OPENAI_API_BASE=http://mockopenai:5000/v1 + - AGENT_WORKSPACE=/tmp/workspace/ + - DATABASE_STRING=sqlite:///agent.db + build: + context: vendor/aorwall/ + dockerfile: autogpts/ghostcoder/Dockerfile + args: + BASE_IMAGE: h4ckermike/basic_agent + volumes: + - ./vendor/aorwall/benchmark:/benchmark + - ./vendor/aorwall/autogpts/ghostcoder:/app/ #mount the source in for editing witho + - ./tests/ai_settings.yaml:/tmp/ai_settings.yaml diff --git a/logs/todo.txt b/logs/todo.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/logs/todo.txt @@ -0,0 +1 @@ + diff --git a/vendor/Mutant-AI b/vendor/Mutant-AI deleted file mode 160000 index 2f79caa..0000000 --- a/vendor/Mutant-AI +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 2f79caa6b901d006a78c1ac9e69db4465c0f971a diff --git a/vendor/act_base b/vendor/act_base index dbd8163..e0dadf6 160000 --- a/vendor/act_base +++ b/vendor/act_base @@ -1 +1 @@ -Subproject commit dbd81631ba84fd0f5c252fc7df8fe99b1f6a4bec +Subproject commit e0dadf60d906d3c7e9e96ce2fe0b69870b953dae diff --git a/vendor/agent-protocol-client-python b/vendor/agent-protocol-client-python new file mode 160000 index 0000000..a9006c7 --- /dev/null +++ b/vendor/agent-protocol-client-python @@ -0,0 +1 @@ +Subproject commit a9006c766fb00dc3d570af586be1a1297c5092d6 diff --git a/vendor/agent-protocol-sdk-python b/vendor/agent-protocol-sdk-python new file mode 160000 index 0000000..8ba4c9c --- /dev/null +++ b/vendor/agent-protocol-sdk-python @@ -0,0 +1 @@ +Subproject commit 8ba4c9c2dc8d3faca1870e2b8822fbd6d9e9df37 diff --git a/vendor/aorwall b/vendor/aorwall index e9b64ad..5cbde57 160000 --- a/vendor/aorwall +++ b/vendor/aorwall @@ -1 +1 @@ -Subproject commit e9b64adae9fce180a392c726457e150177e746fb +Subproject commit 5cbde570e53a8e4489b01120bee49e39005aade1 diff --git a/vendor/bored.sh b/vendor/bored.sh new file mode 100644 index 0000000..033d49f --- /dev/null +++ b/vendor/bored.sh @@ -0,0 +1,10 @@ +git submodule add https://github.com/jinho-kim-osd/osGPT +git submodule add https://github.com/bsenst/FritzGPT +git submodule add https://github.com/murnanedaniel/Asimov +git submodule add https://github.com/polywrap/evo.ninja +git submodule add https://github.com/cluna80/Mutant-AI +git submodule add https://github.com/gordonwilliamsburg/test gordonwilliamsburg +git submodule add https://github.com/joslangarica/AutoGPT joslangarica +git submodule add https://github.com/isayahc/AutoGPT isayahc +git submodule add https://github.com/merwanehamadi/Auto-GPT merwanehamadi +git submodule add https://github.com/HMDCrew/AutoGPT HMDCrew From 638883a50b4fd48fae5ec31c9d4b4ce66be65260 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Thu, 19 Oct 2023 11:22:42 -0400 Subject: [PATCH 57/57] getting the agency running! --- arena/Dockerfile | 4 +-- arena/agent.json | 6 ---- arena/run.py | 4 +-- docker-compose.yml | 69 +++++++++++++--------------------------------- vendor/TheAgency | 2 +- 5 files changed, 24 insertions(+), 61 deletions(-) delete mode 100644 arena/agent.json diff --git a/arena/Dockerfile b/arena/Dockerfile index 6f8a85a..8ada9ff 100644 --- a/arena/Dockerfile +++ b/arena/Dockerfile @@ -1,7 +1,7 @@ from h4ckermike/act_base add vendor/agent-protocol-client-python/ /opt/agent-protocol add vendor/agent-protocol-sdk-python/ /opt/agent-protocol-sdk -add arena/agent.json /tmp/ -add arena/run.py /tmp/ RUN pip install /opt/agent-protocol RUN pip install /opt/agent-protocol-sdk + +add arena/run.py /tmp/ diff --git a/arena/agent.json b/arena/agent.json deleted file mode 100644 index 4a4ef06..0000000 --- a/arena/agent.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "input":"Write hello", - "additional_input":{ - "test_run_id": "123" - } -} diff --git a/arena/run.py b/arena/run.py index 38cd1aa..64e652e 100644 --- a/arena/run.py +++ b/arena/run.py @@ -1,5 +1,5 @@ import asyncio - +import os from agent_protocol.models import StepRequestBody from agent_protocol_client import ( Configuration, @@ -11,7 +11,7 @@ # Defining the host is optional and defaults to http://localhost # See configuration.py for a list of all supported configuration parameters. -configuration = Configuration(host="http://ghostcoder:8000") +configuration = Configuration(host=os.environ.get("TARGET")) async def main(): diff --git a/docker-compose.yml b/docker-compose.yml index fb61965..512b937 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -9,6 +9,9 @@ services: context : . dockerfile: arena/Dockerfile #entrypoint: bash -c "curl -d @/tmp/agent.json --request POST http://ghostcoder:8000/ap/v1/agent/tasks " + environment: + - TARGET=http://the_agency_3:8000 + entrypoint: python /tmp/run.py act_base: #root base of action @@ -218,68 +221,33 @@ services: - ./vendor/Jarvis/benchmark:/benchmark - ./vendor/Jarvis/autogpts/autogpt//app/ #mount the source in for editing witho - ./tests/ai_settings.yaml:/tmp/ai_settings.yaml - the_agency_1: + + the_agency_3: depends_on: - mockopenai - - entrypoint: bash -c "poetry install && poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " - environment: - - GITHUB_PAT="${GITHUB_PAT}" - - GITHUB_REPO="jmikedupont2/ai-ticket" - - OPENAI_API_KEY=your-openai-api-key - - OPENAI_API_BASE=http://mockopenai:5000/v1 - build: - context: vendor/TheAgency/ - dockerfile: autogpts/autogpt/Dockerfile - volumes: - - ./vendor/TheAgency/benchmark:/benchmark - - ./vendor/TheAgency/autogpts/autogpt/:/app/ #mount the source in for editing witho - - ./tests/ai_settings.yaml:/tmp/ai_settings.yaml - the_agency_2: - #entrypoint: bash -c "poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " - entrypoint: bash - environment: - - GITHUB_PAT="${GITHUB_PAT}" - - GITHUB_REPO="jmikedupont2/ai-ticket" - - OPENAI_API_KEY=your-openai-api-key - - OPENAI_API_BASE=http://mockopenai:5000/v1 - build: - context: vendor/TheAgency/ - dockerfile : autogpts/ZEROAGPT_02/Dockerfile - volumes: - - ./vendor/TheAgency/benchmark:/benchmark - - ./vendor/TheAgency/autogpts/ZEROAGPT_02/:/app/ #mount the source in for editing witho - - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml - the_agency_3: - entrypoint: bash -c "poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " - environment: - - GITHUB_PAT="${GITHUB_PAT}" - - GITHUB_REPO="jmikedupont2/ai-ticket" - - OPENAI_API_KEY=your-openai-api-key - - OPENAI_API_BASE=http://mockopenai:5000/v1 - build: - #context: vendor/TheAgency/autogpts/ZEROAGPT_01/ - context: vendor/TheAgency/ - dockerfile : autogpts/ZEROAGPT_01/Dockerfile - - volumes: - - ./vendor/TheAgency/autogpts/ZEROAGPT_01/:/app/ #mount the source in for editing witho - - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml - the_agency_4: - entrypoint: bash -c "poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " + #entrypoint: bash -c "poetry install && poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " + entrypoint: bash -c "poetry run python -m forge agent start ZEROAGPT" environment: - GITHUB_PAT="${GITHUB_PAT}" - GITHUB_REPO="jmikedupont2/ai-ticket" - OPENAI_API_KEY=your-openai-api-key - OPENAI_API_BASE=http://mockopenai:5000/v1 + - BOT_NAME=ZEROAGPT_03 #FIXME do we need this? + - AGENT_WORKSPACE=/tmp/workspace/ + - AGENT_MODEL=/tmp/workspace/ + - OPENAI_MODEL="gpt-4" + - DATABASE_STRING=sqlite:///agent.db build: - #context: vendor/TheAgency/autogpts/ZEROAGPT_03/ context: vendor/TheAgency/ - dockerfile : autogpts/ZEROAGPT_03/Dockerfile + dockerfile: autogpts/ZEROAGPT_03/Dockerfile + args: + BASE_IMAGE: h4ckermike/basic_agent volumes: + - ./vendor/TheAgency/benchmark:/benchmark - ./vendor/TheAgency/autogpts/ZEROAGPT_03/:/app/ #mount the source in for editing witho - - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml + - ./tests/ai_settings.yaml:/tmp/ai_settings.yaml + mason_boom: entrypoint: bash -c "poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " environment: @@ -292,6 +260,7 @@ services: volumes: - ./vendor/MasonBoomPersonalAssistant/autogpts/autogpt/:/app/ #mount the source in for editixong witho - ./vendor/Auto-GPT/autogpts/autogpt/ai_settings.yaml:/tmp/ai_settings.yaml + mason_boom_pa: entrypoint: bash -c "poetry run python -m autogpt -y --skip-news --ai-settings /tmp/ai_settings.yaml " environment: diff --git a/vendor/TheAgency b/vendor/TheAgency index 8f4b05c..08198a9 160000 --- a/vendor/TheAgency +++ b/vendor/TheAgency @@ -1 +1 @@ -Subproject commit 8f4b05cf20bf5d0148ad382dc8e6035714c37651 +Subproject commit 08198a982ab6e4eef17815119fc8c2a9228f3f44