From 7ac878aa8b721c7a0b09f5ea82ee4b4c2ccbff89 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 27 May 2024 01:16:58 +0000
Subject: [PATCH] build(deps): bump llama-cpp-python from 0.2.75 to 0.2.76

Bumps [llama-cpp-python](https://github.com/abetlen/llama-cpp-python) from 0.2.75 to 0.2.76.
- [Release notes](https://github.com/abetlen/llama-cpp-python/releases)
- [Changelog](https://github.com/abetlen/llama-cpp-python/blob/main/CHANGELOG.md)
- [Commits](https://github.com/abetlen/llama-cpp-python/compare/v0.2.75...v0.2.76)

---
updated-dependencies:
- dependency-name: llama-cpp-python
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot]
---
 poetry.lock    | 6 +++---
 pyproject.toml | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index a1f1b3e..211e976 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -143,12 +143,12 @@ i18n = ["Babel (>=2.7)"]
 
 [[package]]
 name = "llama-cpp-python"
-version = "0.2.75"
+version = "0.2.76"
 description = "Python bindings for the llama.cpp library"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "llama_cpp_python-0.2.75.tar.gz", hash = "sha256:aee9383935c42e812ee84265b1dafe5f0e3a20ee47216529b64a2ed6caaaed44"},
+    {file = "llama_cpp_python-0.2.76.tar.gz", hash = "sha256:a4e2ab6b74dc87f565a21e4f1617c030f92d5b341375d7173876d238613a50ab"},
 ]
 
 [package.dependencies]
@@ -577,4 +577,4 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)",
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.11"
-content-hash = "a0cb2177a3a0ad152c29374006559fac6de8f98320f97ccb5b4a029bd4085b7e"
+content-hash = "05855bea54d86aa4240dccd199a4d2a5d531e1b31c93096fd2b7a683fe894669"
diff --git a/pyproject.toml b/pyproject.toml
index 9323b87..e624b47 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -8,7 +8,7 @@ readme = "README.md"
 
 [tool.poetry.dependencies]
 python = "^3.9"
-llama-cpp-python = {extras = ["server"], version = "^0.2.75"}
+llama-cpp-python = {extras = ["server"], version = "^0.2.76"}
 uvicorn = "^0.29.0"