diff --git a/llm/poetry.lock b/llm/poetry.lock
index 38b2290..d979e2e 100644
--- a/llm/poetry.lock
+++ b/llm/poetry.lock
@@ -721,12 +721,12 @@ requests = ">=2,<3"
 
 [[package]]
 name = "llama-cpp-python"
-version = "0.2.50"
+version = "0.2.72"
 description = "Python bindings for the llama.cpp library"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "llama_cpp_python-0.2.50.tar.gz", hash = "sha256:28caf4e665dac62ad1d347061b7a96669af7fb9e7f1e4e8c17e736504e321a51"},
+    {file = "llama_cpp_python-0.2.72.tar.gz", hash = "sha256:7da4957043927f73d4425c919c843581e5a3ceb5e65cafbc29bfb45703814a56"},
 ]
 
 [package.dependencies]
@@ -738,7 +738,7 @@ typing-extensions = ">=4.5.0"
 [package.extras]
 all = ["llama_cpp_python[dev,server,test]"]
 dev = ["black (>=23.3.0)", "httpx (>=0.24.1)", "mkdocs (>=1.4.3)", "mkdocs-material (>=9.1.18)", "mkdocstrings[python] (>=0.22.0)", "pytest (>=7.4.0)", "twine (>=4.0.2)"]
-server = ["fastapi (>=0.100.0)", "pydantic-settings (>=2.0.1)", "sse-starlette (>=1.6.1)", "starlette-context (>=0.3.6,<0.4)", "uvicorn (>=0.22.0)"]
+server = ["PyYAML (>=5.1)", "fastapi (>=0.100.0)", "pydantic-settings (>=2.0.1)", "sse-starlette (>=1.6.1)", "starlette-context (>=0.3.6,<0.4)", "uvicorn (>=0.22.0)"]
 test = ["httpx (>=0.24.1)", "pytest (>=7.4.0)", "scipy (>=1.10)"]
 
 [[package]]
@@ -1568,4 +1568,4 @@ multidict = ">=4.0"
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.11"
-content-hash = "3267f23ba8f793923d31046310ab3275f964c50369461a8d94b5182ab994dd83"
+content-hash = "f171eeab3c58119278295cc4dd7492ac62153a1fec70216413ad3165cf89c5e4"
diff --git a/llm/pyproject.toml b/llm/pyproject.toml
index 4f03f94..a50dbd8 100644
--- a/llm/pyproject.toml
+++ b/llm/pyproject.toml
@@ -8,7 +8,7 @@ authors = ["Kleczyk "]
 python = "^3.11"
 openai = "^1.8.0"
 langchain = "^0.1.11"
-llama-cpp-python = "^0.2.29"
+llama-cpp-python = "^0.2.72"
 uvicorn = "^0.26.0"