fix(settings): resolve PydanticUserError in repo_agent/settings.py (#80)

- Update dependencies to latest versions
- Switch to llama-index-llms-openai
- Specify minimum Python version as 3.11
- Export to requirements.txt
Umpire2018 authored Oct 10, 2024
1 parent e559bf1 commit 607aeba
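
The llama-index bumps track the 0.10 package split, where the monolithic `llama-index` package was broken into `llama-index-core` plus per-integration distributions such as `llama-index-llms-openai`. A before/after sketch of the import change, with the new paths taken from the diffs below:

```python
# Old import paths (llama-index < 0.10.0), removed in this commit:
# from llama_index import PromptTemplate
# from llama_index.llms import OpenAI

# New import paths used with llama-index-llms-openai >= 0.2.12:
from llama_index.core import PromptTemplate
from llama_index.llms.openai import OpenAI
```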
Showing 6 changed files with 3,123 additions and 2,727 deletions.
3,107 changes: 1,657 additions & 1,450 deletions pdm.lock

Large diffs are not rendered by default.

7 changes: 3 additions & 4 deletions pyproject.toml
@@ -6,7 +6,7 @@ maintainers = [
 {name = "Edwards Arno", email = "[email protected]"},
 ]
 license = {text = "Apache-2.0"}
-requires-python = ">=3.10,<4.0"
+requires-python = ">=3.11,<4.0"
 dependencies = [
 "loguru>=0.7.2",
 "openai>=1.10.0",
@@ -21,16 +21,15 @@ dependencies = [
 "pydantic-settings>=2.2.1",
 "tomli>=2.0.1",
 "tomli-w>=1.0.0",
-"llama-index<0.10.0",
 "colorama>=0.4.6",
+"llama-index-llms-openai>=0.2.12",
 ]
 name = "repoagent"
-version = "0.1.3"
+version = "0.1.4"
 description = "An LLM-Powered Framework for Repository-level Code Documentation Generation."
 readme = "README.md"
 classifiers = [
 "Programming Language :: Python :: 3",
-"Programming Language :: Python :: 3.10",
 "Programming Language :: Python :: 3.11",
 "Programming Language :: Python :: 3.12",
 "Topic :: Scientific/Engineering :: Artificial Intelligence"
5 changes: 3 additions & 2 deletions repo_agent/chat_with_repo/prompt.py
@@ -1,11 +1,12 @@
-from llama_index.llms import OpenAI
+from llama_index.llms.openai import OpenAI
+from llama_index.core.llms.function_calling import FunctionCallingLLM

 from repo_agent.chat_with_repo.json_handler import JsonFileProcessor
 from repo_agent.log import logger


 class TextAnalysisTool:
-    def __init__(self, llm, db_path):
+    def __init__(self, llm: FunctionCallingLLM, db_path):
         self.jsonsearch = JsonFileProcessor(db_path)
         self.llm = llm
         self.db_path = db_path
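
Since the new `OpenAI` class subclasses `FunctionCallingLLM`, callers satisfy the tightened annotation unchanged. A hypothetical construction sketch (the model name and db_path are illustrative, not taken from the repo):

```python
from llama_index.llms.openai import OpenAI

from repo_agent.chat_with_repo.prompt import TextAnalysisTool

# Assumed model name; the client reads OPENAI_API_KEY from the environment.
llm = OpenAI(model="gpt-4o-mini")

# db_path is whatever JSON database JsonFileProcessor expects; the filename here is made up.
tool = TextAnalysisTool(llm, "project_hierarchy.json")
```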
4 changes: 2 additions & 2 deletions repo_agent/chat_with_repo/rag.py
@@ -1,7 +1,7 @@
 import json

-from llama_index import PromptTemplate
-from llama_index.llms import OpenAI
+from llama_index.core import PromptTemplate
+from llama_index.llms.openai import OpenAI
 from openai import OpenAI as AI

 from repo_agent.chat_with_repo.json_handler import JsonFileProcessor
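
For reference, `PromptTemplate` now lives in `llama_index.core`; a small usage sketch (the template text is illustrative, not the prompt defined in rag.py):

```python
from llama_index.core import PromptTemplate

# Illustrative template; rag.py defines its own prompts.
qa_template = PromptTemplate(
    "Answer the question using only the context.\n"
    "Context: {context}\n"
    "Question: {question}\n"
)
prompt_text = qa_template.format(context="...", question="What does this repo do?")
```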
5 changes: 3 additions & 2 deletions repo_agent/settings.py
@@ -10,6 +10,7 @@
     SecretStr,
     field_serializer,
     field_validator,
+    FieldSerializationInfo
 )
 from pydantic_settings import BaseSettings

@@ -35,7 +36,7 @@ class ProjectSettings(BaseSettings):
     log_level: LogLevel = LogLevel.INFO

     @field_serializer("ignore_list")
-    def serialize_ignore_list(self, ignore_list: list[str] = []):
+    def serialize_ignore_list(self, ignore_list: list[str], info: FieldSerializationInfo):
         if ignore_list == [""]:
             self.ignore_list = []  # If the ignore_list is empty, set it to an empty list
             return []
@@ -62,7 +63,7 @@ def set_log_level(cls, v: str) -> LogLevel:
         raise ValueError(f"Invalid log level: {v}")

     @field_serializer("target_repo")
-    def serialize_target_repo(self, target_repo: DirectoryPath):
+    def serialize_target_repo(self, target_repo: DirectoryPath, info: FieldSerializationInfo):
         return str(target_repo)
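
The settings.py hunks are the PydanticUserError fix itself: Pydantic v2 accepts only a fixed set of `@field_serializer` signatures, and the old one-argument form with a mutable default apparently failed that signature check; adding the `info: FieldSerializationInfo` parameter (and dropping the default) matches a documented signature. A reduced sketch of the fixed pattern, limited to the fields visible in this diff (the real ProjectSettings has more fields and validators):

```python
from pathlib import Path

from pydantic import DirectoryPath, FieldSerializationInfo, field_serializer
from pydantic_settings import BaseSettings


class ProjectSettings(BaseSettings):
    target_repo: DirectoryPath = Path(".")
    ignore_list: list[str] = []

    @field_serializer("ignore_list")
    def serialize_ignore_list(self, ignore_list: list[str], info: FieldSerializationInfo):
        # A lone empty string means "no ignore patterns".
        if ignore_list == [""]:
            return []
        return ignore_list

    @field_serializer("target_repo")
    def serialize_target_repo(self, target_repo: DirectoryPath, info: FieldSerializationInfo):
        return str(target_repo)


# The class now builds without raising PydanticUserError, and the serializers run on dump.
print(ProjectSettings().model_dump())
```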