This repository has been archived by the owner on Jul 29, 2024. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 0
/
pyproject.toml
102 lines (96 loc) · 1.7 KB
/
pyproject.toml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
# PEP 517 build configuration: hatchling produces the sdist/wheel.
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
# Core package metadata (PEP 621).
[project]
name = "auto-llama"
version = "0.0.5"
description = "Supercharge your local LLM with different agents"
readme = "README.md"
license = { file = "LICENSE" }
keywords = ["AI", "LLM", "Llama"]
authors = [
    { name = "LufixSch" },
]
classifiers = [
    "Programming Language :: Python :: 3",
    "Development Status :: 3 - Alpha",
    "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
    "Operating System :: OS Independent",
]
# Hard requirements; everything else is opt-in via extras below.
dependencies = [
    "Pillow",
    "numpy",
]
# Extras are organized hierarchically: fine-grained extras ("extras.text",
# "agents.code", ...) plus umbrella extras ("extras", "agents", "llm", ...)
# that pull in their sub-extras via self-referencing PEP 508 requirements
# ("auto-llama[...]").
[project.optional-dependencies]
"extras.text" = [
    "num2words",
    "nltk",
    "spacy",
    "coreferee",
    "txtai",
    "beautifulsoup4",
    "pypdf",
]
"extras.audio" = [
    "txtai[pipeline-audio]",
]
extras = [
    "auto-llama[extras.text]",
    "auto-llama[extras.audio]",
]
"agents.research" = [
    "wikipedia",
    "duckduckgo_search",
    "arxiv",
    # Was "auto-llama[text]": no extra named "text" is declared in this file;
    # the text helpers live under "extras.text".
    "auto-llama[extras.text]",
]
"agents.code" = [
    "docker",
    "pandas",
    "requests",
]
"agents.similarity" = [
    "txtai",
]
agents = [
    # Fixed typo: "agebts.similarity" -> "agents.similarity".
    "auto-llama[agents.research, agents.code, agents.similarity]",
]
"llm.openai" = [
    "openai",
]
llm = [
    "auto-llama[llm.openai]",
]
"memory.txtai" = [
    # Was "auto-llama[module.nlp]": no "module.nlp" extra exists anywhere in
    # this file. The txtai-backed memory needs the text/NLP stack, declared
    # under "extras.text" (same dependency as selector.txtai and
    # preprocessor.coref) — confirm against the memory module's imports.
    "auto-llama[extras.text]",
]
memory = [
    "auto-llama[memory.txtai]",
]
"selector.txtai" = [
    "auto-llama[extras.text]",
]
selector = [
    "auto-llama[selector.txtai]",
]
"preprocessor.coref" = [
    "auto-llama[extras.text]",
]
preprocessor = [
    "auto-llama[preprocessor.coref]",
]
all = [
    # Was "auto-llama[extras, agent, llm, memory, selector]": the umbrella
    # extra is named "agents" (not "agent"), and "preprocessor" was missing
    # from the roll-up.
    "auto-llama[extras, agents, llm, memory, selector, preprocessor]",
]
# Hatch build layout: the project ships three packages from the pkg/ tree.
[tool.hatch.build]
packages = [
"pkg/auto_llama/",
"pkg/auto_llama_agents/",
"pkg/auto_llama_memory/"
]
# Black formatter: wider 120-column lines; literal string keeps the regex
# for .py/.pyi files free of double escaping.
[tool.black]
line-length = 120
include = '\.pyi?$'
# isort: defer to Black's formatting rules; never touch package __init__ files.
[tool.isort]
profile = "black"
extend_skip = ["__init__.py"]