env.sample.jsonc
{
  // ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――
  // Brave Search API Key
  // Used by the agent function `search_web()`, which agents can call to help with their task.
  // https://brave.com/search/api/ (there is a free plan)
  // The "Data for AI" plan is preferred over "Data for Search" since it includes some useful additional properties.
  "brave_search_api_key": "[BRAVE_SEARCH_API_KEY]",

  // ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――
  // Current Model
  // The `model` that agents must use; it must match one of the entries in the `models` list of OpenAI-compatible API endpoints below.
  "current_model": "Open-Orca/Mistral-7B-OpenOrca",

  // ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――
  // Project Description
  // Description of the program you want the agents to develop.
  // You can also set it to `null` if you want the Product Owner agent
  // to prompt you for your project description each time you run OADS.
  "initial_project_description": "Create a \"guess the number\" CLI game in Python.",

  // ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――
  // List of OpenAI-Compatible API endpoints
  // The `model` key must be unique.
  // https://microsoft.github.io/FLAML/docs/reference/autogen/oai/completion/#create
  "models": [
    // Any model using Azure OpenAI API
    {
      "model": "[AZURE_OPENAI_STUDIO_DEPLOYMENT_NAME]",
      "api_key": "[AZURE_OPENAI_API_KEY]",
      "api_base": "https://[AZURE_OPENAI_RESOURCE_NAME].openai.azure.com",
      "api_type": "azure",
      // https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#chat-completions
      "api_version": "2023-08-01-preview"
    },
    // `gpt-3.5-turbo-16k` using OpenAI API
    {
      "model": "gpt-3.5-turbo-16k",
      "api_key": "[OPEN_AI_API_KEY]"
    },
    // `gpt-4` using OpenAI API
    {
      "model": "gpt-4",
      "api_key": "[OPEN_AI_API_KEY]"
    },
    // `gpt-4-32k` using OpenAI API
    {
      "model": "gpt-4-32k",
      "api_key": "[OPEN_AI_API_KEY]"
    },
    // Open-source LLM deployment using "Text Generation Web UI" with its `openai` extension enabled:
    // https://github.com/ivangabriele/openai-autogen-dev-studio#open-source-llms
    // https://github.com/oobabooga/text-generation-webui/tree/main/extensions/openai#an-openedai-api-openai-like
    // !!! FUNCTION CALLING IS NOT SUPPORTED BY Text Generation Web UI !!!
    // This can also be any inference endpoint following the OpenAI API specs,
    // regardless of the model you use behind it.
    {
      "model": "Open-Orca/Mistral-7B-OpenOrca",
      "api_base": "https://[YOUR_CONTAINER_ID]-5001.proxy.runpod.net", // or your local/public endpoint
      // Unless you have set up your endpoint with an API key, you can leave this dummy value:
      "api_key": "sk-111111111111111111111111111111111111111111111111",
      "api_type": "open_ai"
    }
  ]

  // ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――
  // Functionary LLM API Endpoint Configuration
  // This must be a secondary deployment. Don't add this endpoint to the `models` list above.
  // You can deploy it in one click using this GitHub repository:
  // https://github.com/ivangabriele/docker-functionary
  // "functionary_model": {
  //   "model": "musabgultekin/functionary-7b-v1",
  //   "api_base": "https://eoefr8r4dxwu0n-8000.proxy.runpod.net//v1",
  //   "api_key": "functionary",
  //   "api_type": "open_ai"
  // }
}