-
Notifications
You must be signed in to change notification settings - Fork 42
/
ai_model.py
149 lines (116 loc) · 5.44 KB
/
ai_model.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
# MIT License
# Copyright (c) 2023-2024 wunderwuzzi23
# Greetings from Seattle!
from abc import ABC, abstractmethod
from openai import OpenAI
from groq import Groq
from ollama import Client
from openai import AzureOpenAI
from anthropic import Anthropic
import os
class AIModel(ABC):
    """Abstract interface over chat-completion providers.

    Concrete subclasses wrap one vendor SDK client and expose a uniform
    ``chat`` / ``moderate`` pair so the rest of the app is provider-agnostic.
    """

    @abstractmethod
    def chat(self, messages, model, temperature, max_tokens):
        """Send *messages* to *model* and return the assistant reply text.

        Signature matches every concrete implementation (the original
        abstract method declared ``(model, messages)`` in the wrong order).
        """

    @abstractmethod
    def moderate(self, message):
        """Run the provider's moderation endpoint on *message*; may be a no-op."""

    @staticmethod
    def get_model_client(config):
        """Build the provider client selected by ``config["api"]``.

        Falls back to ``"groq"`` when the setting is empty or ``None``.
        API keys are resolved in order: environment variable, config entry,
        then a dot-file in the user's home directory (OpenAI/Azure only).

        Raises:
            ValueError: if the provider name is not recognized.
        """
        api_provider = config["api"]
        if not api_provider:  # covers both "" and None
            api_provider = "groq"

        if api_provider == "groq":
            return GroqModel(api_key=os.environ.get("GROQ_API_KEY"))
        elif api_provider == "openai":
            api_key = os.getenv("OPENAI_API_KEY")
            if not api_key:
                api_key = config["openai_api_key"]
            if not api_key:
                # Only build the path when actually needed, so a missing
                # key file doesn't break the env-var/config paths.
                home_path = os.path.expanduser("~")
                # `with` closes the key file (original leaked the handle).
                with open(os.path.join(home_path, ".openai.apikey"), "r") as key_file:
                    api_key = key_file.readline().strip()
            return OpenAIModel(api_key=api_key)
        elif api_provider == "azure":
            api_key = os.getenv("AZURE_OPENAI_API_KEY")
            if not api_key:
                api_key = config["azure_openai_api_key"]
            if not api_key:
                home_path = os.path.expanduser("~")
                with open(os.path.join(home_path, ".azureopenai.apikey"), "r") as key_file:
                    api_key = key_file.readline().strip()
            return AzureOpenAIModel(
                api_key=api_key,
                azure_endpoint=config["azure_endpoint"],
                api_version=config["azure_api_version"])
        elif api_provider == "ollama":
            ollama_api = os.environ.get("OLLAMA_ENDPOINT", "http://localhost:11434")
            return OllamaModel(ollama_api)
        # `elif` keeps one coherent dispatch chain (original used a bare `if`,
        # which only worked because every earlier branch returns).
        elif api_provider == "anthropic":
            api_key = os.getenv("ANTHROPIC_API_KEY")
            if not api_key:
                api_key = config["anthropic_api_key"]
            return AnthropicModel(api_key=api_key)
        else:
            raise ValueError(f"Invalid AI model provider: {api_provider}")
class GroqModel(AIModel):
    """Chat-completion backend backed by the Groq SDK."""

    def __init__(self, api_key):
        self.client = Groq(api_key=api_key)

    def chat(self, messages, model, temperature, max_tokens):
        """Return the text of the first completion choice for *messages*."""
        completion = self.client.chat.completions.create(
            model=model,
            messages=messages,
            temperature=temperature,
            max_tokens=max_tokens,
        )
        return completion.choices[0].message.content

    def moderate(self, message):
        # No moderation support wired up for this provider; intentional no-op.
        pass
class OpenAIModel(AIModel):
    """Chat-completion backend backed by the OpenAI SDK."""

    def __init__(self, api_key):
        self.client = OpenAI(api_key=api_key)

    def chat(self, messages, model, temperature, max_tokens):
        """Return the text of the first completion choice for *messages*."""
        completion = self.client.chat.completions.create(
            model=model,
            messages=messages,
            temperature=temperature,
            max_tokens=max_tokens,
        )
        return completion.choices[0].message.content

    def moderate(self, message):
        """Run *message* through OpenAI's moderation endpoint and return the raw response."""
        return self.client.moderations.create(input=message)
class OllamaModel(AIModel):
    """Chat-completion backend for a local/remote Ollama server."""

    def __init__(self, host):
        self.client = Client(host=host)

    def chat(self, messages, model, temperature, max_tokens):
        """Return the assistant reply text from the Ollama chat endpoint.

        NOTE(review): `temperature` and `max_tokens` are accepted for
        interface compatibility but not forwarded to the Ollama client —
        confirm whether they should be passed via `options`.
        """
        response = self.client.chat(model=model, messages=messages)
        return response["message"]["content"]

    def moderate(self, message):
        # Ollama has no moderation endpoint here; intentional no-op.
        pass
class AzureOpenAIModel(AIModel):
    """Chat-completion backend for Azure-hosted OpenAI deployments."""

    def __init__(self, azure_endpoint, api_key, api_version):
        self.client = AzureOpenAI(
            azure_endpoint=azure_endpoint,
            api_key=api_key,
            api_version=api_version,
        )

    def chat(self, messages, model, temperature, max_tokens):
        """Return the text of the first completion choice for *messages*."""
        completion = self.client.chat.completions.create(
            model=model,
            messages=messages,
            temperature=temperature,
            max_tokens=max_tokens,
        )
        return completion.choices[0].message.content

    def moderate(self, message):
        """Run *message* through the moderation endpoint and return the raw response."""
        return self.client.moderations.create(input=message)
class AnthropicModel(AIModel):
    """Chat-completion backend for Anthropic's Messages API."""

    def __init__(self, api_key):
        self.client = Anthropic(api_key=api_key)

    def chat(self, messages, model, temperature, max_tokens):
        """Return the assistant reply text for *messages*.

        Anthropic takes the system prompt as a dedicated argument rather
        than as a message role, so the first system message (if any) is
        lifted out and every system message is dropped from the list sent
        to the API.
        """
        system_prompt = None
        chat_messages = []
        for entry in messages:
            if entry.get("role") == "system":
                # Keep only the first system message's content; any later
                # system entries are discarded, as before.
                if system_prompt is None:
                    system_prompt = entry.get("content", "")
            else:
                chat_messages.append(entry)
        if system_prompt is None:
            system_prompt = ""

        response = self.client.messages.create(
            model=model,
            system=system_prompt,
            messages=chat_messages,
            temperature=temperature,
            max_tokens=max_tokens,
        )
        return response.content[0].text

    def moderate(self, message):
        # No moderation support wired up for this provider; intentional no-op.
        pass