Skip to content

Commit

Permalink
Merge branch 'master' into server-dep
Browse files Browse the repository at this point in the history
  • Loading branch information
crspeller committed Sep 30, 2024
2 parents 54cb7b9 + 2d68eb6 commit 1193659
Show file tree
Hide file tree
Showing 12 changed files with 5,722 additions and 4,099 deletions.
2 changes: 1 addition & 1 deletion .nvmrc
Original file line number Diff line number Diff line change
@@ -1 +1 @@
16.13.1
20.11
4 changes: 2 additions & 2 deletions server/ai/configuration.go
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ func (c *BotConfig) IsValid() bool {
isInvalid := c.Name == "" ||
c.DisplayName == "" ||
c.Service.Type == "" ||
(c.Service.Type == "openaicompatible" && c.Service.APIURL == "") ||
(c.Service.Type != "asksage" && c.Service.Type != "openaicompatible" && c.Service.APIKey == "")
((c.Service.Type == "openaicompatible" || c.Service.Type == "azure") && c.Service.APIURL == "") ||
(c.Service.Type != "asksage" && c.Service.Type != "openaicompatible" && c.Service.Type != "azure" && c.Service.APIKey == "")
return !isInvalid
}
62 changes: 35 additions & 27 deletions server/ai/openai/openai.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@ import (
"image/png"
"io"
"net/http"
"net/url"
"strings"
"time"

Expand Down Expand Up @@ -39,40 +38,49 @@ const OpenAIMaxImageSize = 20 * 1024 * 1024 // 20 MB

var ErrStreamingTimeout = errors.New("timeout streaming")

func NewCompatible(llmService ai.ServiceConfig, httpClient *http.Client, metricsService metrics.LLMetrics) *OpenAI {
apiKey := llmService.APIKey
endpointURL := strings.TrimSuffix(llmService.APIURL, "/")
defaultModel := llmService.DefaultModel
config := openaiClient.DefaultConfig(apiKey)
config.BaseURL = endpointURL
config.HTTPClient = httpClient

parsedURL, err := url.Parse(endpointURL)
if err == nil && strings.HasSuffix(parsedURL.Host, "openai.azure.com") {
config = openaiClient.DefaultAzureConfig(apiKey, endpointURL)
config.APIVersion = "2023-07-01-preview"
}
// NewAzure builds an OpenAI client targeting the Azure OpenAI service
// configured in llmService. The configured APIURL (minus any trailing
// slash) is used as the Azure endpoint, pinned to API version 2024-06-01.
func NewAzure(llmService ai.ServiceConfig, httpClient *http.Client, metricsService metrics.LLMetrics) *OpenAI {
	buildConfig := func(apiKey string) openaiClient.ClientConfig {
		endpoint := strings.TrimSuffix(llmService.APIURL, "/")
		cfg := openaiClient.DefaultAzureConfig(apiKey, endpoint)
		cfg.APIVersion = "2024-06-01"
		return cfg
	}
	return newOpenAI(llmService, httpClient, metricsService, buildConfig)
}

streamingTimeout := StreamingTimeoutDefault
if llmService.StreamingTimeoutSeconds > 0 {
streamingTimeout = time.Duration(llmService.StreamingTimeoutSeconds) * time.Second
}
return &OpenAI{
client: openaiClient.NewClientWithConfig(config),
defaultModel: defaultModel,
tokenLimit: llmService.TokenLimit,
streamingTimeout: streamingTimeout,
metricsService: metricsService,
}
// NewCompatible builds a client for any OpenAI-compatible endpoint,
// using llmService.APIURL (minus any trailing slash) as the base URL.
func NewCompatible(llmService ai.ServiceConfig, httpClient *http.Client, metricsService metrics.LLMetrics) *OpenAI {
	buildConfig := func(apiKey string) openaiClient.ClientConfig {
		cfg := openaiClient.DefaultConfig(apiKey)
		cfg.BaseURL = strings.TrimSuffix(llmService.APIURL, "/")
		return cfg
	}
	return newOpenAI(llmService, httpClient, metricsService, buildConfig)
}

// New builds a client for the official OpenAI API, carrying over the
// organization ID from the service configuration.
func New(llmService ai.ServiceConfig, httpClient *http.Client, metricsService metrics.LLMetrics) *OpenAI {
	buildConfig := func(apiKey string) openaiClient.ClientConfig {
		cfg := openaiClient.DefaultConfig(apiKey)
		cfg.OrgID = llmService.OrgID
		return cfg
	}
	return newOpenAI(llmService, httpClient, metricsService, buildConfig)
}

func newOpenAI(
llmService ai.ServiceConfig,
httpClient *http.Client,
metricsService metrics.LLMetrics,
baseConfigFunc func(apiKey string) openaiClient.ClientConfig,
) *OpenAI {
apiKey := llmService.APIKey
defaultModel := llmService.DefaultModel
if defaultModel == "" {
defaultModel = openaiClient.GPT3Dot5Turbo
}
config := openaiClient.DefaultConfig(llmService.APIKey)
config.OrgID = llmService.OrgID

config := baseConfigFunc(apiKey)
config.HTTPClient = httpClient

streamingTimeout := StreamingTimeoutDefault
Expand Down
4 changes: 4 additions & 0 deletions server/plugin.go
Original file line number Diff line number Diff line change
Expand Up @@ -154,6 +154,8 @@ func (p *Plugin) getLLM(llmBotConfig ai.BotConfig) ai.LanguageModel {
llm = openai.New(llmBotConfig.Service, p.llmUpstreamHTTPClient, llmMetrics)
case "openaicompatible":
llm = openai.NewCompatible(llmBotConfig.Service, p.llmUpstreamHTTPClient, llmMetrics)
case "azure":
llm = openai.NewAzure(llmBotConfig.Service, p.llmUpstreamHTTPClient, llmMetrics)
case "anthropic":
llm = anthropic.New(llmBotConfig.Service, p.llmUpstreamHTTPClient, llmMetrics)
case "asksage":
Expand Down Expand Up @@ -185,6 +187,8 @@ func (p *Plugin) getTranscribe() ai.Transcriber {
return openai.New(botConfig.Service, p.llmUpstreamHTTPClient, llmMetrics)
case "openaicompatible":
return openai.NewCompatible(botConfig.Service, p.llmUpstreamHTTPClient, llmMetrics)
case "azure":
return openai.NewAzure(botConfig.Service, p.llmUpstreamHTTPClient, llmMetrics)
}
return nil
}
Expand Down
1 change: 1 addition & 0 deletions webapp/.eslintrc.json
Original file line number Diff line number Diff line change
Expand Up @@ -643,6 +643,7 @@
"@typescript-eslint/prefer-interface": 0,
"@typescript-eslint/explicit-function-return-type": 0,
"@typescript-eslint/explicit-module-boundary-types": 0,
"@typescript-eslint/no-explicit-any": 0,
"@typescript-eslint/indent": [
2,
4,
Expand Down
Loading

0 comments on commit 1193659

Please sign in to comment.