-
Notifications
You must be signed in to change notification settings - Fork 68
/
main.go
115 lines (103 loc) · 3.23 KB
/
main.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
package main
import (
"github.com/diemus/azure-openai-proxy/pkg/azure"
"github.com/diemus/azure-openai-proxy/pkg/openai"
"github.com/gin-gonic/gin"
"log"
"net/http"
"os"
)
// Package-level configuration; defaults here, environment overrides in init.
var (
	// Address is the host:port the proxy listens on.
	Address = "0.0.0.0:8080"
	// ProxyMode selects "azure" (OpenAI-compatible endpoints translated to
	// Azure) or any other value for transparent OpenAI passthrough.
	ProxyMode = "azure"
)

// init puts gin into release mode and applies environment-variable
// overrides for the listen address and proxy mode.
func init() {
	gin.SetMode(gin.ReleaseMode)
	if addr, ok := os.LookupEnv("AZURE_OPENAI_PROXY_ADDRESS"); ok && addr != "" {
		Address = addr
	}
	if mode, ok := os.LookupEnv("AZURE_OPENAI_PROXY_MODE"); ok && mode != "" {
		ProxyMode = mode
	}
	log.Printf("loading azure openai proxy address: %s", Address)
	log.Printf("loading azure openai proxy mode: %s", ProxyMode)
}
// main wires HTTP routes according to ProxyMode and starts the server.
// In "azure" mode only the OpenAI-compatible endpoints are exposed and
// proxied to Azure; any other mode forwards everything to OpenAI as-is.
func main() {
	router := gin.Default()
	if ProxyMode == "azure" {
		router.GET("/v1/models", handleGetModels)
		router.OPTIONS("/v1/*path", handleOptions)
		router.POST("/v1/chat/completions", handleAzureProxy)
		router.POST("/v1/completions", handleAzureProxy)
		router.POST("/v1/embeddings", handleAzureProxy)
	} else {
		router.Any("*path", handleOpenAIProxy)
	}
	// BUGFIX: Run's error was previously discarded; a bind failure (port in
	// use, bad address) would exit silently with status 0.
	if err := router.Run(Address); err != nil {
		log.Fatalf("server exited: %v", err)
	}
}
// handleGetModels serves a static, OpenAI-compatible model catalogue so
// that clients probing /v1/models receive a plausible list even though the
// backend is Azure.
func handleGetModels(c *gin.Context) {
	modelIDs := []string{
		"gpt-4", "gpt-4-0314", "gpt-4-32k", "gpt-4-32k-0314",
		"gpt-3.5-turbo", "gpt-3.5-turbo-0301",
		"text-davinci-003", "text-embedding-ada-002",
	}
	resp := azure.ListModelResponse{Object: "list"}
	for _, id := range modelIDs {
		// Permissive, fixed permission entry mirroring OpenAI's response shape.
		perm := azure.ModelPermission{
			ID:                 "",
			Object:             "model",
			Created:            1679602087,
			AllowCreateEngine:  true,
			AllowSampling:      true,
			AllowLogprobs:      true,
			AllowSearchIndices: true,
			AllowView:          true,
			AllowFineTuning:    true,
			Organization:       "*",
			Group:              nil,
			IsBlocking:         false,
		}
		resp.Data = append(resp.Data, azure.Model{
			ID:         id,
			Object:     "model",
			Created:    1677649963,
			OwnedBy:    "openai",
			Permission: []azure.ModelPermission{perm},
			Root:       id,
			Parent:     nil,
		})
	}
	c.JSON(200, resp)
}
// handleOptions answers CORS preflight requests with permissive headers so
// that browser-based clients can call the proxy directly.
// See https://github.com/diemus/azure-openai-proxy/issues/1.
func handleOptions(c *gin.Context) {
	c.Header("Access-Control-Allow-Origin", "*")
	c.Header("Access-Control-Allow-Methods", "POST, GET, OPTIONS, PUT, DELETE")
	c.Header("Access-Control-Allow-Headers", "Content-Type, Authorization")
	// Redundant trailing `return` removed (staticcheck S1023).
	c.Status(http.StatusOK)
}
// handleAzureProxy forwards an OpenAI-style request to the Azure OpenAI
// backend. OPTIONS preflights are answered locally with permissive CORS
// headers (issue #1), and streaming responses get the trailing newline
// that Azure omits but OpenAI clients expect (chatgpt-web issue #831).
func handleAzureProxy(c *gin.Context) {
	if c.Request.Method == http.MethodOptions {
		// Header order is irrelevant to HTTP, so a map is fine here.
		for key, value := range map[string]string{
			"Access-Control-Allow-Origin":  "*",
			"Access-Control-Allow-Methods": "POST, GET, OPTIONS, PUT, DELETE",
			"Access-Control-Allow-Headers": "Content-Type, Authorization",
		} {
			c.Header(key, value)
		}
		c.Status(http.StatusOK)
		return
	}

	proxy := azure.NewOpenAIReverseProxy()
	proxy.ServeHTTP(c.Writer, c.Request)

	// Azure's SSE stream lacks the final "\n" OpenAI emits; append it so
	// downstream event-stream parsers terminate cleanly.
	if c.Writer.Header().Get("Content-Type") == "text/event-stream" {
		if _, err := c.Writer.Write([]byte("\n")); err != nil {
			log.Printf("rewrite azure response error: %v", err)
		}
	}
}
// handleOpenAIProxy transparently forwards any request to the upstream
// OpenAI API through a fresh reverse proxy.
func handleOpenAIProxy(c *gin.Context) {
	openai.NewOpenAIReverseProxy().ServeHTTP(c.Writer, c.Request)
}