Commit d058654

Merge branch 'Hk-Gosuto:main' into main

MTDickens authored Feb 12, 2024
2 parents fbfb4b2 + c8b7378 commit d058654
Showing 93 changed files with 1,485 additions and 384 deletions.
15 changes: 10 additions & 5 deletions .github/workflows/app.yml
@@ -43,12 +43,9 @@ jobs:
- os: ubuntu-latest
arch: x86_64
rust_target: x86_64-unknown-linux-gnu
- os: macos-latest
arch: x86_64
rust_target: x86_64-apple-darwin
- os: macos-latest
arch: aarch64
rust_target: aarch64-apple-darwin
rust_target: x86_64-apple-darwin,aarch64-apple-darwin
- os: windows-latest
arch: x86_64
rust_target: x86_64-pc-windows-msvc
@@ -60,13 +57,14 @@ jobs:
uses: actions/setup-node@v3
with:
node-version: 18
cache: 'yarn'
- name: install Rust stable
uses: dtolnay/rust-toolchain@stable
with:
targets: ${{ matrix.config.rust_target }}
- uses: Swatinem/rust-cache@v2
with:
key: ${{ matrix.config.rust_target }}
key: ${{ matrix.config.os }}
- name: install dependencies (ubuntu only)
if: matrix.config.os == 'ubuntu-latest'
run: |
@@ -79,8 +77,15 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
TAURI_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
TAURI_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
with:
releaseId: ${{ needs.create-release.outputs.release_id }}
args: ${{ matrix.config.os == 'macos-latest' && '--target universal-apple-darwin' || '' }}

publish-release:
permissions:
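The new `args` line uses GitHub Actions' `&&`/`||` short-circuit idiom as an inline conditional. A rough TypeScript sketch of that expression's behaviour (not project code, just the semantics):

```ts
// Rough equivalent of the workflow expression
// `${{ matrix.config.os == 'macos-latest' && '--target universal-apple-darwin' || '' }}`:
// because the middle operand is a non-empty string, the && / || chain acts as a ternary.
function tauriBuildArgs(os: string): string {
  return os === "macos-latest" ? "--target universal-apple-darwin" : "";
}

console.log(tauriBuildArgs("macos-latest"));   // "--target universal-apple-darwin"
console.log(tauriBuildArgs("windows-latest")); // ""
```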
84 changes: 84 additions & 0 deletions .github/workflows/deploy_preview.yml
@@ -0,0 +1,84 @@
name: VercelPreviewDeployment

on:
pull_request_target:
types:
- opened
- synchronize
- reopened

env:
VERCEL_TEAM: ${{ secrets.VERCEL_TEAM }}
VERCEL_TOKEN: ${{ secrets.VERCEL_TOKEN }}
VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }}
VERCEL_PROJECT_ID: ${{ secrets.VERCEL_PROJECT_ID }}
VERCEL_PR_DOMAIN_SUFFIX: ${{ secrets.VERCEL_PR_DOMAIN_SUFFIX }}

permissions:
contents: read
statuses: write
pull-requests: write

jobs:
deploy-preview:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
ref: ${{ github.event.pull_request.head.sha }}

- name: Extract branch name
shell: bash
run: echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> "$GITHUB_OUTPUT"
id: extract_branch

- name: Hash branch name
uses: pplanel/[email protected]
id: hash_branch
with:
input: ${{ steps.extract_branch.outputs.branch }}
method: MD5

- name: Set Environment Variables
id: set_env
if: github.event_name == 'pull_request_target'
run: |
echo "VERCEL_ALIAS_DOMAIN=${{ github.event.pull_request.number }}-${{ github.workflow }}.${VERCEL_PR_DOMAIN_SUFFIX}" >> $GITHUB_OUTPUT
- name: Install Vercel CLI
run: npm install --global vercel@latest

- name: Cache dependencies
uses: actions/cache@v2
id: cache-npm
with:
path: ~/.npm
key: npm-${{ hashFiles('package-lock.json') }}
restore-keys: npm-

- name: Pull Vercel Environment Information
run: vercel pull --yes --environment=preview --token=${VERCEL_TOKEN}

- name: Deploy Project Artifacts to Vercel
id: vercel
env:
META_TAG: ${{ steps.hash_branch.outputs.digest }}-${{ github.run_number }}-${{ github.run_attempt}}
run: |
set -e
vercel pull --yes --environment=preview --token=${VERCEL_TOKEN}
vercel build --token=${VERCEL_TOKEN}
vercel deploy --prebuilt --archive=tgz --token=${VERCEL_TOKEN} --meta base_hash=${{ env.META_TAG }}
DEFAULT_URL=$(vercel ls --token=${VERCEL_TOKEN} --meta base_hash=${{ env.META_TAG }})
ALIAS_URL=$(vercel alias set ${DEFAULT_URL} ${{ steps.set_env.outputs.VERCEL_ALIAS_DOMAIN }} --token=${VERCEL_TOKEN} --scope ${VERCEL_TEAM}| awk '{print $3}')
echo "New preview URL: ${DEFAULT_URL}"
echo "New alias URL: ${ALIAS_URL}"
echo "VERCEL_URL=${ALIAS_URL}" >> "$GITHUB_OUTPUT"
- uses: mshick/add-pr-comment@v2
with:
message: |
Your build has completed!
[Preview deployment](${{ steps.vercel.outputs.VERCEL_URL }})
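For reference, the preview alias set in the "Set Environment Variables" step is just a string composed from the PR number, the workflow name, and the `VERCEL_PR_DOMAIN_SUFFIX` secret. A small TypeScript sketch with illustrative inputs:

```ts
// Illustrative only: mirrors the shell line
// VERCEL_ALIAS_DOMAIN=<pr number>-<workflow name>.<VERCEL_PR_DOMAIN_SUFFIX>
function previewAliasDomain(
  prNumber: number,
  workflowName: string,
  domainSuffix: string,
): string {
  return `${prNumber}-${workflowName}.${domainSuffix}`;
}

console.log(previewAliasDomain(123, "VercelPreviewDeployment", "preview.example.dev"));
// "123-VercelPreviewDeployment.preview.example.dev"
```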
40 changes: 40 additions & 0 deletions .github/workflows/remove_deploy_preview.yml
@@ -0,0 +1,40 @@
name: Removedeploypreview

permissions:
contents: read
statuses: write
pull-requests: write

env:
VERCEL_TOKEN: ${{ secrets.VERCEL_TOKEN }}
VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }}
VERCEL_PROJECT_ID: ${{ secrets.VERCEL_PROJECT_ID }}

on:
pull_request_target:
types:
- closed

jobs:
delete-deployments:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2

- name: Extract branch name
shell: bash
run: echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT
id: extract_branch

- name: Hash branch name
uses: pplanel/[email protected]
id: hash_branch
with:
input: ${{ steps.extract_branch.outputs.branch }}
method: MD5

- name: Call the delete-deployment-preview.sh script
env:
META_TAG: ${{ steps.hash_branch.outputs.digest }}
run: |
bash ./scripts/delete-deployment-preview.sh
11 changes: 5 additions & 6 deletions README.md
@@ -6,9 +6,6 @@
Deploy your own cross-platform private ChatGPT app for free with one click; supports GPT3, GPT4 & Gemini Pro models. (Plugin functionality is implemented with LangChain.)

[![Web][Web-image]][web-url]
[![Windows][Windows-image]][download-url]
[![MacOS][MacOS-image]][download-url]
[![Linux][Linux-image]][download-url]

[Web version](https://chat-gpt-next-web-gosuto.vercel.app/) / [Issues](https://github.com/Hk-Gosuto/ChatGPT-Next-Web-LangChain/issues)

@@ -126,7 +123,9 @@
- [x] Support a per-ChatSession plugin on/off toggle

The plugin toggle only appears when using models other than the `0301` and `0314` versions; for all other models it defaults to off and the toggle is not shown.


The two models above have been removed in the latest version.

- [ ] Support adding custom plugins

## Latest Updates
@@ -200,9 +199,9 @@ OpenAI API proxy URL; if you have manually configured an openai API proxy, fill in this option

If you do not want users to use GPT-4, set this environment variable to 1.

### `HIDE_BALANCE_QUERY` (optional)
### `ENABLE_BALANCE_QUERY` (optional)

If you do not want users to query their balance, set this environment variable to 1.
If you want to enable the balance query feature, set this environment variable to 1.

### `GOOGLE_API_KEY` (optional)

2 changes: 1 addition & 1 deletion README_CN.md
@@ -5,7 +5,7 @@

Deploy your own private ChatGPT web app for free with one click; supports GPT3, GPT4 & Gemini Pro models.

[Demo](https://chat-gpt-next-web.vercel.app/) / [Issues](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [Join Discord](https://discord.gg/zrhvHCr79N) / [QQ Group](https://user-images.githubusercontent.com/16968934/228190818-7dd00845-e9b9-4363-97e5-44c507ac76da.jpeg) / [Sponsor the developer](https://user-images.githubusercontent.com/16968934/227772541-5bcd52d8-61b7-488c-a203-0330d8006e2b.jpg) / [Donate](#捐赠-donate-usdt)
[Demo](https://chat-gpt-next-web.vercel.app/) / [Issues](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [Join Discord](https://discord.gg/zrhvHCr79N)

[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web)

12 changes: 8 additions & 4 deletions app/api/common.ts
@@ -1,6 +1,6 @@
import { NextRequest, NextResponse } from "next/server";
import { getServerSideConfig } from "../config/server";
import { DEFAULT_MODELS, OPENAI_BASE_URL, GEMINI_BASE_URL } from "../constant";
import { DEFAULT_MODELS, OPENAI_BASE_URL } from "../constant";
import { collectModelTable } from "../utils/model";
import { makeAzurePath } from "../azure";

@@ -65,10 +65,14 @@ export async function requestOpenai(req: NextRequest) {
path = makeAzurePath(path, serverConfig.azureApiVersion);
}

const clonedBody = await req.text();
const jsonBody = JSON.parse(clonedBody) as { model?: string };
let jsonBody;
let clonedBody;
if (req.method !== "GET" && req.method !== "HEAD") {
clonedBody = await req.text();
jsonBody = JSON.parse(clonedBody) as { model?: string };
}
if (serverConfig.isAzure) {
baseUrl = `${baseUrl}/${jsonBody.model}`;
baseUrl = `${baseUrl}/${jsonBody?.model}`;
}
const fetchUrl = `${baseUrl}/${path}`;
const fetchOptions: RequestInit = {
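The change above stops `requestOpenai` from unconditionally calling `req.text()` and `JSON.parse`, which breaks on GET/HEAD requests that carry no body. A minimal standalone sketch of the same guard (the helper name is illustrative, not part of the project):

```ts
import { NextRequest } from "next/server";

// Hypothetical helper mirroring the guarded body read above: only methods that
// can carry a body are parsed, so GET/HEAD requests yield undefined instead of
// throwing inside JSON.parse.
async function readOptionalJsonBody(
  req: NextRequest,
): Promise<{ model?: string } | undefined> {
  if (req.method === "GET" || req.method === "HEAD") {
    return undefined;
  }
  const rawBody = await req.text();
  return JSON.parse(rawBody) as { model?: string };
}
```

With an optional body, the Azure branch then needs the optional chaining shown in the diff (`jsonBody?.model`) rather than `jsonBody.model`.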
2 changes: 1 addition & 1 deletion app/api/cors/[...path]/route.ts
@@ -40,4 +40,4 @@ export const POST = handle;
export const GET = handle;
export const OPTIONS = handle;

export const runtime = "nodejs";
export const runtime = "edge";
37 changes: 37 additions & 0 deletions app/api/cors/route.ts
@@ -0,0 +1,37 @@
import { NextRequest, NextResponse } from "next/server";

async function handle(req: NextRequest) {
if (req.method === "OPTIONS") {
return NextResponse.json({ body: "OK" }, { status: 200 });
}

const targetUrl = req.nextUrl.searchParams.get("url");

if (!targetUrl) {
return NextResponse.json({ body: "Bad Url" }, { status: 500 });
}

const method = req.headers.get("method") ?? undefined;
const fetchOptions: RequestInit = {
headers: {
authorization: req.headers.get("authorization") ?? "",
},
method,
// @ts-ignore
duplex: "half",
};

const fetchResult = await fetch(targetUrl, fetchOptions);

console.log("[Any Proxy]", targetUrl, {
status: fetchResult.status,
statusText: fetchResult.statusText,
});

return fetchResult;
}

export const GET = handle;
export const OPTIONS = handle;

export const runtime = "edge";
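A quick usage sketch for the new query-parameter proxy route: the caller passes the target URL in the `url` query string and, optionally, an `authorization` header that the route forwards. The target URL and token below are placeholders.

```ts
// Illustrative client-side call to the new /api/cors route; the route forwards
// the authorization header and returns the upstream response as-is.
async function fetchViaProxy(targetUrl: string, token?: string): Promise<Response> {
  const endpoint = `/api/cors?url=${encodeURIComponent(targetUrl)}`;
  return fetch(endpoint, {
    headers: token ? { authorization: token } : {},
  });
}
```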
9 changes: 2 additions & 7 deletions app/api/google/[...path]/route.ts
@@ -1,7 +1,7 @@
import { NextRequest, NextResponse } from "next/server";
import { auth } from "../../auth";
import { getServerSideConfig } from "@/app/config/server";
import { GEMINI_BASE_URL, ModelProvider } from "@/app/constant";
import { GOOGLE_BASE_URL, ModelProvider } from "@/app/constant";

async function handle(
req: NextRequest,
@@ -17,7 +17,7 @@ async function handle(

const serverConfig = getServerSideConfig();

let baseUrl = serverConfig.googleBaseUrl || GEMINI_BASE_URL;
let baseUrl = serverConfig.googleBaseUrl || GOOGLE_BASE_URL;

if (!baseUrl.startsWith("http")) {
baseUrl = `https://${baseUrl}`;
@@ -101,19 +101,14 @@ export const POST = handle;

export const runtime = "edge";
export const preferredRegion = [
"arn1",
"bom1",
"cdg1",
"cle1",
"cpt1",
"dub1",
"fra1",
"gru1",
"hnd1",
"iad1",
"icn1",
"kix1",
"lhr1",
"pdx1",
"sfo1",
"sin1",
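The functional change here is the constant rename (`GEMINI_BASE_URL` → `GOOGLE_BASE_URL`); the surrounding fallback-and-normalize logic stays the same. A standalone sketch of that resolution, with an assumed placeholder default value:

```ts
// Placeholder default for illustration; the real value is the renamed
// GOOGLE_BASE_URL constant in app/constant.
const GOOGLE_BASE_URL = "https://generativelanguage.googleapis.com/";

function resolveGoogleBaseUrl(configuredUrl?: string): string {
  let baseUrl = configuredUrl || GOOGLE_BASE_URL;
  // Allow configured values that omit the scheme, e.g. "my-proxy.example.com".
  if (!baseUrl.startsWith("http")) {
    baseUrl = `https://${baseUrl}`;
  }
  return baseUrl;
}
```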
2 changes: 1 addition & 1 deletion app/client/api.ts
@@ -171,7 +171,7 @@ export function getHeaders(ignoreHeaders?: boolean) {
const accessStore = useAccessStore.getState();
let headers: Record<string, string> = {};
const modelConfig = useChatStore.getState().currentSession().mask.modelConfig;
const isGoogle = modelConfig.model === "gemini-pro";
const isGoogle = modelConfig.model.startsWith("gemini");
if (!ignoreHeaders && !isGoogle) {
headers = {
"Content-Type": "application/json",
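Switching from an exact match to `startsWith("gemini")` routes every Gemini variant through the Google code path, not just `gemini-pro`. A small illustration (the extra model name is an example, not an exhaustive list):

```ts
// Exact match: only "gemini-pro" counts as a Google model.
const isGoogleExact = (model: string) => model === "gemini-pro";

// Prefix match: any model whose name starts with "gemini" counts.
const isGooglePrefix = (model: string) => model.startsWith("gemini");

console.log(isGoogleExact("gemini-pro-vision"));  // false
console.log(isGooglePrefix("gemini-pro-vision")); // true
```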
10 changes: 1 addition & 9 deletions app/client/platforms/google.ts
@@ -13,15 +13,7 @@ import {
LLMUsage,
} from "../api";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import {
EventStreamContentType,
fetchEventSource,
} from "@fortaine/fetch-event-source";
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import Locale from "../../locales";
import { getServerSideConfig } from "@/app/config/server";
import de from "@/app/locales/de";

export class GeminiProApi implements LLMApi {
toolAgentChat(options: AgentChatOptions): Promise<void> {
throw new Error("Method not implemented.");
7 changes: 6 additions & 1 deletion app/client/platforms/openai.ts
@@ -1,3 +1,4 @@
"use client";
import {
ApiPath,
DEFAULT_API_HOST,
@@ -53,7 +54,9 @@ export class ChatGPTApi implements LLMApi {

if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
baseUrl = isApp ? DEFAULT_API_HOST : ApiPath.OpenAI;
baseUrl = isApp
? DEFAULT_API_HOST + "/proxy" + ApiPath.OpenAI
: ApiPath.OpenAI;
}

if (baseUrl.endsWith("/")) {
@@ -68,6 +71,8 @@ export class ChatGPTApi implements LLMApi {
return [baseUrl, model, path].join("/");
}

console.log("[Proxy Endpoint] ", baseUrl, path);

return [baseUrl, path].join("/");
}

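The path change above only affects the desktop (Tauri) build: when no base URL is configured, the app client now prefixes the default host with `/proxy` before the OpenAI API path. A standalone sketch of the resolution with placeholder constants (the real values live in `app/constant`):

```ts
// Placeholder constants for illustration; they stand in for DEFAULT_API_HOST
// and ApiPath.OpenAI from app/constant.
const DEFAULT_API_HOST = "https://example-default-host.com";
const OPENAI_API_PATH = "/api/openai";

function resolveOpenAiBaseUrl(configuredUrl: string, isApp: boolean): string {
  let baseUrl = configuredUrl;
  if (baseUrl.length === 0) {
    // Desktop builds route through the host's /proxy prefix; the web build
    // keeps the relative API path.
    baseUrl = isApp
      ? DEFAULT_API_HOST + "/proxy" + OPENAI_API_PATH
      : OPENAI_API_PATH;
  }
  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, baseUrl.length - 1);
  }
  return baseUrl;
}
```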
20 changes: 14 additions & 6 deletions app/components/chat.tsx
@@ -521,12 +521,20 @@ export function ChatActions(props: {
if (items[i].type.indexOf("image") === -1) continue;
const file = items[i].getAsFile();
if (file !== null) {
api.file.upload(file).then((fileName) => {
props.imageSelected({
fileName,
fileUrl: `/api/file/${fileName}`,
});
});
setUploadLoading(true);
api.file
.upload(file)
.then((uploadFile) => {
props.imageSelected({
fileName: uploadFile.fileName,
fileUrl: uploadFile.filePath,
});
})
.catch((e) => {
console.error("[Upload]", e);
showToast(prettyObject(e));
})
.finally(() => setUploadLoading(false));
}
}
};
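The paste handler now wraps the upload in a loading flag and surfaces failures instead of silently dropping them. A generic, self-contained sketch of that pattern (the uploader and callbacks are parameters here, so nothing is tied to the project's actual `api.file.upload` signature):

```ts
interface UploadedFile {
  fileName: string;
  filePath: string;
}

// Generic upload-with-loading-state helper mirroring the .then/.catch/.finally
// chain above: the loading flag is set before the upload starts and is always
// cleared, whether the upload succeeds or fails.
function uploadWithLoadingState(
  file: File,
  upload: (f: File) => Promise<UploadedFile>,
  setLoading: (loading: boolean) => void,
  onSuccess: (f: UploadedFile) => void,
  onError: (e: unknown) => void,
): void {
  setLoading(true);
  upload(file)
    .then(onSuccess)
    .catch(onError)
    .finally(() => setLoading(false));
}
```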
(Diff listing truncated; the remaining changed files in this commit are not shown.)