
Commit

Merge remote-tracking branch 'up/main'
# Conflicts:
#	app/components/chat.tsx
#	app/store/chat.ts
ahzmr committed Dec 13, 2024
2 parents b2381b2 + 83cea3a commit bf5cdc9
Showing 54 changed files with 2,277 additions and 775 deletions.
14 changes: 12 additions & 2 deletions README.md
@@ -96,10 +96,11 @@ For enterprise inquiries, please contact: **business@nextchat.dev**
- [x] Artifacts: Easily preview, copy and share generated content/webpages through a separate window [#5092](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/pull/5092)
- [x] Plugins: support network search, calculator, any other apis etc. [#165](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/165) [#5353](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5353)
- [x] network search, calculator, any other apis etc. [#165](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/165) [#5353](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5353)
- [x] Supports Realtime Chat [#5672](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5672)
- [ ] local knowledge base

## What's New

- 🚀 v2.15.8 Now supports Realtime Chat [#5672](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5672)
- 🚀 v2.15.4 The Application supports using Tauri fetch LLM API, MORE SECURITY! [#5379](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5379)
- 🚀 v2.15.0 Now supports Plugins! Read this: [NextChat-Awesome-Plugins](https://github.com/ChatGPTNextWeb/NextChat-Awesome-Plugins)
- 🚀 v2.14.0 Now supports Artifacts & SD
@@ -134,10 +135,11 @@ For enterprise inquiries, please contact: **business@nextchat.dev**
- [x] Artifacts: 通过独立窗口,轻松预览、复制和分享生成的内容/可交互网页 [#5092](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/pull/5092)
- [x] 插件机制,支持`联网搜索``计算器`、调用其他平台 api [#165](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/165) [#5353](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5353)
- [x] 支持联网搜索、计算器、调用其他平台 api [#165](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/165) [#5353](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5353)
- [x] 支持 Realtime Chat [#5672](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5672)
- [ ] 本地知识库

## 最新动态

- 🚀 v2.15.8 现在支持Realtime Chat [#5672](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5672)
- 🚀 v2.15.4 客户端支持Tauri本地直接调用大模型API,更安全![#5379](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5379)
- 🚀 v2.15.0 现在支持插件功能了!了解更多:[NextChat-Awesome-Plugins](https://github.com/ChatGPTNextWeb/NextChat-Awesome-Plugins)
- 🚀 v2.14.0 现在支持 Artifacts & SD 了。
@@ -301,6 +303,14 @@ iflytek Api Key.

iflytek Api Secret.

### `CHATGLM_API_KEY` (optional)

ChatGLM Api Key.

### `CHATGLM_URL` (optional)

ChatGLM Api Url.

### `HIDE_USER_API_KEY` (optional)

> Default: Empty
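Note on the new `CHATGLM_API_KEY` / `CHATGLM_URL` variables: the server-side wiring lives in `app/config/server.ts`, which is outside this diff, so the sketch below only illustrates the assumed pattern (field names and the fallback endpoint are guesses, not the repository's actual code):

```ts
// Hypothetical sketch of how the new ChatGLM settings could be read server-side.
// Field names and the default endpoint are assumptions for illustration only.
const chatglmApiKey = process.env.CHATGLM_API_KEY ?? "";
const chatglmUrl = process.env.CHATGLM_URL ?? "https://open.bigmodel.cn"; // assumed default

export const chatglmConfig = {
  isChatGLM: chatglmApiKey.length > 0, // provider enabled only when a key is set
  apiKey: chatglmApiKey,
  baseUrl: chatglmUrl,
};
```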
7 changes: 7 additions & 0 deletions README_CN.md
@@ -184,6 +184,13 @@ ByteDance Api Url.

讯飞星火Api Secret.

### `CHATGLM_API_KEY` (可选)

ChatGLM Api Key.

### `CHATGLM_URL` (可选)

ChatGLM Api Url.


### `HIDE_USER_API_KEY` (可选)
4 changes: 2 additions & 2 deletions app/api/common.ts
@@ -1,8 +1,8 @@
import { NextRequest, NextResponse } from "next/server";
import { getServerSideConfig } from "../config/server";
import { OPENAI_BASE_URL, ServiceProvider } from "../constant";
import { isModelAvailableInServer } from "../utils/model";
import { cloudflareAIGatewayUrl } from "../utils/cloudflare";
import { getModelProvider, isModelAvailableInServer } from "../utils/model";

const serverConfig = getServerSideConfig();

@@ -71,7 +71,7 @@ export async function requestOpenai(req: NextRequest) {
.filter((v) => !!v && !v.startsWith("-") && v.includes(modelName))
.forEach((m) => {
const [fullName, displayName] = m.split("=");
const [_, providerName] = fullName.split("@");
const [_, providerName] = getModelProvider(fullName);
if (providerName === "azure" && !displayName) {
const [_, deployId] = (serverConfig?.azureUrl ?? "").split(
"deployments/",
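Reviewer note: `getModelProvider` is imported from `app/utils/model`, but its body is not part of this diff. Judging from the call site it replaces a naive `fullName.split("@")`, presumably so that a `CUSTOM_MODELS` entry of the form `model@provider` still parses when the model id itself contains an `@`. A hedged sketch of such a helper, not the repository's actual implementation:

```ts
// Hedged sketch only: split "model@provider" on the LAST "@" so model ids
// that contain "@" themselves, or have no provider suffix, still parse.
function getModelProvider(fullName: string): [string, string | undefined] {
  const idx = fullName.lastIndexOf("@");
  if (idx === -1) return [fullName, undefined];
  return [fullName.slice(0, idx), fullName.slice(idx + 1)];
}

// getModelProvider("gpt-4o@azure")  -> ["gpt-4o", "azure"]
// getModelProvider("gpt-3.5-turbo") -> ["gpt-3.5-turbo", undefined]
```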
7 changes: 5 additions & 2 deletions app/api/openai.ts
@@ -14,8 +14,11 @@ function getModels(remoteModelRes: OpenAIListModelResponse) {
if (config.disableGPT4) {
remoteModelRes.data = remoteModelRes.data.filter(
(m) =>
!(m.id.startsWith("gpt-4") || m.id.startsWith("chatgpt-4o")) ||
m.id.startsWith("gpt-4o-mini"),
!(
m.id.startsWith("gpt-4") ||
m.id.startsWith("chatgpt-4o") ||
m.id.startsWith("o1")
) || m.id.startsWith("gpt-4o-mini"),
);
}

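With this change, `DISABLE_GPT4` also hides the o1 family from the fetched model list while still letting `gpt-4o-mini` through. A quick self-contained illustration of the predicate (the sample ids are made up, not taken from the live endpoint):

```ts
// Same filter condition as above, inverted into a "hidden" predicate for clarity.
const hidden = (id: string) =>
  (id.startsWith("gpt-4") || id.startsWith("chatgpt-4o") || id.startsWith("o1")) &&
  !id.startsWith("gpt-4o-mini");

for (const id of ["gpt-4", "chatgpt-4o-latest", "o1-preview", "gpt-4o-mini", "gpt-3.5-turbo"]) {
  console.log(id, hidden(id) ? "filtered out" : "kept");
}
// gpt-4             filtered out
// chatgpt-4o-latest filtered out
// o1-preview        filtered out
// gpt-4o-mini       kept
// gpt-3.5-turbo     kept
```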
14 changes: 14 additions & 0 deletions app/api/proxy.ts
@@ -1,4 +1,5 @@
import { NextRequest, NextResponse } from "next/server";
import { getServerSideConfig } from "@/app/config/server";

export async function handle(
req: NextRequest,
@@ -9,6 +10,7 @@ export async function handle(
if (req.method === "OPTIONS") {
return NextResponse.json({ body: "OK" }, { status: 200 });
}
const serverConfig = getServerSideConfig();

// remove path params from searchParams
req.nextUrl.searchParams.delete("path");
@@ -31,6 +33,18 @@
return true;
}),
);
// if dalle3 use openai api key
const baseUrl = req.headers.get("x-base-url");
if (baseUrl?.includes("api.openai.com")) {
if (!serverConfig.apiKey) {
return NextResponse.json(
{ error: "OpenAI API key not configured" },
{ status: 500 },
);
}
headers.set("Authorization", `Bearer ${serverConfig.apiKey}`);
}

const controller = new AbortController();
const fetchOptions: RequestInit = {
headers,
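Per the `// if dalle3 use openai api key` comment, the proxy now attaches the server's own OpenAI key whenever the forwarded base URL is `api.openai.com`, so image generation can go through the generic proxy route without the client holding a key. A hedged sketch of the client side of that contract; only the `x-base-url` header name comes from the hunk above, while the proxy path and response handling are assumptions:

```ts
// Hypothetical client call that would exercise the new branch.
// The "/api/proxy/..." path is an assumption; "x-base-url" is taken from the diff.
async function createImageViaProxy(prompt: string) {
  const res = await fetch("/api/proxy/v1/images/generations", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      // Tells the proxy to forward to OpenAI; the server then sets
      // Authorization: Bearer <its own OpenAI key> before forwarding.
      "x-base-url": "https://api.openai.com",
    },
    body: JSON.stringify({ model: "dall-e-3", prompt, n: 1, size: "1024x1024" }),
  });
  if (!res.ok) throw new Error(`proxy error: ${res.status}`);
  return res.json();
}
```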
12 changes: 9 additions & 3 deletions app/client/platforms/google.ts
@@ -29,7 +29,7 @@ import { RequestPayload } from "./openai";
import { fetch } from "@/app/utils/stream";

export class GeminiProApi implements LLMApi {
path(path: string): string {
path(path: string, shouldStream = false): string {
const accessStore = useAccessStore.getState();

let baseUrl = "";
@@ -51,15 +51,18 @@ export class GeminiProApi implements LLMApi {
console.log("[Proxy Endpoint] ", baseUrl, path);

let chatPath = [baseUrl, path].join("/");
if (shouldStream) {
chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse";
}

chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse";
return chatPath;
}
extractMessage(res: any) {
console.log("[Response] gemini-pro response: ", res);

return (
res?.candidates?.at(0)?.content?.parts.at(0)?.text ||
res?.at(0)?.candidates?.at(0)?.content?.parts.at(0)?.text ||
res?.error?.message ||
""
);
@@ -166,7 +169,10 @@ export class GeminiProApi implements LLMApi {
options.onController?.(controller);
try {
// https://github.com/google-gemini/cookbook/blob/main/quickstarts/rest/Streaming_REST.ipynb
const chatPath = this.path(Google.ChatPath(modelConfig.model));
const chatPath = this.path(
Google.ChatPath(modelConfig.model),
shouldStream,
);

const chatPayload = {
method: "POST",
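Previously `?alt=sse` was appended to every Gemini chat URL, which asks the REST endpoint for a server-sent-events response even when the caller wanted a plain JSON body (hence the companion change in `extractMessage`, which now also handles an array-shaped response). A minimal restatement of the new path logic, isolated from the class; the base URL and model path below are placeholders:

```ts
// Minimal restatement of the path() change above; arguments are placeholders.
function geminiChatPath(baseUrl: string, path: string, shouldStream: boolean): string {
  let chatPath = [baseUrl, path].join("/");
  if (shouldStream) {
    chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse"; // SSE only when streaming
  }
  return chatPath;
}

// streaming:     .../models/gemini-pro:streamGenerateContent?alt=sse  (SSE chunks)
// non-streaming: .../models/gemini-pro:streamGenerateContent          (plain JSON,
//                 matching the new res?.at(0) fallback in extractMessage)
```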
10 changes: 8 additions & 2 deletions app/client/platforms/openai.ts
@@ -65,6 +65,7 @@ export interface RequestPayload {
frequency_penalty: number;
top_p: number;
max_tokens?: number;
max_completion_tokens?: number;
}

export interface DalleRequestPayload {
@@ -223,7 +224,7 @@ export class ChatGPTApi implements LLMApi {
// O1 not support image, tools (plugin in ChatGPTNextWeb) and system, stream, logprobs, temperature, top_p, n, presence_penalty, frequency_penalty yet.
requestPayload = {
messages,
stream: !isO1 ? options.config.stream : false,
stream: options.config.stream,
model: modelConfig.model,
temperature: !isO1 ? modelConfig.temperature : 1,
presence_penalty: !isO1 ? modelConfig.presence_penalty : 0,
@@ -233,6 +234,11 @@
// Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
};

// O1 使用 max_completion_tokens 控制token数 (https://platform.openai.com/docs/guides/reasoning#controlling-costs)
if (isO1) {
requestPayload["max_completion_tokens"] = modelConfig.max_tokens;
}

// add max_tokens to vision model
if (visionModel) {
requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
@@ -241,7 +247,7 @@

console.log("[Request] openai payload: ", requestPayload);

const shouldStream = !isDalle3 && !!options.config.stream && !isO1;
const shouldStream = !isDalle3 && !!options.config.stream;
const controller = new AbortController();
options.onController?.(controller);

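Taken together, the three changes in this file mean o1 requests are no longer forced to non-streaming and send `max_completion_tokens` instead of `max_tokens`. An illustrative request body for an o1-class model (values are invented; the field set mirrors the `RequestPayload` built above):

```ts
// Illustrative o1 payload after this change; values are made up for the example.
const o1Payload = {
  messages: [{ role: "user", content: "Explain quicksort." }],
  stream: true, // streaming is no longer forced off for o1
  model: "o1-mini",
  temperature: 1, // o1 ignores the usual sampling knobs, so fixed defaults are sent
  presence_penalty: 0,
  frequency_penalty: 0,
  top_p: 1,
  max_completion_tokens: 4000, // replaces max_tokens for reasoning models
};

console.log(JSON.stringify(o1Payload, null, 2));
```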
6 changes: 4 additions & 2 deletions app/components/auth.tsx
@@ -18,6 +18,8 @@ import {
trackSettingsPageGuideToCPaymentClick,
trackAuthorizationPageButtonToCPaymentClick,
} from "../utils/auth-settings-events";
import clsx from "clsx";

const storage = safeLocalStorage();

export function AuthPage() {
@@ -54,7 +56,7 @@ export function AuthPage() {
onClick={() => navigate(Path.Home)}
></IconButton>
</div>
<div className={`no-dark ${styles["auth-logo"]}`}>
<div className={clsx("no-dark", styles["auth-logo"])}>
<BotIcon />
</div>

@@ -163,7 +165,7 @@ function TopBanner() {
onMouseEnter={handleMouseEnter}
onMouseLeave={handleMouseLeave}
>
<div className={`${styles["top-banner-inner"]} no-dark`}>
<div className={clsx(styles["top-banner-inner"], "no-dark")}>
<Logo className={styles["top-banner-logo"]}></Logo>
<span>
{Locale.Auth.TopTips}
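This file, along with `button.tsx` and `chat-list.tsx` below, swaps hand-rolled template-string concatenation for `clsx`, which drops falsy values and supports object syntax for conditional classes, so `"false"` or `"undefined"` fragments can no longer leak into `className`. In isolation:

```ts
import clsx from "clsx";

const isPrimary = true;

clsx("clickable", "icon-button", { border: false, shadow: true }); // "clickable icon-button shadow"
clsx("icon-button-icon", { "no-dark": isPrimary });                // "icon-button-icon no-dark"
clsx("chat-item", undefined, false && "chat-item-selected");       // "chat-item"
```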
24 changes: 14 additions & 10 deletions app/components/button.tsx
@@ -2,6 +2,7 @@ import * as React from "react";

import styles from "./button.module.scss";
import { CSSProperties } from "react";
import clsx from "clsx";

export type ButtonType = "primary" | "danger" | null;

@@ -22,12 +23,16 @@ }) {
}) {
return (
<button
className={
styles["icon-button"] +
` ${props.bordered && styles.border} ${props.shadow && styles.shadow} ${
props.className ?? ""
} clickable ${styles[props.type ?? ""]}`
}
className={clsx(
"clickable",
styles["icon-button"],
{
[styles.border]: props.bordered,
[styles.shadow]: props.shadow,
},
styles[props.type ?? ""],
props.className,
)}
onClick={props.onClick}
title={props.title}
disabled={props.disabled}
@@ -40,10 +45,9 @@ {props.icon && (
{props.icon && (
<div
aria-label={props.text || props.title}
className={
styles["icon-button-icon"] +
` ${props.type === "primary" && "no-dark"}`
}
className={clsx(styles["icon-button-icon"], {
"no-dark": props.type === "primary",
})}
>
{props.icon}
</div>
13 changes: 7 additions & 6 deletions app/components/chat-list.tsx
@@ -18,6 +18,7 @@ import { Mask } from "../store/mask";
import { useRef, useEffect } from "react";
import { showConfirm } from "./ui-lib";
import { useMobileScreen } from "../utils";
import clsx from "clsx";

export function ChatItem(props: {
onClick?: () => void;
@@ -45,11 +46,11 @@ <Draggable draggableId={`${props.id}`} index={props.index}>
<Draggable draggableId={`${props.id}`} index={props.index}>
{(provided) => (
<div
className={`${styles["chat-item"]} ${
props.selected &&
(currentPath === Path.Chat || currentPath === Path.Home) &&
styles["chat-item-selected"]
}`}
className={clsx(styles["chat-item"], {
[styles["chat-item-selected"]]:
props.selected &&
(currentPath === Path.Chat || currentPath === Path.Home),
})}
onClick={props.onClick}
ref={(ele) => {
draggableRef.current = ele;
@@ -63,7 +64,7 @@ >
>
{props.narrow ? (
<div className={styles["chat-item-narrow"]}>
<div className={styles["chat-item-avatar"] + " no-dark"}>
<div className={clsx(styles["chat-item-avatar"], "no-dark")}>
<MaskAvatar
avatar={props.mask.avatar}
model={props.mask.modelConfig.model}