Skip to content

Commit

Permalink
Standardize HF_ACCESS_TOKEN -> HF_TOKEN (huggingface#610)
Browse files Browse the repository at this point in the history
* Standardize HF_ACCESS_TOKEN -> HF_TOKEN

* Replace HF_ACCESS_TOKEN by HF_TOKEN in .env

* Add legacy support for HF_ACCESS_TOKEN

---------

Co-authored-by: Nathan Sarrazin <sarrazin.nathan@gmail.com>
  • Loading branch information
Wauplin and nsarrazin authored Dec 6, 2023
1 parent ea856f7 commit 3cbea34
Show file tree
Hide file tree
Showing 8 changed files with 32 additions and 20 deletions.
4 changes: 3 additions & 1 deletion .env
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,12 @@ MONGODB_DB_NAME=chat-ui
MONGODB_DIRECT_CONNECTION=false

COOKIE_NAME=hf-chat
HF_ACCESS_TOKEN=#hf_<token> from https://huggingface.co/settings/token
HF_TOKEN=#hf_<token> from https://huggingface.co/settings/token
HF_API_ROOT=https://api-inference.huggingface.co/models
OPENAI_API_KEY=#your openai api key here

HF_ACCESS_TOKEN=#LEGACY! Use HF_TOKEN instead

# used to activate search with web functionality. disabled if none are defined. choose one of the following:
YDC_API_KEY=#your docs.you.com api key here
SERPER_API_KEY=#your serper.dev api key here
Expand Down
6 changes: 3 additions & 3 deletions .github/workflows/deploy-release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ jobs:
SERPER_API_KEY: ${{ secrets.SERPER_API_KEY }}
OPENID_CONFIG: ${{ secrets.OPENID_CONFIG }}
MONGODB_URL: ${{ secrets.MONGODB_URL }}
HF_ACCESS_TOKEN: ${{ secrets.HF_ACCESS_TOKEN }}
HF_DEPLOYMENT_TOKEN: ${{ secrets.HF_DEPLOYMENT_TOKEN }}
run: npm run updateProdEnv
sync-to-hub:
runs-on: ubuntu-latest
Expand All @@ -39,5 +39,5 @@ jobs:
lfs: true
- name: Push to hub
env:
HF_TOKEN: ${{ secrets.HF_TOKEN }}
run: git push https://nsarrazin:$HF_TOKEN@huggingface.co/spaces/huggingchat/chat-ui main
HF_DEPLOYMENT_TOKEN: ${{ secrets.HF_DEPLOYMENT_TOKEN }}
run: git push https://nsarrazin:$HF_DEPLOYMENT_TOKEN@huggingface.co/spaces/huggingchat/chat-ui main
4 changes: 2 additions & 2 deletions .github/workflows/deploy-staging.yml
Original file line number Diff line number Diff line change
Expand Up @@ -20,5 +20,5 @@ jobs:
lfs: true
- name: Push to hub
env:
HF_TOKEN: ${{ secrets.HF_TOKEN }}
run: git push https://nsarrazin:$HF_TOKEN@huggingface.co/spaces/huggingchat/chat-ui-staging main
HF_DEPLOYMENT_TOKEN: ${{ secrets.HF_DEPLOYMENT_TOKEN }}
run: git push https://nsarrazin:$HF_DEPLOYMENT_TOKEN@huggingface.co/spaces/huggingchat/chat-ui-staging main
6 changes: 3 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ If you don't want to configure, setup, and launch your own Chat UI yourself, you

You can deploy your own customized Chat UI instance with any supported [LLM](https://huggingface.co/models?pipeline_tag=text-generation&sort=trending) of your choice on [Hugging Face Spaces](https://huggingface.co/spaces). To do so, use the chat-ui template [available here](https://huggingface.co/new-space?template=huggingchat/chat-ui-template).

Set `HUGGING_FACE_HUB_TOKEN` in [Space secrets](https://huggingface.co/docs/hub/spaces-overview#managing-secrets-and-environment-variables) to deploy a model with gated access or a model in a private repository. It's also compatible with [Inference for PROs](https://huggingface.co/blog/inference-pro) curated list of powerful models with higher rate limits. Make sure to create your personal token first in your [User Access Tokens settings](https://huggingface.co/settings/tokens).
Set `HF_TOKEN` in [Space secrets](https://huggingface.co/docs/hub/spaces-overview#managing-secrets-and-environment-variables) to deploy a model with gated access or a model in a private repository. It's also compatible with [Inference for PROs](https://huggingface.co/blog/inference-pro) curated list of powerful models with higher rate limits. Make sure to create your personal token first in your [User Access Tokens settings](https://huggingface.co/settings/tokens).

Read the full tutorial [here](https://huggingface.co/docs/hub/spaces-sdks-docker-chatui#chatui-on-spaces).

Expand All @@ -42,7 +42,7 @@ Start by creating a `.env.local` file in the root of the repository. The bare mi

```env
MONGODB_URL=<the URL to your MongoDB instance>
HF_ACCESS_TOKEN=<your access token>
HF_TOKEN=<your access token>
```

### Database
Expand Down Expand Up @@ -397,7 +397,7 @@ You can then add the generated information and the `authorization` parameter to
]
```

Please note that if `HF_ACCESS_TOKEN` is also set or not empty, it will take precedence.
Please note that if `HF_TOKEN` is also set or not empty, it will take precedence.

#### Models hosted on multiple custom endpoints

Expand Down
8 changes: 4 additions & 4 deletions scripts/updateProdEnv.ts
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
import fs from "fs";

const HF_TOKEN = process.env.HF_TOKEN; // token used for pushing to hub
const HF_DEPLOYMENT_TOKEN = process.env.HF_DEPLOYMENT_TOKEN; // token used for pushing to hub

const SERPER_API_KEY = process.env.SERPER_API_KEY;
const OPENID_CONFIG = process.env.OPENID_CONFIG;
const MONGODB_URL = process.env.MONGODB_URL;
const HF_ACCESS_TOKEN = process.env.HF_ACCESS_TOKEN; // token used for API requests in prod
const HF_TOKEN = process.env.HF_TOKEN ?? process.env.HF_ACCESS_TOKEN; // token used for API requests in prod

// Read the content of the file .env.template
const PUBLIC_CONFIG = fs.readFileSync(".env.template", "utf8");
Expand All @@ -15,7 +15,7 @@ const full_config = `${PUBLIC_CONFIG}
MONGODB_URL=${MONGODB_URL}
OPENID_CONFIG=${OPENID_CONFIG}
SERPER_API_KEY=${SERPER_API_KEY}
HF_ACCESS_TOKEN=${HF_ACCESS_TOKEN}
HF_TOKEN=${HF_TOKEN}
`;

// Make an HTTP POST request to add the space secrets
Expand All @@ -27,7 +27,7 @@ fetch(`https://huggingface.co/api/spaces/huggingchat/chat-ui/secrets`, {
description: `Env variable for HuggingChat. Last updated ${new Date().toISOString()}`,
}),
headers: {
Authorization: `Bearer ${HF_TOKEN}`,
Authorization: `Bearer ${HF_DEPLOYMENT_TOKEN}`,
"Content-Type": "application/json",
},
});
7 changes: 5 additions & 2 deletions src/lib/server/endpoints/llamacpp/endpointLlamacpp.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { HF_ACCESS_TOKEN } from "$env/static/private";
import { HF_ACCESS_TOKEN, HF_TOKEN } from "$env/static/private";
import { buildPrompt } from "$lib/buildPrompt";
import type { TextGenerationStreamOutput } from "@huggingface/inference";
import type { Endpoint } from "../endpoints";
Expand All @@ -9,7 +9,10 @@ export const endpointLlamacppParametersSchema = z.object({
model: z.any(),
type: z.literal("llamacpp"),
url: z.string().url().default("http://127.0.0.1:8080"),
accessToken: z.string().min(1).default(HF_ACCESS_TOKEN),
accessToken: z
.string()
.min(1)
.default(HF_TOKEN ?? HF_ACCESS_TOKEN),
});

export function endpointLlamacpp(
Expand Down
6 changes: 3 additions & 3 deletions src/lib/server/endpoints/tgi/endpointTgi.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { HF_ACCESS_TOKEN } from "$env/static/private";
import { HF_ACCESS_TOKEN, HF_TOKEN } from "$env/static/private";
import { buildPrompt } from "$lib/buildPrompt";
import { textGenerationStream } from "@huggingface/inference";
import type { Endpoint } from "../endpoints";
Expand All @@ -9,7 +9,7 @@ export const endpointTgiParametersSchema = z.object({
model: z.any(),
type: z.literal("tgi"),
url: z.string().url(),
accessToken: z.string().default(HF_ACCESS_TOKEN),
accessToken: z.string().default(HF_TOKEN ?? HF_ACCESS_TOKEN),
authorization: z.string().optional(),
});

Expand All @@ -35,7 +35,7 @@ export function endpointTgi(input: z.input<typeof endpointTgiParametersSchema>):
use_cache: false,
fetch: async (endpointUrl, info) => {
if (info && authorization && !accessToken) {
// Set authorization header if it is defined and HF_ACCESS_TOKEN is empty
// Set authorization header if it is defined and HF_TOKEN is empty
info.headers = {
...info.headers,
Authorization: authorization,
Expand Down
11 changes: 9 additions & 2 deletions src/lib/server/models.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,11 @@
import { HF_ACCESS_TOKEN, HF_API_ROOT, MODELS, OLD_MODELS, TASK_MODEL } from "$env/static/private";
import {
HF_TOKEN,
HF_API_ROOT,
MODELS,
OLD_MODELS,
TASK_MODEL,
HF_ACCESS_TOKEN,
} from "$env/static/private";
import type { ChatTemplateInput } from "$lib/types/Template";
import { compileTemplate } from "$lib/utils/template";
import { z } from "zod";
Expand Down Expand Up @@ -80,7 +87,7 @@ const addEndpoint = (m: Awaited<ReturnType<typeof processModel>>) => ({
return endpointTgi({
type: "tgi",
url: `${HF_API_ROOT}/${m.name}`,
accessToken: HF_ACCESS_TOKEN,
accessToken: HF_TOKEN ?? HF_ACCESS_TOKEN,
weight: 1,
model: m,
});
Expand Down

0 comments on commit 3cbea34

Please sign in to comment.