fix accesstokens #962

Merged · 3 commits · Mar 28, 2023
service/src/index.ts (13 changes: 11 additions & 2 deletions)
@@ -25,12 +25,21 @@ router.post('/chat-process', [auth, limiter], async (req, res) => {
try {
const { prompt, options = {}, systemMessage } = req.body as RequestProps
let firstChunk = true
let chatLength = 0
let newChatLength = 0
await chatReplyProcess({
message: prompt,
lastContext: options,
process: (chat: ChatMessage) => {
res.write(firstChunk ? JSON.stringify(chat) : `\n${JSON.stringify(chat)}`)
firstChunk = false
if (firstChunk) {
res.write(`${JSON.stringify(chat)}t1h1i4s5i1s4a1s9i1l9l8y1s0plit`)
firstChunk = false
}
else if (chatLength !== chat.text.length) {
newChatLength = chat.text.length
res.write(chat.text.substring(chatLength, newChatLength))
chatLength = newChatLength
}
},
systemMessage,
})
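For orientation, the server change above replaces the old newline-delimited JSON stream with a three-part stream: the first ChatMessage serialized as JSON, the fixed marker string t1h1i4s5i1s4a1s9i1l9l8y1s0plit, and then only the newly generated characters of chat.text. A minimal standalone sketch of that writer, assuming a hypothetical StreamChunk type and a write callback standing in for the real ChatMessage and res.write:

// Sketch only: StreamChunk, makeDeltaWriter and write are illustrative names,
// not part of this repository.
interface StreamChunk {
  id: string
  text: string // full accumulated reply text so far
}

const MAGIC_SPLIT = 't1h1i4s5i1s4a1s9i1l9l8y1s0plit'

function makeDeltaWriter(write: (s: string) => void) {
  let firstChunk = true
  let chatLength = 0

  return (chunk: StreamChunk) => {
    if (firstChunk) {
      // First event: the full JSON payload plus the marker, so the client can
      // tell where the metadata ends and the raw reply text begins.
      write(`${JSON.stringify(chunk)}${MAGIC_SPLIT}`)
      firstChunk = false
    }
    else if (chatLength !== chunk.text.length) {
      // Later events: only the characters added since the last write.
      // Because chatLength starts at 0, everything written after the marker
      // concatenates back into the full reply text.
      write(chunk.text.substring(chatLength))
      chatLength = chunk.text.length
    }
  }
}

// Usage (illustrative): const emit = makeDeltaWriter(s => res.write(s)),
// then call emit(chat) from the process callback.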
src/views/chat/index.vue (178 changes: 90 additions & 88 deletions)
@@ -107,7 +107,9 @@ async function onConversation() {
scrollToBottom()

try {
let lastText = ''
const magicSplit = 't1h1i4s5i1s4a1s9i1l9l8y1s0plit'
let renderText = ''
let firstTime = true
const fetchChatAPIOnce = async () => {
await fetchChatAPIProcess<Chat.ConversationResponse>({
prompt: message,
@@ -117,42 +119,49 @@
const xhr = event.target
const { responseText } = xhr
// Always process the final line
const lastIndex = responseText.lastIndexOf('\n', responseText.length - 2)
let chunk = responseText
if (lastIndex !== -1)
chunk = responseText.substring(lastIndex)
try {
const data = JSON.parse(chunk)
updateChat(
+uuid,
dataSources.value.length - 1,
{
dateTime: new Date().toLocaleString(),
text: lastText + data.text ?? '',
inversion: false,
error: false,
loading: false,
conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id },
requestOptions: { prompt: message, options: { ...options } },
},
)

if (openLongReply && data.detail.choices[0].finish_reason === 'length') {
options.parentMessageId = data.id
lastText = data.text
message = ''
return fetchChatAPIOnce()
}

scrollToBottomIfAtBottom()
}
catch (error) {
//
const splitIndexBegin = responseText.search(magicSplit)
if (splitIndexBegin !== -1) {
const splitIndexEnd = splitIndexBegin + magicSplit.length

const firstChunk = responseText.substring(0, splitIndexBegin)
const deltaText = responseText.substring(splitIndexEnd)
try {
const data = JSON.parse(firstChunk)
if (firstTime) {
firstTime = false
renderText = data.text ?? ''
}
else {
renderText = deltaText ?? ''
}
updateChat(
+uuid,
dataSources.value.length - 1,
{
dateTime: new Date().toLocaleString(),
text: renderText,
inversion: false,
error: false,
loading: false,
conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id },
requestOptions: { prompt: message, ...options },
},
)

if (openLongReply && data.detail.choices[0].finish_reason === 'length') {
options.parentMessageId = data.id
message = ''
return fetchChatAPIOnce()
}
}
catch (error) {
//
}
}
},
})
}

await fetchChatAPIOnce()
}
catch (error: any) {
@@ -237,7 +246,9 @@ async function onRegenerate(index: number) {
)

try {
let lastText = ''
const magicSplit = 't1h1i4s5i1s4a1s9i1l9l8y1s0plit'
let renderText = ''
let firstTime = true
const fetchChatAPIOnce = async () => {
await fetchChatAPIProcess<Chat.ConversationResponse>({
prompt: message,
@@ -247,35 +258,45 @@
const xhr = event.target
const { responseText } = xhr
// Always process the final line
const lastIndex = responseText.lastIndexOf('\n', responseText.length - 2)
let chunk = responseText
if (lastIndex !== -1)
chunk = responseText.substring(lastIndex)
try {
const data = JSON.parse(chunk)
updateChat(
+uuid,
index,
{
dateTime: new Date().toLocaleString(),
text: lastText + data.text ?? '',
inversion: false,
error: false,
loading: false,
conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id },
requestOptions: { prompt: message, ...options },
},
)

if (openLongReply && data.detail.choices[0].finish_reason === 'length') {
options.parentMessageId = data.id
lastText = data.text
message = ''
return fetchChatAPIOnce()

const splitIndexBegin = responseText.search(magicSplit)
if (splitIndexBegin !== -1) {
const splitIndexEnd = splitIndexBegin + magicSplit.length

const firstChunk = responseText.substring(0, splitIndexBegin)
const deltaText = responseText.substring(splitIndexEnd)
try {
const data = JSON.parse(firstChunk)
if (firstTime) {
firstTime = false
renderText = data.text ?? ''
}
else {
renderText = deltaText ?? ''
}
updateChat(
+uuid,
index,
{
dateTime: new Date().toLocaleString(),
text: renderText,
inversion: false,
error: false,
loading: false,
conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id },
requestOptions: { prompt: message, ...options },
},
)

if (openLongReply && data.detail.choices[0].finish_reason === 'length') {
options.parentMessageId = data.id
message = ''
return fetchChatAPIOnce()
}
}
catch (error) {
//
}
}
catch (error) {
//
}
},
})
@@ -467,20 +488,13 @@ onUnmounted(() => {
<template>
<div class="flex flex-col w-full h-full">
<HeaderComponent
v-if="isMobile"
:using-context="usingContext"
@export="handleExport"
v-if="isMobile" :using-context="usingContext" @export="handleExport"
@toggle-using-context="toggleUsingContext"
/>
<main class="flex-1 overflow-hidden">
<div
id="scrollRef"
ref="scrollRef"
class="h-full overflow-hidden overflow-y-auto"
>
<div id="scrollRef" ref="scrollRef" class="h-full overflow-hidden overflow-y-auto">
<div
id="image-wrapper"
class="w-full max-w-screen-xl m-auto dark:bg-[#101014]"
id="image-wrapper" class="w-full max-w-screen-xl m-auto dark:bg-[#101014]"
:class="[isMobile ? 'p-2' : 'p-4']"
>
<template v-if="!dataSources.length">
@@ -492,14 +506,8 @@
<template v-else>
<div>
<Message
v-for="(item, index) of dataSources"
:key="index"
:date-time="item.dateTime"
:text="item.text"
:inversion="item.inversion"
:error="item.error"
:loading="item.loading"
@regenerate="onRegenerate(index)"
v-for="(item, index) of dataSources" :key="index" :date-time="item.dateTime" :text="item.text"
:inversion="item.inversion" :error="item.error" :loading="item.loading" @regenerate="onRegenerate(index)"
@delete="handleDelete(index)"
/>
<div class="sticky bottom-0 left-0 flex justify-center">
@@ -536,15 +544,9 @@
<NAutoComplete v-model:value="prompt" :options="searchOptions" :render-label="renderOption">
<template #default="{ handleInput, handleBlur, handleFocus }">
<NInput
ref="inputRef"
v-model:value="prompt"
type="textarea"
:placeholder="placeholder"
:autosize="{ minRows: 1, maxRows: isMobile ? 4 : 8 }"
@input="handleInput"
@focus="handleFocus"
@blur="handleBlur"
@keypress="handleEnter"
ref="inputRef" v-model:value="prompt" type="textarea" :placeholder="placeholder"
:autosize="{ minRows: 1, maxRows: isMobile ? 4 : 8 }" @input="handleInput" @focus="handleFocus"
@blur="handleBlur" @keypress="handleEnter"
/>
</template>
</NAutoComplete>
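On the client side, both onConversation and onRegenerate now locate the same marker in xhr.responseText: everything before it is the JSON metadata of the first chunk, everything after it is the accumulated reply text. A minimal sketch of that parsing step, assuming hypothetical names parseStream and ParsedResponse (the real handlers feed the result into updateChat):

// Sketch only: parseStream and ParsedResponse are illustrative, not part of the PR.
const MAGIC_SPLIT = 't1h1i4s5i1s4a1s9i1l9l8y1s0plit'

interface ParsedResponse {
  meta: { id?: string; conversationId?: string; text?: string }
  deltaText: string
}

function parseStream(responseText: string): ParsedResponse | null {
  // The diff uses responseText.search(magicSplit), which treats the marker as a
  // regular expression; that is safe only because the marker contains no regex
  // metacharacters. indexOf is the literal-string equivalent.
  const begin = responseText.indexOf(MAGIC_SPLIT)
  if (begin === -1)
    return null // marker not received yet, nothing to render

  const end = begin + MAGIC_SPLIT.length
  const meta = JSON.parse(responseText.substring(0, begin))
  // Everything after the marker is the concatenation of all deltas written by
  // the server, i.e. the full reply generated so far.
  return { meta, deltaText: responseText.substring(end) }
}

// Usage (illustrative), inside onDownloadProgress:
//   const parsed = parseStream(xhr.responseText)
//   if (parsed)
//     renderText = firstTime ? (parsed.meta.text ?? '') : parsed.deltaText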