fix: #3673 - API responds with Request body is too large (#3729)
louis-jan authored Sep 24, 2024
1 parent 886b1cb commit dbc4bed
Showing 3 changed files with 50 additions and 2 deletions.
41 changes: 41 additions & 0 deletions core/src/node/api/restful/helper/builder.test.ts
@@ -236,6 +236,47 @@ describe('builder helper functions', () => {
    })
  })

  it('should return the error on status not ok', async () => {
    const request = { body: { model: 'model1' } }
    const mockSend = jest.fn()
    const reply = {
      code: jest.fn().mockReturnThis(),
      send: jest.fn(),
      headers: jest.fn().mockReturnValue({
        send: mockSend,
      }),
      raw: {
        writeHead: jest.fn(),
        pipe: jest.fn(),
      },
    }

    ;(existsSync as jest.Mock).mockReturnValue(true)
    ;(readdirSync as jest.Mock).mockReturnValue(['file1'])
    ;(readFileSync as jest.Mock).mockReturnValue(
      JSON.stringify({ id: 'model1', engine: 'openai' })
    )

    // Mock fetch
    const fetch = require('node-fetch')
    fetch.mockResolvedValue({
      status: 400,
      headers: new Map([
        ['content-type', 'application/json'],
        ['x-request-id', '123456'],
      ]),
      body: { pipe: jest.fn() },
      text: jest.fn().mockResolvedValue({ error: 'Mock error response' }),
    })
    await chatCompletions(request, reply)
    expect(reply.code).toHaveBeenCalledWith(400)
    expect(mockSend).toHaveBeenCalledWith(
      expect.objectContaining({
        error: 'Mock error response',
      })
    )
  })

  it('should return the chat completions', async () => {
    const request = { body: { model: 'model1' } }
    const reply = {
6 changes: 4 additions & 2 deletions core/src/node/api/restful/helper/builder.ts
@@ -353,8 +353,10 @@ export const chatCompletions = async (request: any, reply: any) => {
    body: JSON.stringify(request.body),
  })
  if (response.status !== 200) {
    console.error(response)
    reply.code(400).send(response)
    // Forward the error response to client via reply
    const responseBody = await response.text()
    const responseHeaders = Object.fromEntries(response.headers)
    reply.code(response.status).headers(responseHeaders).send(responseBody)
  } else {
    reply.raw.writeHead(200, {
      'Content-Type': request.body.stream === true ? 'text/event-stream' : 'application/json',
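
For reference, a minimal sketch (not the project's actual code) of the forwarding pattern added above, assuming a fetch-style response and a Fastify reply; the forwardUpstreamError name is hypothetical. Object.fromEntries works here because fetch Headers are iterable as [name, value] pairs, so they convert directly into the plain object that reply.headers() accepts.

// Hypothetical helper illustrating the pattern above; not part of the commit.
async function forwardUpstreamError(response: Response, reply: any): Promise<void> {
  // Read the upstream body as text so error payloads are passed through verbatim.
  const responseBody = await response.text()
  // Convert fetch Headers (iterable of [name, value] pairs) into a plain object.
  const responseHeaders = Object.fromEntries(response.headers)
  // Relay the original status, headers, and body instead of a generic 400.
  reply.code(response.status).headers(responseHeaders).send(responseBody)
}
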
5 changes: 5 additions & 0 deletions server/index.ts
@@ -67,6 +67,11 @@ export const startServer = async (configs?: ServerConfig): Promise<boolean> => {
  // Initialize Fastify server with logging
  server = fastify({
    logger: new Logger(),
    // Set body limit to 100MB - Default is 1MB
    // According to OpenAI - a batch input file can be up to 100 MB in size
    // Whisper endpoints accept up to 25MB
    // Vision endpoints accept up to 4MB
    bodyLimit: 104_857_600
  })

  // Register CORS if enabled
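
For context, Fastify rejects any payload larger than bodyLimit before it reaches a route handler, answering HTTP 413 with its body-size error ("Request body is too large"), which is the error reported in #3673; the default limit is 1 MiB. Below is a minimal standalone sketch of the option in isolation, assuming a plain Fastify app; the route and port are illustrative only, not the project's server setup.

// Standalone sketch, not the project's server.
import fastify from 'fastify'

const app = fastify({
  // Raise the limit from the 1 MiB default so large JSON bodies
  // (e.g. batch inputs up to 100 MB) are not rejected with 413.
  bodyLimit: 104_857_600, // 100 MB
})

// Illustrative route: echoes whatever JSON body was accepted.
app.post('/echo', async (request) => request.body)

app.listen({ port: 3000 })
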
