From a62116f6cc471bde1ec14386782672b31e9dde04 Mon Sep 17 00:00:00 2001 From: Manuel Spigolon Date: Tue, 14 Sep 2021 14:20:13 +0200 Subject: [PATCH] feat breaking: throw file size limit on toBuffer calls (#265) * add toBuffer aligned to throwFileSizeLimit * add docs --- README.md | 14 +++- index.js | 9 +++ test/multipart-fileLimit.test.js | 133 +++++++++++++++++++++++++++++++ 3 files changed, 155 insertions(+), 1 deletion(-) create mode 100644 test/multipart-fileLimit.test.js diff --git a/README.md b/README.md index 2332a73b..a3166755 100644 --- a/README.md +++ b/README.md @@ -93,7 +93,10 @@ fastify.register(require('fastify-multipart'), { **Note**: if the file stream that is provided by `data.file` is not consumed, like in the example below with the usage of pump, the promise will not be fulfilled at the end of the multipart processing. This behavior is inherited from [busboy](https://github.com/mscdex/busboy). -**Note**: if you set a `fileSize` limit and you want to know if the file limit was reached you can listen to `data.file.on('limit')` or check at the end of the stream the property `data.file.truncated`. +**Note**: if you set a `fileSize` limit and you want to know if the file limit was reached you can: +- listen to `data.file.on('limit')` +- or check at the end of the stream the property `data.file.truncated` +- or call `data.toBuffer()` and wait for the error to be thrown ```js const data = await req.file() @@ -103,6 +106,15 @@ if (data.file.truncated) { // before the `limits.fileSize` has been reached reply.send(new fastify.multipartErrors.FilesLimitError()); } + +// OR +const data = await req.file() +try { + const buffer = await data.toBuffer() +} catch (err) { + // fileSize limit reached! +} + ``` Additionally, you can pass per-request options to the `req.file`, `req.files`, `req.saveRequestFiles` or `req.multipartIterator` function. 
diff --git a/index.js b/index.js index 94e01a87..8d2b1de0 100644 --- a/index.js +++ b/index.js @@ -321,6 +321,7 @@ function fastifyMultipart (fastify, options, done) { opts ]) + this.log.trace({ busboyOptions }, 'Providing options to busboy') const bb = busboy(busboyOptions) request.on('close', cleanup) @@ -406,6 +407,14 @@ function fastifyMultipart (fastify, options, done) { const fileChunks = [] for await (const chunk of this.file) { fileChunks.push(chunk) + + if (throwFileSizeLimit && this.file.truncated) { + const err = new RequestFileTooLargeError() + err.part = this + + onError(err) + throw err + } } this._buf = Buffer.concat(fileChunks) return this._buf diff --git a/test/multipart-fileLimit.test.js b/test/multipart-fileLimit.test.js new file mode 100644 index 00000000..19e2c703 --- /dev/null +++ b/test/multipart-fileLimit.test.js @@ -0,0 +1,133 @@ +'use strict' + +const util = require('util') +const crypto = require('crypto') +const test = require('tap').test +const FormData = require('form-data') +const Fastify = require('fastify') +const multipart = require('..') +const http = require('http') +const stream = require('stream') +const pump = util.promisify(stream.pipeline) +const EventEmitter = require('events') +const { once } = EventEmitter + +test('should throw fileSize limitation error when consuming the stream', async function (t) { + t.plan(4) + + const fastify = Fastify() + t.teardown(fastify.close.bind(fastify)) + + fastify.register(multipart, { + throwFileSizeLimit: true, + limits: { + fileSize: 524288 + } + }) + + fastify.post('/', async function (req, reply) { + t.ok(req.isMultipart()) + + const part = await req.file() + t.pass('the file is not consumed yet') + + try { + await part.toBuffer() + t.fail('it should throw') + } catch (error) { + t.ok(error) + reply.send(error) + } + }) + + await fastify.listen(0) + + // request + const form = new FormData() + const opts = { + protocol: 'http:', + hostname: 'localhost', + port: 
fastify.server.address().port, + path: '/', + headers: form.getHeaders(), + method: 'POST' + } + + const randomFileBuffer = Buffer.alloc(600000) + crypto.randomFillSync(randomFileBuffer) + + const req = http.request(opts) + form.append('upload', randomFileBuffer) + + pump(form, req) + + try { + const [res] = await once(req, 'response') + t.equal(res.statusCode, 413) + res.resume() + await once(res, 'end') + } catch (error) { + t.error(error, 'request') + } +}) + +test('should NOT throw fileSize limitation error when consuming the stream', async function (t) { + t.plan(5) + + const fastify = Fastify() + t.teardown(fastify.close.bind(fastify)) + + fastify.register(multipart, { + throwFileSizeLimit: false, + limits: { + fileSize: 524288 + } + }) + const fileInputLength = 600000 + + fastify.post('/', async function (req, reply) { + t.ok(req.isMultipart()) + + const part = await req.file() + t.pass('the file is not consumed yet') + + try { + const buffer = await part.toBuffer() + t.ok(part.file.truncated) + t.notSame(buffer.length, fileInputLength) + reply.send(new fastify.multipartErrors.FilesLimitError()) + } catch (error) { + t.fail('it should not throw') + } + }) + + await fastify.listen(0) + + // request + const form = new FormData() + const opts = { + protocol: 'http:', + hostname: 'localhost', + port: fastify.server.address().port, + path: '/', + headers: form.getHeaders(), + method: 'POST' + } + + const randomFileBuffer = Buffer.alloc(fileInputLength) + crypto.randomFillSync(randomFileBuffer) + + const req = http.request(opts) + form.append('upload', randomFileBuffer) + + pump(form, req) + + try { + const [res] = await once(req, 'response') + t.equal(res.statusCode, 413) + res.resume() + await once(res, 'end') + } catch (error) { + t.error(error, 'request') + } +})