diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 721ff308..104438c3 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -18,7 +18,7 @@ jobs:
       # We do not want to run CRUD tests in parallel
       max-parallel: 1
       matrix:
-        node-version: [16.x, 18.x, 19.x]
+        node-version: [16.x, 18.x, 19.0.1]
         # See supported Node.js release schedule at https://nodejs.org/en/about/releases/

     steps:
diff --git a/README.md b/README.md
index 2056e0a8..4969ef47 100644
--- a/README.md
+++ b/README.md
@@ -33,6 +33,7 @@ $ npm install tus-node-server

 - **Amazon S3**

+  Using Key/Secret:
   ```js
   server.datastore = new tus.S3Store({
     bucket: 'bucket-name',
@@ -43,6 +44,23 @@ $ npm install tus-node-server
   })
   ```

+  Using [credentials](https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Credentials.html#constructor-property) to fetch credentials inside an AWS container, such as an ECS container, which injects the required environment variables. The `credentials` config is passed directly to the AWS SDK, so refer to the AWS docs for the supported values of `credentials`.
+
+  For example, with `ECSCredentials`:
+
+  ```js
+  server.datastore = new tus.S3Store({
+    path: '/files',
+    bucket: 'bucket-name',
+    credentials: new AWS.ECSCredentials({
+      httpOptions: { timeout: 5000 },
+      maxRetries: 10,
+    }),
+    region: 'eu-west-1',
+    partSize: 8 * 1024 * 1024, // each uploaded part will have ~8MB
+    tmpDirPrefix: 'tus-s3-store',
+  });
+  ```

 ## Quick Start

 #### Use the [tus-node-deploy](https://hub.docker.com/r/bhstahl/tus-node-deploy/) Docker image
@@ -129,7 +147,9 @@ const fastify = require('fastify')({logger: true})
  * without any parser to leave body untouched
  * @see https://www.fastify.io/docs/latest/Reference/ContentTypeParser/
  */
-fastify.addContentTypeParser('application/offset+octet-stream', async () => true)
+fastify.addContentTypeParser(
+  'application/offset+octet-stream', (request, payload, done) => done(null)
+);

 /**
  * let tus handle preparation and filehandling requests
@@ -254,7 +274,7 @@ const server = new tus.Server({
 })
 ```

-## Demo 
+## Demo

 Start the demo server using Local File Storage
diff --git a/lib/Server.ts b/lib/Server.ts
index 903c014d..7c57e4d8 100644
--- a/lib/Server.ts
+++ b/lib/Server.ts
@@ -193,4 +193,12 @@ export class Server extends EventEmitter {
   listen(...args: any[]): http.Server {
     return http.createServer(this.handle.bind(this)).listen(...args)
   }
+
+  cleanUpExpiredUploads(): Promise<number> {
+    if (!this.datastore.hasExtension('expiration')) {
+      throw ERRORS.UNSUPPORTED_EXPIRATION_EXTENSION
+    }
+
+    return this.datastore.deleteExpired()
+  }
 }
diff --git a/lib/configstores/MemoryConfigstore.ts b/lib/configstores/MemoryConfigstore.ts
index 5f4a8d18..76729ad9 100644
--- a/lib/configstores/MemoryConfigstore.ts
+++ b/lib/configstores/MemoryConfigstore.ts
@@ -20,4 +20,8 @@ export default class MemoryConfigstore {
   async delete(key: string) {
     return this.data.delete(key)
   }
+
+  get all(): Record<string, Upload> {
+    return Object.fromEntries(this.data.entries())
+  }
 }
diff --git a/lib/constants.ts b/lib/constants.ts
index 774ea460..4cd92445 100644
--- a/lib/constants.ts
+++ b/lib/constants.ts
@@ -59,6 +59,10 @@ export const ERRORS = {
     status_code: 501,
     body: 'creation-defer-length extension is not (yet) supported.\n',
   },
+  UNSUPPORTED_EXPIRATION_EXTENSION: {
+    status_code: 501,
+    body: 'expiration extension is not (yet) supported.\n',
+  },
 } as const
 export const EVENT_ENDPOINT_CREATED = 'EVENT_ENDPOINT_CREATED' as const
 export const EVENT_FILE_CREATED = 'EVENT_FILE_CREATED' as const
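A minimal usage sketch for the expiration pieces added above (`expirationPeriodInMilliseconds` on `FileStore`, `Server.cleanUpExpiredUploads()`, and the new `UNSUPPORTED_EXPIRATION_EXTENSION` error). The 24-hour period, the `setInterval` scheduling, and the listen address are illustrative assumptions, not part of this diff:

```ts
const tus = require('tus-node-server')

// Illustrative retention period; pick whatever fits your deployment.
const EXPIRATION_PERIOD = 24 * 60 * 60 * 1000

const server = new tus.Server({
  path: '/files',
  datastore: new tus.FileStore({
    directory: './files',
    expirationPeriodInMilliseconds: EXPIRATION_PERIOD,
  }),
})

// cleanUpExpiredUploads() throws ERRORS.UNSUPPORTED_EXPIRATION_EXTENSION when
// the configured datastore does not support the 'expiration' extension
// (FileStore above does), and resolves with the number of removed uploads.
setInterval(() => {
  server
    .cleanUpExpiredUploads()
    .then((removed: number) => console.log(`removed ${removed} expired upload(s)`))
    .catch(console.error)
}, EXPIRATION_PERIOD)

server.listen({host: '127.0.0.1', port: 1080})
```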
diff --git a/lib/handlers/HeadHandler.ts b/lib/handlers/HeadHandler.ts
index 2bc10e0c..7aaaae54 100644
--- a/lib/handlers/HeadHandler.ts
+++ b/lib/handlers/HeadHandler.ts
@@ -12,6 +12,21 @@ export default class HeadHandler extends BaseHandler {
     }

     const file = await this.store.getUpload(id)
+
+    // If a Client does attempt to resume an upload which has since
+    // been removed by the Server, the Server SHOULD respond with
+    // the 404 Not Found or 410 Gone status. The latter one SHOULD
+    // be used if the Server is keeping track of expired uploads.
+    const now = new Date()
+    if (
+      this.store.hasExtension('expiration') &&
+      this.store.getExpiration() > 0 &&
+      file.creation_date &&
+      now > new Date(new Date(file.creation_date).getTime() + this.store.getExpiration())
+    ) {
+      throw ERRORS.FILE_NO_LONGER_EXISTS
+    }
+
     // The Server MUST prevent the client and/or proxies from
     // caching the response by adding the Cache-Control: no-store
     // header to the response.
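The expiration check above is repeated almost verbatim in `PatchHandler` below. A possible follow-up (not part of this diff) would be to pull it into a shared helper, roughly:

```ts
// Hypothetical shared helper; both handlers could delegate to it.
import type Upload from '../models/Upload'
import type DataStore from '../stores/DataStore'

export function isExpired(store: DataStore, file: Upload, now = new Date()): boolean {
  // Only stores that advertise the extension and set a positive period expire uploads.
  if (!store.hasExtension('expiration') || store.getExpiration() <= 0) {
    return false
  }
  if (!file.creation_date) {
    return false
  }
  const expires = new Date(new Date(file.creation_date).getTime() + store.getExpiration())
  return now > expires
}
```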
diff --git a/lib/handlers/PatchHandler.ts b/lib/handlers/PatchHandler.ts
index 5ae0d1d8..392c4d64 100644
--- a/lib/handlers/PatchHandler.ts
+++ b/lib/handlers/PatchHandler.ts
@@ -32,6 +32,21 @@ export default class PatchHandler extends BaseHandler {

     const file = await this.store.getUpload(id)

+    // If a Client does attempt to resume an upload which has since
+    // been removed by the Server, the Server SHOULD respond with
+    // the 404 Not Found or 410 Gone status. The latter one SHOULD
+    // be used if the Server is keeping track of expired uploads.
+    const creation = file.creation_date ? new Date(file.creation_date) : new Date()
+    const expiration = new Date(creation.getTime() + this.store.getExpiration())
+    const now = new Date()
+    if (
+      this.store.hasExtension('expiration') &&
+      this.store.getExpiration() > 0 &&
+      now > expiration
+    ) {
+      throw ERRORS.FILE_NO_LONGER_EXISTS
+    }
+
     if (file.offset !== offset) {
       // If the offsets do not match, the Server MUST respond with the 409 Conflict status without modifying the upload resource.
       log(
@@ -67,11 +82,27 @@ export default class PatchHandler extends BaseHandler {
       this.emit(EVENTS.EVENT_UPLOAD_COMPLETE, {file})
     }

-    // It MUST include the Upload-Offset header containing the new offset.
-    const headers = {
+    const headers: {
+      'Upload-Offset': number
+      'Upload-Expires'?: string
+    } = {
       'Upload-Offset': new_offset,
     }

+    if (
+      this.store.hasExtension('expiration') &&
+      this.store.getExpiration() > 0 &&
+      file.creation_date &&
+      (file.size === undefined || new_offset < file.size)
+    ) {
+      const creation = new Date(file.creation_date)
+      // Value MUST be in RFC 7231 datetime format
+      const dateString = new Date(
+        creation.getTime() + this.store.getExpiration()
+      ).toUTCString()
+      headers['Upload-Expires'] = dateString
+    }
+
     // The Server MUST acknowledge successful PATCH requests with the 204
     return this.write(res, 204, headers)
   }
diff --git a/lib/handlers/PostHandler.ts b/lib/handlers/PostHandler.ts
index ac4fb890..9ea8d819 100644
--- a/lib/handlers/PostHandler.ts
+++ b/lib/handlers/PostHandler.ts
@@ -73,7 +73,10 @@ export default class PostHandler extends BaseHandler {
     const url = this.generateUrl(req, file.id)
     this.emit(EVENTS.EVENT_ENDPOINT_CREATED, {url})

-    const optional_headers: {'Upload-Offset'?: string} = {}
+    const optional_headers: {
+      'Upload-Offset'?: string
+      'Upload-Expires'?: string
+    } = {}

     // The request MIGHT include a Content-Type header when using creation-with-upload extension
     if (!RequestValidator.isInvalidHeader('content-type', req.headers['content-type'])) {
@@ -85,6 +88,23 @@ export default class PostHandler extends BaseHandler {
       }
     }

+    // The Upload-Expires response header indicates the time after which the unfinished upload expires.
+    // If expiration is known at creation time, Upload-Expires header MUST be included in the response
+    if (
+      this.store.hasExtension('expiration') &&
+      this.store.getExpiration() > 0 &&
+      file.creation_date
+    ) {
+      const created = await this.store.getUpload(file.id)
+      if (created.offset !== Number.parseInt(upload_length as string, 10)) {
+        const creation = new Date(file.creation_date)
+        // Value MUST be in RFC 7231 datetime format
+        optional_headers['Upload-Expires'] = new Date(
+          creation.getTime() + this.store.getExpiration()
+        ).toUTCString()
+      }
+    }
+
     return this.write(res, 201, {Location: url, ...optional_headers})
   }
 }
diff --git a/lib/models/Upload.ts b/lib/models/Upload.ts
index 94753bb0..e2c688ed 100644
--- a/lib/models/Upload.ts
+++ b/lib/models/Upload.ts
@@ -3,6 +3,7 @@ type TUpload = {
   size?: number
   offset: number
   metadata?: string
+  creation_date?: string
 }

 export default class Upload {
@@ -10,6 +11,7 @@ export default class Upload {
   metadata?: TUpload['metadata']
   size?: TUpload['size']
   offset: TUpload['offset']
+  creation_date: TUpload['creation_date']

   constructor(upload: TUpload) {
     if (!upload.id) {
@@ -20,6 +22,8 @@ export default class Upload {
     this.size = upload.size
     this.offset = upload.offset
     this.metadata = upload.metadata
+
+    this.creation_date = upload.creation_date ?? new Date().toISOString()
   }

   get sizeIsDeferred(): boolean {
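For reference, this is roughly how the `Upload-Expires` value produced by `PostHandler` and `PatchHandler` is derived from the new `creation_date` field; the id, size, and one-hour period below are made-up example values:

```ts
import Upload from '../lib/models/Upload'

// What store.getExpiration() would return (one hour here).
const expirationPeriodInMilliseconds = 60 * 60 * 1000

// creation_date defaults to new Date().toISOString() when not supplied.
const file = new Upload({id: 'example-upload-id', offset: 0, size: 1024})

const expires = new Date(
  new Date(file.creation_date as string).getTime() + expirationPeriodInMilliseconds
)

// RFC 7231 datetime, e.g. "Tue, 25 Oct 2022 12:00:00 GMT"
console.log(expires.toUTCString())
```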
diff --git a/lib/stores/DataStore.ts b/lib/stores/DataStore.ts
index 865101d8..7bf59bab 100644
--- a/lib/stores/DataStore.ts
+++ b/lib/stores/DataStore.ts
@@ -69,4 +69,15 @@ export default class DataStore extends EventEmitter {
    * Called in PATCH requests when upload length is known after being defered.
    */
   async declareUploadLength(id: string, upload_length: number) {}
+
+  /**
+   * Returns the number of expired uploads that were deleted.
+   */
+  async deleteExpired(): Promise<number> {
+    return 0
+  }
+
+  getExpiration(): number {
+    return 0
+  }
 }
diff --git a/lib/stores/FileStore.ts b/lib/stores/FileStore.ts
index f639c1d2..86916478 100644
--- a/lib/stores/FileStore.ts
+++ b/lib/stores/FileStore.ts
@@ -17,11 +17,13 @@ type Store = {
   get(key: string): Upload | undefined
   set(key: string, value: Upload): void
   delete(key: string): void
+  all: Record<string, Upload>
 }

 type Options = {
   directory: string
   configstore?: Store
+  expirationPeriodInMilliseconds?: number
 }

 const MASK = '0777'
@@ -32,16 +34,19 @@ const log = debug('tus-node-server:stores:filestore')
 export default class FileStore extends DataStore {
   directory: string
   configstore: Store
+  expirationPeriodInMilliseconds: number

-  constructor({directory, configstore}: Options) {
+  constructor({directory, configstore, expirationPeriodInMilliseconds}: Options) {
     super()
     this.directory = directory
     this.configstore = configstore ?? new Configstore(`${pkg.name}-${pkg.version}`)
+    this.expirationPeriodInMilliseconds = expirationPeriodInMilliseconds ?? 0
     this.extensions = [
       'creation',
       'creation-with-upload',
       'creation-defer-length',
       'termination',
+      'expiration',
     ]
     // TODO: this async call can not happen in the constructor
     this.checkOrCreateDirectory()
@@ -172,7 +177,13 @@ export default class FileStore extends DataStore {
         }

         return resolve(
-          new Upload({id, size: file.size, offset: stats.size, metadata: file.metadata})
+          new Upload({
+            id,
+            size: file.size,
+            offset: stats.size,
+            metadata: file.metadata,
+            creation_date: file.creation_date,
+          })
         )
       })
     })
@@ -189,4 +200,39 @@ export default class FileStore extends DataStore {

     this.configstore.set(id, file)
   }
+
+  async deleteExpired(): Promise<number> {
+    const now = new Date()
+    const toDelete: Promise<void>[] = []
+
+    const uploadInfos = this.configstore.all
+    for (const file_id of Object.keys(uploadInfos)) {
+      try {
+        const info = uploadInfos[file_id]
+        if (
+          info &&
+          'creation_date' in info &&
+          this.getExpiration() > 0 &&
+          info.size !== info.offset &&
+          info.creation_date
+        ) {
+          const creation = new Date(info.creation_date)
+          const expires = new Date(creation.getTime() + this.getExpiration())
+          if (now > expires) {
+            toDelete.push(this.remove(file_id))
+          }
+        }
+      } catch (error) {
+        if (error !== ERRORS.FILE_NO_LONGER_EXISTS) {
+          throw error
+        }
+      }
+    }
+
+    return Promise.all(toDelete).then(() => toDelete.length)
+  }
+
+  getExpiration(): number {
+    return this.expirationPeriodInMilliseconds
+  }
 }
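`DataStore` now ships no-op defaults, so any custom store can opt into the expiration extension the same way `FileStore` does above. A rough sketch (the class name and import path are hypothetical):

```ts
import DataStore from '../lib/stores/DataStore'

// Hypothetical store showing the two overrides the expiration extension relies on.
class MyStore extends DataStore {
  private expirationPeriodInMilliseconds: number

  constructor(expirationPeriodInMilliseconds = 0) {
    super()
    this.extensions = ['creation', 'expiration']
    this.expirationPeriodInMilliseconds = expirationPeriodInMilliseconds
  }

  getExpiration(): number {
    return this.expirationPeriodInMilliseconds
  }

  async deleteExpired(): Promise<number> {
    // Walk the store's own bookkeeping, remove uploads whose
    // creation_date + getExpiration() lies in the past, and return the count.
    return 0
  }
}
```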
diff --git a/lib/stores/S3Store.ts b/lib/stores/S3Store.ts
index 627a845a..1c3c22df 100644
--- a/lib/stores/S3Store.ts
+++ b/lib/stores/S3Store.ts
@@ -65,11 +65,13 @@ export default class S3Store extends DataStore {
   constructor(options: Options) {
     super()
     const {bucket, partSize, ...rest} = options
-    // TODO: these are deprecated. Remove these, look up new best practise,
-    // and reflect that in the docs
-    assert.ok(options.accessKeyId, '[S3Store] `accessKeyId` must be set')
-    assert.ok(options.secretAccessKey, '[S3Store] `secretAccessKey` must be set')
-    assert.ok(bucket, '[S3Store] `bucket` must be set')
+    assert.ok(bucket, '[S3Store] `bucket` must be set')
+    if (options.accessKeyId || options.secretAccessKey) {
+      assert.ok(options.accessKeyId, '[S3Store] `accessKeyId` must be set')
+      assert.ok(options.secretAccessKey, '[S3Store] `secretAccessKey` must be set')
+    } else {
+      assert.ok(options.credentials, '[S3Store] `credentials` must be set')
+    }
     this.extensions = ['creation', 'creation-with-upload', 'creation-defer-length']
     this.bucket = bucket
@@ -481,13 +482,12 @@
     try {
       const parts = await this.retrieveParts(id)
-      return {
+      return new Upload({
         id,
         ...this.cache.get(id)?.file,
         offset: calcOffsetFromParts(parts),
         size: metadata.file.size,
-        sizeIsDeferred: metadata.file.sizeIsDeferred,
-      }
+      })
       // eslint-disable-next-line @typescript-eslint/no-explicit-any
     } catch (error: any) {
       if (error.code !== 'NoSuchUpload') {
@@ -498,13 +498,12 @@
       // When the last part of an upload is finished and the file is successfully written to S3,
       // the upload will no longer be present and requesting it will result in a 404.
       // In that case we return the upload_length as size.
-      return {
+      return new Upload({
         id,
         ...this.cache.get(id)?.file,
         offset: metadata.file.offset,
         size: metadata.file.size,
-        sizeIsDeferred: metadata.file.sizeIsDeferred,
-      }
+      })
     }
   }
diff --git a/package.json b/package.json
index c8bc2b86..553ca82c 100644
--- a/package.json
+++ b/package.json
@@ -1,7 +1,7 @@
 {
   "$schema": "https://json.schemastore.org/package.json",
   "name": "tus-node-server",
-  "version": "0.8.1",
+  "version": "0.9.0",
   "description": "Node.js tus server",
   "packageManager": "yarn@3.2.3",
   "keywords": [
@@ -82,6 +82,6 @@
     "typescript": "^4.8.4"
   },
   "engines": {
-    "node": ">=14.14"
+    "node": ">=16"
   }
 }
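The `getUpload()` change above returns a real `Upload` instance instead of a plain object, so `sizeIsDeferred` stays a derived getter rather than a manually copied field. A small illustration, assuming `sizeIsDeferred` is computed from `size` being undefined:

```ts
import Upload from '../lib/models/Upload'

// Upload without a known size: the getter reports the size as deferred.
const inProgress = new Upload({id: 'example-id', offset: 0})
console.log(inProgress.sizeIsDeferred) // expected: true

// Once the size is known, the same getter flips without any extra bookkeeping.
const complete = new Upload({id: 'example-id', offset: 0, size: 1024})
console.log(complete.sizeIsDeferred) // expected: false
```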
diff --git a/test/Test-EndToEnd.ts b/test/Test-EndToEnd.ts
index 63abd7e8..2a774d3e 100644
--- a/test/Test-EndToEnd.ts
+++ b/test/Test-EndToEnd.ts
@@ -13,6 +13,7 @@ import GCSDataStore from '../lib/stores/GCSDataStore'
 import {TUS_RESUMABLE} from '../lib/constants'

 import type http from 'node:http'
+import MemoryConfigstore from '../lib/configstores/MemoryConfigstore'

 const STORE_PATH = '/test/output'
 const PROJECT_ID = 'tus-node-server'
@@ -302,6 +303,142 @@ describe('EndToEnd', () => {
     })
   })

+  describe('FileStore with defined expirationPeriodInMilliseconds option', () => {
+    let file_id: string
+
+    before(() => {
+      server = new Server({
+        path: STORE_PATH,
+        datastore: new FileStore({
+          directory: `./${STORE_PATH}`,
+          expirationPeriodInMilliseconds: 50,
+          configstore: new MemoryConfigstore(),
+        }),
+      })
+      listener = server.listen()
+      agent = request.agent(listener)
+    })
+
+    after(() => {
+      listener.close()
+    })
+
+    describe('OPTIONS', () => {
+      it('should respond with expiration in Tus-Extension header', (done) => {
+        agent
+          .options(STORE_PATH)
+          .set('Tus-Resumable', TUS_RESUMABLE)
+          .expect(204)
+          .expect('Tus-Resumable', TUS_RESUMABLE)
+          .expect('Tus-Extension', 'expiration')
+          .end((_, res) => {
+            assert.ok(res.headers['tus-extension'].includes('expiration'))
+            done()
+          })
+      })
+    })
+
+    describe('POST', () => {
+      it('should respond with Upload-Expires', (done) => {
+        agent
+          .post(STORE_PATH)
+          .set('Tus-Resumable', TUS_RESUMABLE)
+          .set('Upload-Length', `${TEST_FILE_SIZE}`)
+          .set('Upload-Metadata', TEST_METADATA)
+          .set('Tus-Resumable', TUS_RESUMABLE)
+          .expect(201)
+          .end((_, res) => {
+            assert.equal('upload-expires' in res.headers, true)
+            file_id = res.headers.location.split('/').pop()
+            done()
+          })
+      })
+    })
+
+    describe('PATCH', () => {
+      it('unfinished upload response contains header Upload-Expires', (done) => {
+        agent
+          .post(STORE_PATH)
+          .set('Tus-Resumable', TUS_RESUMABLE)
+          .set('Upload-Length', `${TEST_FILE_SIZE}`)
+          .set('Upload-Metadata', TEST_METADATA)
+          .set('Tus-Resumable', TUS_RESUMABLE)
+          .expect(201)
+          .end((_, res) => {
+            assert.equal('upload-expires' in res.headers, true)
+            file_id = res.headers.location.split('/').pop()
+          })
+
+        const msg = 'tus test'
+        const write_stream = agent
+          .patch(`${STORE_PATH}/${file_id}`)
+          .set('Tus-Resumable', TUS_RESUMABLE)
+          .set('Upload-Offset', '0')
+          .set('Content-Type', 'application/offset+octet-stream')
+        write_stream.on('response', (res) => {
+          assert.equal(res.statusCode, 204)
+          assert.equal(res.header['tus-resumable'], TUS_RESUMABLE)
+          assert.equal(res.header['upload-offset'], `${msg.length}`)
+          assert.equal('upload-expires' in res.headers, true)
+          done()
+        })
+        write_stream.write(msg)
+        write_stream.end(() => {})
+      })
+
+      it('expired upload responds with 410 Gone', (done) => {
+        agent
+          .post(STORE_PATH)
+          .set('Tus-Resumable', TUS_RESUMABLE)
+          .set('Upload-Length', `${TEST_FILE_SIZE}`)
+          .set('Upload-Metadata', TEST_METADATA)
+          .set('Tus-Resumable', TUS_RESUMABLE)
+          .expect(201)
+          .end((_, res) => {
+            assert.equal('upload-expires' in res.headers, true)
+            file_id = res.headers.location.split('/').pop()
+
+            setTimeout(() => {
+              const msg = 'tus test'
+              const write_stream = agent
+                .patch(`${STORE_PATH}/${file_id}`)
+                .set('Tus-Resumable', TUS_RESUMABLE)
+                .set('Upload-Offset', '0')
+                .set('Content-Type', 'application/offset+octet-stream')
+              write_stream.on('response', (res) => {
+                assert.equal(res.statusCode, 410)
+                done()
+              })
+              write_stream.write(msg)
+              write_stream.end(() => {})
+            }, 51)
+          })
+      })
+    })
+
+    describe('deleteExpiredFiles', () => {
+      it('HEAD request to expired upload returns 410 Gone', (done) => {
+        agent
+          .head(`${STORE_PATH}/${file_id}`)
+          .set('Tus-Resumable', TUS_RESUMABLE)
+          .expect(410)
+          .end(done)
+      })
+
+      it('can delete expired files', (done) => {
+        server.datastore
+          .deleteExpired()
+          .catch((error) => {
+            done(error)
+          })
+          .then((deleted) => {
+            assert.equal(deleted >= 1, true)
+            done()
+          })
+      })
+    })
+  })
+
   describe('GCSDataStore', () => {
     let file_id: string
     let deferred_file_id: string