
Commit

Merge branch '1.x' into s3-optimal-part-size
* 1.x:
  0.9.0
  Allow `credentials` instead of key/secret for the S3 store (#282)
  Update engines.node version requirement
  Add the `Expiration` extension, implement it in `FileStore` (#320)
  Lock CI Node.js version to 19.0.1
  Clarify example for Fastify (#311)
Murderlon committed Nov 18, 2022
2 parents dca2a22 + 24de83b commit c96a88f
Showing 14 changed files with 320 additions and 21 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -18,7 +18,7 @@ jobs:
# We do not want to run CRUD tests in parallel
max-parallel: 1
matrix:
node-version: [16.x, 18.x, 19.x]
node-version: [16.x, 18.x, 19.0.1]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/

steps:
24 changes: 22 additions & 2 deletions README.md
@@ -33,6 +33,7 @@ $ npm install tus-node-server

- **Amazon S3**

using Key/Secret
```js
server.datastore = new tus.S3Store({
bucket: 'bucket-name',
@@ -43,6 +44,23 @@ $ npm install tus-node-server
})
```

using [credentials](https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Credentials.html#constructor-property) to fetch credentials inside an AWS container, such as an ECS container, which injects the required environment variables. The `credentials` config is passed directly to the AWS SDK, so refer to the AWS docs for the values it supports.

For example, with `ECSCredentials`:

```js
server.datastore = new tus.S3Store({
path: '/files',
bucket: 'bucket-name',
credentials: new AWS.ECSCredentials({
httpOptions: { timeout: 5000 },
maxRetries: 10,
}),
region: 'eu-west-1',
partSize: 8 * 1024 * 1024, // each uploaded part will be ~8MB
tmpDirPrefix: 'tus-s3-store',
});
```
## Quick Start

#### Use the [tus-node-deploy](https://hub.docker.com/r/bhstahl/tus-node-deploy/) Docker image
Expand Down Expand Up @@ -129,7 +147,9 @@ const fastify = require('fastify')({logger: true})
* without any parser to leave body untouched
* @see https://www.fastify.io/docs/latest/Reference/ContentTypeParser/
*/
fastify.addContentTypeParser('application/offset+octet-stream', async () => true)
fastify.addContentTypeParser(
'application/offset+octet-stream', (request, payload, done) => done(null)
);

/**
* let tus handle preparation and filehandling requests
Expand Down Expand Up @@ -254,7 +274,7 @@ const server = new tus.Server({
})
```
## Demo
Start the demo server using Local File Storage
8 changes: 8 additions & 0 deletions lib/Server.ts
@@ -193,4 +193,12 @@ export class Server extends EventEmitter {
listen(...args: any[]): http.Server {
return http.createServer(this.handle.bind(this)).listen(...args)
}

cleanUpExpiredUploads(): Promise<number> {
if (!this.datastore.hasExtension('expiration')) {
throw ERRORS.UNSUPPORTED_EXPIRATION_EXTENSION
}

return this.datastore.deleteExpired()
}
}
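
Not part of the diff: a minimal sketch of how the new `cleanUpExpiredUploads()` method might be driven from application code. The interval, logging, and variable names are illustrative, and it assumes `server` is a `tus.Server` whose datastore implements the `expiration` extension (otherwise the call throws `ERRORS.UNSUPPORTED_EXPIRATION_EXTENSION`).

```js
// Hypothetical periodic cleanup (interval and names are illustrative).
setInterval(async () => {
  try {
    // Resolves with the number of expired uploads that were deleted.
    const deleted = await server.cleanUpExpiredUploads()
    console.log(`removed ${deleted} expired upload(s)`)
  } catch (error) {
    // Thrown when the datastore does not advertise the 'expiration' extension.
    console.error('expiration cleanup failed', error)
  }
}, 60 * 60 * 1000) // once per hour
```
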
4 changes: 4 additions & 0 deletions lib/configstores/MemoryConfigstore.ts
@@ -20,4 +20,8 @@ export default class MemoryConfigstore {
async delete(key: string) {
return this.data.delete(key)
}

get all(): Record<string, Upload> {
return Object.fromEntries(this.data.entries())
}
}
4 changes: 4 additions & 0 deletions lib/constants.ts
@@ -59,6 +59,10 @@ export const ERRORS = {
status_code: 501,
body: 'creation-defer-length extension is not (yet) supported.\n',
},
UNSUPPORTED_EXPIRATION_EXTENSION: {
status_code: 501,
body: 'expiration extension is not (yet) supported.\n',
},
} as const
export const EVENT_ENDPOINT_CREATED = 'EVENT_ENDPOINT_CREATED' as const
export const EVENT_FILE_CREATED = 'EVENT_FILE_CREATED' as const
15 changes: 15 additions & 0 deletions lib/handlers/HeadHandler.ts
@@ -12,6 +12,21 @@ export default class HeadHandler extends BaseHandler {
}

const file = await this.store.getUpload(id)

// If a Client does attempt to resume an upload which has since
// been removed by the Server, the Server SHOULD respond with the
// 404 Not Found or 410 Gone status. The latter one SHOULD
// be used if the Server is keeping track of expired uploads.
const now = new Date()
if (
this.store.hasExtension('expiration') &&
this.store.getExpiration() > 0 &&
file.creation_date &&
now > new Date(new Date(file.creation_date).getTime() + this.store.getExpiration())
) {
throw ERRORS.FILE_NO_LONGER_EXISTS
}

// The Server MUST prevent the client and/or proxies from
// caching the response by adding the Cache-Control: no-store
// header to the response.
35 changes: 33 additions & 2 deletions lib/handlers/PatchHandler.ts
@@ -32,6 +32,21 @@ export default class PatchHandler extends BaseHandler {

const file = await this.store.getUpload(id)

// If a Client does attempt to resume an upload which has since
// been removed by the Server, the Server SHOULD respond with the
// 404 Not Found or 410 Gone status. The latter one SHOULD
// be used if the Server is keeping track of expired uploads.
const creation = file.creation_date ? new Date(file.creation_date) : new Date()
const expiration = new Date(creation.getTime() + this.store.getExpiration())
const now = new Date()
if (
this.store.hasExtension('expiration') &&
this.store.getExpiration() > 0 &&
now > expiration
) {
throw ERRORS.FILE_NO_LONGER_EXISTS
}

if (file.offset !== offset) {
// If the offsets do not match, the Server MUST respond with the 409 Conflict status without modifying the upload resource.
log(
@@ -67,11 +82,27 @@
this.emit(EVENTS.EVENT_UPLOAD_COMPLETE, {file})
}

// It MUST include the Upload-Offset header containing the new offset.
const headers = {
const headers: {
'Upload-Offset': number
'Upload-Expires'?: string
} = {
'Upload-Offset': new_offset,
}

if (
this.store.hasExtension('expiration') &&
this.store.getExpiration() > 0 &&
file.creation_date &&
(file.size === undefined || new_offset < file.size)
) {
const creation = new Date(file.creation_date)
// Value MUST be in RFC 7231 datetime format
const dateString = new Date(
creation.getTime() + this.store.getExpiration()
).toUTCString()
headers['Upload-Expires'] = dateString
}

// The Server MUST acknowledge successful PATCH requests with the 204
return this.write(res, 204, headers)
}
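
A side note, not part of the diff: `Date.prototype.toUTCString()` produces the HTTP-date format that RFC 7231 requires, which is what ends up in the `Upload-Expires` header. The snippet below is purely illustrative and the timestamp shown is made up.

```js
// Illustration: Upload-Expires value for a one-hour expiration window.
const expires = new Date(Date.now() + 60 * 60 * 1000).toUTCString()
// e.g. 'Fri, 18 Nov 2022 13:00:00 GMT'
```
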
22 changes: 21 additions & 1 deletion lib/handlers/PostHandler.ts
@@ -73,7 +73,10 @@ export default class PostHandler extends BaseHandler {
const url = this.generateUrl(req, file.id)
this.emit(EVENTS.EVENT_ENDPOINT_CREATED, {url})

const optional_headers: {'Upload-Offset'?: string} = {}
const optional_headers: {
'Upload-Offset'?: string
'Upload-Expires'?: string
} = {}

// The request MIGHT include a Content-Type header when using creation-with-upload extension
if (!RequestValidator.isInvalidHeader('content-type', req.headers['content-type'])) {
@@ -85,6 +88,23 @@
}
}

// The Upload-Expires response header indicates the time after which the unfinished upload expires.
// If expiration is known at creation time, Upload-Expires header MUST be included in the response
if (
this.store.hasExtension('expiration') &&
this.store.getExpiration() > 0 &&
file.creation_date
) {
const created = await this.store.getUpload(file.id)
if (created.offset !== Number.parseInt(upload_length as string, 10)) {
const creation = new Date(file.creation_date)
// Value MUST be in RFC 7231 datetime format
optional_headers['Upload-Expires'] = new Date(
creation.getTime() + this.store.getExpiration()
).toUTCString()
}
}

return this.write(res, 201, {Location: url, ...optional_headers})
}
}
4 changes: 4 additions & 0 deletions lib/models/Upload.ts
@@ -3,13 +3,15 @@ type TUpload = {
size?: number
offset: number
metadata?: string
creation_date?: string
}

export default class Upload {
id: TUpload['id']
metadata?: TUpload['metadata']
size?: TUpload['size']
offset: TUpload['offset']
creation_date: TUpload['creation_date']

constructor(upload: TUpload) {
if (!upload.id) {
@@ -20,6 +22,8 @@ export default class Upload {
this.size = upload.size
this.offset = upload.offset
this.metadata = upload.metadata

this.creation_date = upload.creation_date ?? new Date().toISOString()
}

get sizeIsDeferred(): boolean {
11 changes: 11 additions & 0 deletions lib/stores/DataStore.ts
@@ -69,4 +69,15 @@ export default class DataStore extends EventEmitter {
* Called in PATCH requests when upload length is known after being deferred.
*/
async declareUploadLength(id: string, upload_length: number) {}

/**
* Returns number of expired uploads that were deleted.
*/
async deleteExpired(): Promise<number> {
return 0
}

getExpiration(): number {
return 0
}
}
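
Not part of the diff: a hypothetical custom store sketching how the two new base-class stubs are meant to be overridden when a store advertises the `expiration` extension. The class name, option name, and behaviour are invented for illustration; `FileStore` below shows the real implementation.

```js
// Hypothetical store illustrating the new expiration hooks (names invented).
class MyStore extends DataStore {
  constructor({expirationPeriodInMilliseconds = 0} = {}) {
    super()
    this.extensions = ['creation', 'expiration']
    this.expirationPeriodInMilliseconds = expirationPeriodInMilliseconds
  }

  getExpiration() {
    // How long (in ms) an unfinished upload may live before it expires.
    return this.expirationPeriodInMilliseconds
  }

  async deleteExpired() {
    // Delete backing objects older than getExpiration() and return the count.
    // This sketch stores nothing, so there is never anything to delete.
    return 0
  }
}
```
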
50 changes: 48 additions & 2 deletions lib/stores/FileStore.ts
@@ -17,11 +17,13 @@ type Store = {
get(key: string): Upload | undefined
set(key: string, value: Upload): void
delete(key: string): void
all: Record<string, Upload>
}

type Options = {
directory: string
configstore?: Store
expirationPeriodInMilliseconds?: number
}

const MASK = '0777'
@@ -32,16 +34,19 @@ const log = debug('tus-node-server:stores:filestore')
export default class FileStore extends DataStore {
directory: string
configstore: Store
expirationPeriodInMilliseconds: number

constructor({directory, configstore}: Options) {
constructor({directory, configstore, expirationPeriodInMilliseconds}: Options) {
super()
this.directory = directory
this.configstore = configstore ?? new Configstore(`${pkg.name}-${pkg.version}`)
this.expirationPeriodInMilliseconds = expirationPeriodInMilliseconds ?? 0
this.extensions = [
'creation',
'creation-with-upload',
'creation-defer-length',
'termination',
'expiration',
]
// TODO: this async call can not happen in the constructor
this.checkOrCreateDirectory()
@@ -172,7 +177,13 @@ export default class FileStore extends DataStore {
}

return resolve(
new Upload({id, size: file.size, offset: stats.size, metadata: file.metadata})
new Upload({
id,
size: file.size,
offset: stats.size,
metadata: file.metadata,
creation_date: file.creation_date,
})
)
})
})
@@ -189,4 +200,39 @@

this.configstore.set(id, file)
}

async deleteExpired(): Promise<number> {
const now = new Date()
const toDelete: Promise<void>[] = []

const uploadInfos = this.configstore.all
for (const file_id of Object.keys(uploadInfos)) {
try {
const info = uploadInfos[file_id]
if (
info &&
'creation_date' in info &&
this.getExpiration() > 0 &&
info.size !== info.offset &&
info.creation_date
) {
const creation = new Date(info.creation_date)
const expires = new Date(creation.getTime() + this.getExpiration())
if (now > expires) {
toDelete.push(this.remove(file_id))
}
}
} catch (error) {
if (error !== ERRORS.FILE_NO_LONGER_EXISTS) {
throw error
}
}
}

return Promise.all(toDelete).then(() => toDelete.length)
}

getExpiration(): number {
return this.expirationPeriodInMilliseconds
}
}
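
Not part of the diff: a sketch of enabling expiration on `FileStore`, pairing the new `expirationPeriodInMilliseconds` option with the `cleanUpExpiredUploads()` call shown earlier. The option and constructor shape follow this diff; the directory, the 24-hour period, and the `tus.FileStore` export name (mirroring the README's `tus.S3Store` usage) are assumptions.

```js
// Uploads left unfinished for 24 hours become eligible for deletion by
// deleteExpired() / server.cleanUpExpiredUploads().
server.datastore = new tus.FileStore({
  directory: './files',
  expirationPeriodInMilliseconds: 24 * 60 * 60 * 1000,
})
```
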
21 changes: 10 additions & 11 deletions lib/stores/S3Store.ts
@@ -65,11 +65,12 @@ export default class S3Store extends DataStore {
constructor(options: Options) {
super()
const {bucket, partSize, ...rest} = options
// TODO: these are deprecated. Remove these, look up new best practise,
// and reflect that in the docs
assert.ok(options.accessKeyId, '[S3Store] `accessKeyId` must be set')
assert.ok(options.secretAccessKey, '[S3Store] `secretAccessKey` must be set')
assert.ok(bucket, '[S3Store] `bucket` must be set')
if (options.accessKeyId || options.secretAccessKey) {
assert.ok(options.accessKeyId, '[S3Store] `accessKeyId` must be set')
assert.ok(options.secretAccessKey, '[S3Store] `secretAccessKey` must be set')
} else {
assert.ok(options.credentials, '[S3Store] `credentials` must be set')
}

this.extensions = ['creation', 'creation-with-upload', 'creation-defer-length']
this.bucket = bucket
@@ -481,13 +482,12 @@

try {
const parts = await this.retrieveParts(id)
return {
return new Upload({
id,
...this.cache.get(id)?.file,
offset: calcOffsetFromParts(parts),
size: metadata.file.size,
sizeIsDeferred: metadata.file.sizeIsDeferred,
}
})
// eslint-disable-next-line @typescript-eslint/no-explicit-any
} catch (error: any) {
if (error.code !== 'NoSuchUpload') {
@@ -498,13 +498,12 @@
// When the last part of an upload is finished and the file is successfully written to S3,
// the upload will no longer be present and requesting it will result in a 404.
// In that case we return the upload_length as size.
return {
return new Upload({
id,
...this.cache.get(id)?.file,
offset: metadata.file.offset,
size: metadata.file.size,
sizeIsDeferred: metadata.file.sizeIsDeferred,
}
})
}
}
