diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..79fb78d
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,35 @@
+name: CI
+
+on:
+ push:
+ pull_request:
+ workflow_dispatch:
+
+env:
+ FORCE_COLOR: 2
+
+jobs:
+ test:
+ name: Node ${{ matrix.node }} on ${{ matrix.os }}
+ runs-on: ${{ matrix.os }}
+
+ strategy:
+ fail-fast: false
+ matrix:
+ node: [12, 14, 16, 18]
+ os: [ubuntu-latest, windows-latest]
+
+ steps:
+ - name: Clone repository
+ uses: actions/checkout@v3
+
+ - name: Set up Node.js
+ uses: actions/setup-node@v3
+ with:
+ node-version: ${{ matrix.node }}
+
+ - name: Install npm dependencies
+ run: npm install
+
+ - name: Run tests
+ run: npm run test-ci
diff --git a/.gitignore b/.gitignore
index 239ecff..9eb6759 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,3 @@
node_modules
yarn.lock
+/coverage
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 8a830b8..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,5 +0,0 @@
-language: node_js
-node_js:
- - '13'
- - '12'
- - '10'
diff --git a/index.js b/index.js
index 6aa67ca..e3c5125 100644
--- a/index.js
+++ b/index.js
@@ -1,72 +1,71 @@
-'use strict';
-const path = require('path');
-const fs = require('graceful-fs');
-const decompressTar = require('decompress-tar');
-const decompressTarbz2 = require('decompress-tarbz2');
-const decompressTargz = require('decompress-targz');
-const decompressUnzip = require('decompress-unzip');
-const makeDir = require('make-dir');
-const pify = require('pify');
-const stripDirs = require('strip-dirs');
+import {Buffer} from 'node:buffer';
+import path from 'node:path';
+import process from 'node:process';
+import decompressTar from 'decompress-tar';
+import decompressTarbz2 from 'decompress-tarbz2';
+import decompressTargz from 'decompress-targz';
+import decompressUnzip from 'decompress-unzip';
+import fs from 'graceful-fs';
+import makeDir from 'make-dir';
+import pify from 'pify';
+import stripDirs from 'strip-dirs';
const fsP = pify(fs);
-const runPlugins = (input, opts) => {
- if (opts.plugins.length === 0) {
+const runPlugins = (input, options) => {
+ if (options.plugins.length === 0) {
return Promise.resolve([]);
}
- return Promise.all(opts.plugins.map(x => x(input, opts))).then(files => files.reduce((a, b) => a.concat(b)));
+ return Promise.all(options.plugins.map(x => x(input, options)))
+ // eslint-disable-next-line unicorn/no-array-reduce, unicorn/prefer-spread
+ .then(files => files.reduce((a, b) => a.concat(b)));
};
-const safeMakeDir = (dir, realOutputPath) => {
- return fsP.realpath(dir)
- .catch(_ => {
- const parent = path.dirname(dir);
- return safeMakeDir(parent, realOutputPath);
- })
- .then(realParentPath => {
- if (realParentPath.indexOf(realOutputPath) !== 0) {
- throw (new Error('Refusing to create a directory outside the output path.'));
- }
-
- return makeDir(dir).then(fsP.realpath);
- });
-};
+const safeMakeDir = (dir, realOutputPath) => fsP.realpath(dir)
+ .catch(_ => {
+ const parent = path.dirname(dir);
+ return safeMakeDir(parent, realOutputPath);
+ })
+ .then(realParentPath => {
+ if (realParentPath.indexOf(realOutputPath) !== 0) {
+ throw new Error('Refusing to create a directory outside the output path.');
+ }
-const preventWritingThroughSymlink = (destination, realOutputPath) => {
- return fsP.readlink(destination)
- .catch(_ => {
- // Either no file exists, or it's not a symlink. In either case, this is
- // not an escape we need to worry about in this phase.
- return null;
- })
- .then(symlinkPointsTo => {
- if (symlinkPointsTo) {
- throw new Error('Refusing to write into a symlink');
- }
-
- // No symlink exists at `destination`, so we can continue
- return realOutputPath;
- });
-};
+ return makeDir(dir).then(fsP.realpath);
+ });
+
+const preventWritingThroughSymlink = (destination, realOutputPath) => fsP.readlink(destination)
+ // Either no file exists, or it's not a symlink. In either case, this is
+ // not an escape we need to worry about in this phase.
+ .catch(_ => null)
+ .then(symlinkPointsTo => {
+ if (symlinkPointsTo) {
+ throw new Error('Refusing to write into a symlink');
+ }
+
+ // No symlink exists at `destination`, so we can continue
+ return realOutputPath;
+ });
-const extractFile = (input, output, opts) => runPlugins(input, opts).then(files => {
- if (opts.strip > 0) {
+const extractFile = (input, output, options) => runPlugins(input, options).then(files => {
+ if (options.strip > 0) {
files = files
.map(x => {
- x.path = stripDirs(x.path, opts.strip);
+ x.path = stripDirs(x.path, options.strip);
return x;
})
.filter(x => x.path !== '.');
}
- if (typeof opts.filter === 'function') {
- files = files.filter(opts.filter);
+ if (typeof options.filter === 'function') {
+ // eslint-disable-next-line unicorn/no-array-callback-reference
+ files = files.filter(options.filter);
}
- if (typeof opts.map === 'function') {
- files = files.map(opts.map);
+ if (typeof options.map === 'function') {
+ // eslint-disable-next-line unicorn/no-array-callback-reference
+ files = files.map(options.map);
}
if (!output) {
@@ -75,7 +74,7 @@ const extractFile = (input, output, opts) => runPlugins(input, opts).then(files
return Promise.all(files.map(x => {
const dest = path.join(output, x.path);
- const mode = x.mode & ~process.umask();
+ const mode = x.mode & ~process.umask(); // eslint-disable-line no-bitwise
const now = new Date();
if (x.type === 'directory') {
@@ -88,11 +87,10 @@ const extractFile = (input, output, opts) => runPlugins(input, opts).then(files
return makeDir(output)
.then(outputPath => fsP.realpath(outputPath))
- .then(realOutputPath => {
+ .then(realOutputPath =>
// Attempt to ensure parent directory exists (failing if it's outside the output dir)
- return safeMakeDir(path.dirname(dest), realOutputPath)
- .then(() => realOutputPath);
- })
+ safeMakeDir(path.dirname(dest), realOutputPath).then(() => realOutputPath),
+ )
.then(realOutputPath => {
if (x.type === 'file') {
return preventWritingThroughSymlink(dest, realOutputPath);
@@ -100,14 +98,12 @@ const extractFile = (input, output, opts) => runPlugins(input, opts).then(files
return realOutputPath;
})
- .then(realOutputPath => {
- return fsP.realpath(path.dirname(dest))
- .then(realDestinationDir => {
- if (realDestinationDir.indexOf(realOutputPath) !== 0) {
- throw (new Error('Refusing to write outside output directory: ' + realDestinationDir));
- }
- });
- })
+ .then(realOutputPath => fsP.realpath(path.dirname(dest))
+ .then(realDestinationDir => {
+ if (realDestinationDir.indexOf(realOutputPath) !== 0) {
+ throw new Error(`Refusing to write outside output directory: ${realDestinationDir}`);
+ }
+ }))
.then(() => {
if (x.type === 'link') {
return fsP.link(x.linkname, dest);
@@ -128,24 +124,29 @@ const extractFile = (input, output, opts) => runPlugins(input, opts).then(files
}));
});
-module.exports = (input, output, opts) => {
+const decompress = (input, output, options) => {
if (typeof input !== 'string' && !Buffer.isBuffer(input)) {
return Promise.reject(new TypeError('Input file required'));
}
if (typeof output === 'object') {
- opts = output;
+ options = output;
output = null;
}
- opts = Object.assign({plugins: [
- decompressTar(),
- decompressTarbz2(),
- decompressTargz(),
- decompressUnzip()
- ]}, opts);
+ options = {
+ plugins: [
+ decompressTar(),
+ decompressTarbz2(),
+ decompressTargz(),
+ decompressUnzip(),
+ ],
+ ...options,
+ };
const read = typeof input === 'string' ? fsP.readFile(input) : Promise.resolve(input);
- return read.then(buf => extractFile(buf, output, opts));
+ return read.then(buf => extractFile(buf, output, options));
};
+
+export default decompress;
diff --git a/package.json b/package.json
index d6133a8..04bb0eb 100644
--- a/package.json
+++ b/package.json
@@ -10,10 +10,18 @@
"url": "github.com/kevva"
},
"engines": {
- "node": ">=4"
+ "node": "^12.20.0 || ^14.14.0 || >=16.0.0"
},
"scripts": {
- "test": "xo && ava"
+ "ava": "ava",
+ "xo": "xo",
+ "test": "npm run xo && npm run ava",
+ "test-ci": "npm run xo && c8 ava"
+ },
+ "main": "index.js",
+ "type": "module",
+ "exports": {
+ ".": "./index.js"
},
"files": [
"index.js"
@@ -30,32 +38,21 @@
"unzip"
],
"dependencies": {
- "decompress-tar": "^4.0.0",
- "decompress-tarbz2": "^4.0.0",
- "decompress-targz": "^4.0.0",
+ "decompress-tar": "^4.1.1",
+ "decompress-tarbz2": "^4.1.1",
+ "decompress-targz": "^4.1.1",
"decompress-unzip": "^4.0.1",
- "graceful-fs": "^4.1.10",
- "make-dir": "^1.0.0",
- "pify": "^2.3.0",
- "strip-dirs": "^2.0.0"
+ "graceful-fs": "^4.2.10",
+ "make-dir": "^3.1.0",
+ "pify": "^5.0.0",
+ "strip-dirs": "^3.0.0"
},
"devDependencies": {
- "ava": "*",
- "esm": "^3.2.25",
- "is-jpg": "^1.0.0",
- "path-exists": "^3.0.0",
- "pify": "^2.3.0",
+ "ava": "^4.3.0",
+ "c8": "^7.11.3",
+ "is-jpg": "^3.0.0",
+ "path-exists": "^5.0.0",
"rimraf": "^3.0.2",
- "xo": "*"
- },
- "ava": {
- "require": [
- "esm"
- ]
- },
- "xo": {
- "rules": {
- "promise/prefer-await-to-then": "off"
- }
+ "xo": "^0.49.0"
}
}
diff --git a/readme.md b/readme.md
index fd23f91..fa24e55 100644
--- a/readme.md
+++ b/readme.md
@@ -1,4 +1,4 @@
-# decompress [![Build Status](https://travis-ci.org/kevva/decompress.svg?branch=master)](https://travis-ci.org/kevva/decompress)
+# decompress [![CI](https://github.com/kevva/decompress/actions/workflows/ci.yml/badge.svg)](https://github.com/kevva/decompress/actions/workflows/ci.yml)
> Extracting archives made easy
@@ -6,15 +6,15 @@
## Install
-```
-$ npm install decompress
+```sh
+npm install decompress
```
## Usage
```js
-const decompress = require('decompress');
+import decompress from 'decompress';
decompress('unicorn.zip', 'dist').then(files => {
console.log('done!');
@@ -87,15 +87,15 @@ decompress('unicorn.zip', 'dist', {
##### plugins
-Type: `Array`
-Default: `[decompressTar(), decompressTarbz2(), decompressTargz(), decompressUnzip()]`
+* Type: `Array`
+* Default: `[decompressTar(), decompressTarbz2(), decompressTargz(), decompressUnzip()]`
Array of [plugins](https://www.npmjs.com/browse/keyword/decompressplugin) to use.
##### strip
-Type: `number`
-Default: `0`
+* Type: `number`
+* Default: `0`
Remove leading directory components from extracted files.
diff --git a/test.js b/test.js
index ba99d68..e2ff80d 100644
--- a/test.js
+++ b/test.js
@@ -1,14 +1,17 @@
-import fs from 'fs';
-import path from 'path';
+import fs from 'node:fs';
+import path from 'node:path';
+import {fileURLToPath} from 'node:url';
+import {promisify} from 'node:util';
import isJpg from 'is-jpg';
-import pathExists from 'path-exists';
+import {pathExists} from 'path-exists';
import pify from 'pify';
import rimraf from 'rimraf';
import test from 'ava';
-import m from '.';
+import decompress from './index.js';
const fsP = pify(fs);
-const rimrafP = pify(rimraf);
+const rimrafP = promisify(rimraf);
+const __dirname = path.dirname(fileURLToPath(import.meta.url));
test.serial.afterEach('ensure decompressed files and directories are cleaned up', async () => {
await rimrafP(path.join(__dirname, 'directory'));
@@ -21,10 +24,10 @@ test.serial.afterEach('ensure decompressed files and directories are cleaned up'
});
test('extract file', async t => {
- const tarFiles = await m(path.join(__dirname, 'fixtures', 'file.tar'));
- const tarbzFiles = await m(path.join(__dirname, 'fixtures', 'file.tar.bz2'));
- const targzFiles = await m(path.join(__dirname, 'fixtures', 'file.tar.gz'));
- const zipFiles = await m(path.join(__dirname, 'fixtures', 'file.zip'));
+ const tarFiles = await decompress(path.join(__dirname, 'fixtures', 'file.tar'));
+ const tarbzFiles = await decompress(path.join(__dirname, 'fixtures', 'file.tar.bz2'));
+ const targzFiles = await decompress(path.join(__dirname, 'fixtures', 'file.tar.gz'));
+ const zipFiles = await decompress(path.join(__dirname, 'fixtures', 'file.zip'));
t.is(tarFiles[0].path, 'test.jpg');
t.true(isJpg(tarFiles[0].data));
@@ -38,13 +41,13 @@ test('extract file', async t => {
test('extract file using buffer', async t => {
const tarBuf = await fsP.readFile(path.join(__dirname, 'fixtures', 'file.tar'));
- const tarFiles = await m(tarBuf);
+ const tarFiles = await decompress(tarBuf);
const tarbzBuf = await fsP.readFile(path.join(__dirname, 'fixtures', 'file.tar.bz2'));
- const tarbzFiles = await m(tarbzBuf);
+ const tarbzFiles = await decompress(tarbzBuf);
const targzBuf = await fsP.readFile(path.join(__dirname, 'fixtures', 'file.tar.gz'));
- const targzFiles = await m(targzBuf);
+ const targzFiles = await decompress(targzBuf);
const zipBuf = await fsP.readFile(path.join(__dirname, 'fixtures', 'file.zip'));
- const zipFiles = await m(zipBuf);
+ const zipFiles = await decompress(zipBuf);
t.is(tarFiles[0].path, 'test.jpg');
t.is(tarbzFiles[0].path, 'test.jpg');
@@ -53,7 +56,7 @@ test('extract file using buffer', async t => {
});
test.serial('extract file to directory', async t => {
- const files = await m(path.join(__dirname, 'fixtures', 'file.tar'), __dirname);
+ const files = await decompress(path.join(__dirname, 'fixtures', 'file.tar'), __dirname);
t.is(files[0].path, 'test.jpg');
t.true(isJpg(files[0].data));
@@ -61,18 +64,18 @@ test.serial('extract file to directory', async t => {
});
test.serial('extract symlink', async t => {
- await m(path.join(__dirname, 'fixtures', 'symlink.tar'), __dirname, {strip: 1});
+ await decompress(path.join(__dirname, 'fixtures', 'symlink.tar'), __dirname, {strip: 1});
t.is(await fsP.realpath(path.join(__dirname, 'symlink')), path.join(__dirname, 'file.txt'));
});
test.serial('extract directory', async t => {
- await m(path.join(__dirname, 'fixtures', 'directory.tar'), __dirname);
+ await decompress(path.join(__dirname, 'fixtures', 'directory.tar'), __dirname);
t.true(await pathExists(path.join(__dirname, 'directory')));
});
test('strip option', async t => {
- const zipFiles = await m(path.join(__dirname, 'fixtures', 'strip.zip'), {strip: 1});
- const tarFiles = await m(path.join(__dirname, 'fixtures', 'strip.tar'), {strip: 1});
+ const zipFiles = await decompress(path.join(__dirname, 'fixtures', 'strip.zip'), {strip: 1});
+ const tarFiles = await decompress(path.join(__dirname, 'fixtures', 'strip.tar'), {strip: 1});
t.is(zipFiles[0].path, 'test-strip.jpg');
t.true(isJpg(zipFiles[0].data));
@@ -81,61 +84,61 @@ test('strip option', async t => {
});
test('filter option', async t => {
- const files = await m(path.join(__dirname, 'fixtures', 'file.tar'), {
- filter: x => x.path !== 'test.jpg'
+ const files = await decompress(path.join(__dirname, 'fixtures', 'file.tar'), {
+ filter: x => x.path !== 'test.jpg',
});
t.is(files.length, 0);
});
test('map option', async t => {
- const files = await m(path.join(__dirname, 'fixtures', 'file.tar'), {
- map: x => {
+ const files = await decompress(path.join(__dirname, 'fixtures', 'file.tar'), {
+ map(x) {
x.path = `unicorn-${x.path}`;
return x;
- }
+ },
});
t.is(files[0].path, 'unicorn-test.jpg');
});
test.serial('set mtime', async t => {
- const files = await m(path.join(__dirname, 'fixtures', 'file.tar'), __dirname);
+ const files = await decompress(path.join(__dirname, 'fixtures', 'file.tar'), __dirname);
const stat = await fsP.stat(path.join(__dirname, 'test.jpg'));
t.deepEqual(files[0].mtime, stat.mtime);
});
test('return emptpy array if no plugins are set', async t => {
- const files = await m(path.join(__dirname, 'fixtures', 'file.tar'), {plugins: []});
+ const files = await decompress(path.join(__dirname, 'fixtures', 'file.tar'), {plugins: []});
t.is(files.length, 0);
});
test.serial('throw when a location outside the root is given', async t => {
await t.throwsAsync(async () => {
- await m(path.join(__dirname, 'fixtures', 'slipping.tar.gz'), 'dist');
+ await decompress(path.join(__dirname, 'fixtures', 'slipping.tar.gz'), 'dist');
}, {message: /Refusing/});
});
test.serial('throw when a location outside the root including symlinks is given', async t => {
await t.throwsAsync(async () => {
- await m(path.join(__dirname, 'fixtures', 'slip.zip'), 'dist');
+ await decompress(path.join(__dirname, 'fixtures', 'slip.zip'), 'dist');
}, {message: /Refusing/});
});
test.serial('throw when a top-level symlink outside the root is given', async t => {
await t.throwsAsync(async () => {
- await m(path.join(__dirname, 'fixtures', 'slip2.zip'), 'dist');
+ await decompress(path.join(__dirname, 'fixtures', 'slip2.zip'), 'dist');
}, {message: /Refusing/});
});
test.serial('throw when a directory outside the root including symlinks is given', async t => {
await t.throwsAsync(async () => {
- await m(path.join(__dirname, 'fixtures', 'slipping_directory.tar.gz'), 'dist');
+ await decompress(path.join(__dirname, 'fixtures', 'slipping_directory.tar.gz'), 'dist');
}, {message: /Refusing/});
});
test.serial('allows filenames and directories to be written with dots in their names', async t => {
- const files = await m(path.join(__dirname, 'fixtures', 'edge_case_dots.tar.gz'), __dirname);
+ const files = await decompress(path.join(__dirname, 'fixtures', 'edge_case_dots.tar.gz'), __dirname);
t.is(files.length, 6);
t.deepEqual(files.map(f => f.path).sort(), [
'edge_case_dots/',
@@ -143,18 +146,18 @@ test.serial('allows filenames and directories to be written with dots in their n
'edge_case_dots/sample../',
'edge_case_dots/ending_dots..',
'edge_case_dots/x',
- 'edge_case_dots/sample../test.txt'
+ 'edge_case_dots/sample../test.txt',
].sort());
});
test.serial('allows top-level file', async t => {
- const files = await m(path.join(__dirname, 'fixtures', 'top_level_example.tar.gz'), 'dist');
+ const files = await decompress(path.join(__dirname, 'fixtures', 'top_level_example.tar.gz'), 'dist');
t.is(files.length, 1);
t.is(files[0].path, 'example.txt');
});
test.serial('throw when chained symlinks to /tmp/dist allow escape outside root directory', async t => {
await t.throwsAsync(async () => {
- await m(path.join(__dirname, 'fixtures', 'slip3.zip'), '/tmp/dist');
+ await decompress(path.join(__dirname, 'fixtures', 'slip3.zip'), '/tmp/dist');
}, {message: /Refusing/});
});