From 8a6bfde4a2cd1fbef03cdceb0ff987912237dd26 Mon Sep 17 00:00:00 2001
From: Alan Shaw
Date: Thu, 23 Jan 2020 20:18:39 +0000
Subject: [PATCH] refactor: async iterables (#1183)

TL;DR:

* Remove Node.js streams and pull streams
* Remove callbacks
* Remove `peer-info` and `peer-id`

---

Now that the internals are all async/await/iterables and `fetch`, the next step is to bubble up that goodness to the core API, removing the multiple stream APIs and removing callback support.

I'm also proposing removing `peer-info` and `peer-id`, since these drastically increase the bundle size by pulling in `libp2p-crypto`, for which 99% of its capability is unused. In place of `peer-id` we return a `CID`, which can easily be converted to a `PeerId` instance via:

```js
const peerId = PeerId.createFromCID(peerCid)
```

In place of `peer-info` we return an object `{ id: CID, addrs: Multiaddr[] }`, which can easily be converted to a `PeerInfo` like so:

```js
const peerInfo = new PeerInfo(PeerId.createFromCID(info.id))
info.addrs.forEach(addr => peerInfo.multiaddrs.add(addr))
```

refs https://github.com/ipfs/js-ipfs/issues/1670
refs https://github.com/ipfs/js-ipfs/issues/2611
refs https://github.com/ipfs/interface-js-ipfs-core/issues/394

TODO:

* [x] Refactor local tests
* [x] Remove `addFromFs` and `addFromUrl` and export `globSource` and `urlSource` instead
* [x] Refactor `interface-ipfs-core` tests
* [x] Document new APIs in `interface-ipfs-core`
* [x] Update README with API changes
* [x] [Document upgrade path from Node.js/pull streams to async iterables](https://gist.github.com/alanshaw/04b2ddc35a6fff25c040c011ac6acf26)

Depends on:

* [x] https://github.com/ipfs/js-ipfs-utils/pull/15
* [x] https://github.com/ipfs/interface-js-ipfs-core/pull/567

BREAKING CHANGE: Callbacks are no longer supported on any API methods. Please use a utility such as [`callbackify`](https://www.npmjs.com/package/callbackify) on API methods that return Promises to emulate the previous behaviour (see the migration sketch at the end of this message).

BREAKING CHANGE: `PeerId` and `PeerInfo` classes are no longer statically exported from `ipfs-http-client` since they are no longer used internally.

BREAKING CHANGE: `pin.add` results now contain a `cid` property (a [CID instance](https://github.com/multiformats/js-cid)) instead of a string `hash` property.

BREAKING CHANGE: `pin.ls` now returns an async iterable.

BREAKING CHANGE: `pin.ls` results now contain a `cid` property (a [CID instance](https://github.com/multiformats/js-cid)) instead of a string `hash` property.

BREAKING CHANGE: `pin.rm` results now contain a `cid` property (a [CID instance](https://github.com/multiformats/js-cid)) instead of a string `hash` property.

BREAKING CHANGE: `add` now returns an async iterable.

BREAKING CHANGE: `add` results now contain a `cid` property (a [CID instance](https://github.com/multiformats/js-cid)) instead of a string `hash` property.

BREAKING CHANGE: `addReadableStream` and `addPullStream` have been removed.

BREAKING CHANGE: `ls` now returns an async iterable.

BREAKING CHANGE: `ls` results now contain a `cid` property (whose value is a [CID instance](https://github.com/multiformats/js-cid)) instead of a `hash` property.

BREAKING CHANGE: `files.ls` now returns an async iterable.

BREAKING CHANGE: `files.readPullStream` and `files.readReadableStream` have been removed.

BREAKING CHANGE: `files.read` now returns an async iterable.

BREAKING CHANGE: `files.lsPullStream` and `files.lsReadableStream` have been removed.
BREAKING CHANGE: `files.ls` results now contain a `cid` property (whose value is a [CID instance](https://github.com/multiformats/js-cid)) instead of a `hash` property.

BREAKING CHANGE: `files.ls` no longer takes a `long` option (in core) - you will receive all data by default.

BREAKING CHANGE: `files.stat` result now contains a `cid` property (whose value is a [CID instance](https://github.com/multiformats/js-cid)) instead of a `hash` property.

BREAKING CHANGE: `get` now returns an async iterable. The `content` property of objects yielded from the iterator is itself an async iterable that yields [`BufferList`](https://github.com/rvagg/bl) objects (see the migration sketch at the end of this message).

BREAKING CHANGE: `stats.bw` now returns an async iterable.

BREAKING CHANGE: `addFromStream` has been removed. Use `add` instead.

BREAKING CHANGE: `isIPFS` is no longer exported from the client. Please `npm i is-ipfs` or include the CDN script tag `` to use this utility in your applications.

BREAKING CHANGE: `addFromFs` has been removed. Please use the exported `globSource` utility and pass the result to `add`. See the [glob source documentation](https://github.com/ipfs/js-ipfs-http-client#glob-source) for more details and an example.

BREAKING CHANGE: `addFromURL` has been removed. Please use the exported `urlSource` utility and pass the result to `add`. See the [URL source documentation](https://github.com/ipfs/js-ipfs-http-client#url-source) for more details and an example.

BREAKING CHANGE: `name.resolve` now returns an async iterable. It yields increasingly accurate resolved values as they are discovered, until the best value is selected from the quorum of 16. The "best" resolved value is the last item yielded from the iterator. If you are interested only in this best value, you can use `it-last` to extract it like so:

```js
const last = require('it-last')
await last(ipfs.name.resolve('/ipns/QmHash'))
```

BREAKING CHANGE: `block.rm` now returns an async iterable.

BREAKING CHANGE: `block.rm` now yields objects of the form `{ cid: CID, error: Error }`.

BREAKING CHANGE: `dht.findProvs`, `dht.provide`, `dht.put` and `dht.query` now all return an async iterable.

BREAKING CHANGE: `dht.findPeer`, `dht.findProvs`, `dht.provide`, `dht.put` and `dht.query` now yield/return objects of the form `{ id: CID, addrs: Multiaddr[] }` instead of `PeerInfo` instances.

BREAKING CHANGE: `refs` and `refs.local` now return an async iterable.

BREAKING CHANGE: `object.data` now returns an async iterable that yields `Buffer` objects.

BREAKING CHANGE: `ping` now returns an async iterable.

BREAKING CHANGE: `repo.gc` now returns an async iterable.

BREAKING CHANGE: `swarm.peers` now returns an array of objects with a `peer` property that is a `CID`, instead of a `PeerId` instance.

BREAKING CHANGE: `swarm.addrs` now returns an array of objects of the form `{ id: CID, addrs: Multiaddr[] }` instead of `PeerInfo` instances.

BREAKING CHANGE: `block.stat` result now contains a `cid` property (whose value is a [CID instance](https://github.com/multiformats/js-cid)) instead of a `key` property.

BREAKING CHANGE: `bitswap.wantlist` now returns an array of [CID](https://github.com/multiformats/js-cid) instances.

BREAKING CHANGE: `bitswap.stat` result has changed - the `wantlist` and `peers` values are now arrays of [CID](https://github.com/multiformats/js-cid) instances.
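For reference, here is a minimal migration sketch contrasting the old stream/callback styles with the new async-iterable API. It is not part of this diff and assumes a daemon running at the default API address; `it-all` and `callbackify` are the npm packages of those names:

```js
const ipfsClient = require('ipfs-http-client')
const all = require('it-all') // npm i it-all
const callbackify = require('callbackify') // npm i callbackify

const ipfs = ipfsClient() // assumes a daemon at the default API address

async function main () {
  // `add` now returns an async iterable - iterate it directly...
  for await (const file of ipfs.add(Buffer.from('hello world'))) {
    console.log(file.cid.toString(), file.size)
  }

  // ...or collect everything it yields into an array with `it-all`
  const files = await all(ipfs.add(Buffer.from('hello world')))

  // `get` entries have a `content` async iterable that yields BufferList chunks
  for await (const entry of ipfs.get(files[0].cid)) {
    if (!entry.content) continue // directories have no content
    for await (const chunk of entry.content) {
      console.log(chunk.slice()) // convert BufferList chunk to Buffer
    }
  }

  // Wrap any Promise-returning method with `callbackify` to emulate
  // the old callback style
  callbackify(ipfs.id)((err, id) => {
    if (err) throw err
    console.log(id)
  })
}

main()
```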
--- README.md | 107 +++++++++++----- package.json | 47 +++---- src/add-from-fs/index.browser.js | 3 - src/add-from-fs/index.js | 8 -- src/add-from-url.js | 21 ---- src/add/index.js | 5 +- src/bitswap/index.js | 10 +- src/bitswap/stat.js | 5 +- src/bitswap/wantlist.js | 2 +- src/block/index.js | 27 +--- src/block/{rm-async-iterator.js => rm.js} | 17 ++- src/block/stat.js | 3 +- src/bootstrap/index.js | 8 +- src/cat.js | 2 +- src/config/index.js | 8 +- src/config/profiles/index.js | 6 +- src/dag/index.js | 8 +- src/dht/find-peer.js | 28 +++-- src/dht/find-provs.js | 21 ++-- src/dht/get.js | 27 ++-- src/dht/index.js | 37 ++---- src/dht/provide.js | 26 ++-- src/dht/put.js | 23 ++-- src/dht/query.js | 19 ++- src/diag/index.js | 8 +- src/files/index.js | 38 ++---- src/files/ls.js | 21 ++-- src/files/read.js | 2 +- src/files/stat.js | 10 +- src/get.js | 2 +- src/index.js | 144 +++------------------- src/key/index.js | 14 +-- src/lib/converters.js | 20 --- src/lib/stream-to-iterable.js | 25 ---- src/log/index.js | 6 +- src/log/tail.js | 2 +- src/ls.js | 82 ++++++------ src/name/index.js | 6 +- src/name/pubsub/index.js | 8 +- src/name/resolve.js | 11 +- src/object/data.js | 11 +- src/object/index.js | 15 +-- src/object/patch/index.js | 10 +- src/pin/add.js | 8 +- src/pin/index.js | 8 +- src/pin/ls.js | 20 ++- src/pin/rm.js | 3 +- src/ping.js | 2 +- src/pubsub/index.js | 55 ++------- src/pubsub/subscribe.js | 6 +- src/refs/index.js | 2 +- src/refs/local.js | 2 +- src/repo/gc.js | 2 +- src/repo/index.js | 9 +- src/stats/bw.js | 2 +- src/stats/index.js | 22 +--- src/swarm/addrs.js | 12 +- src/swarm/disconnect.js | 2 +- src/swarm/index.js | 12 +- src/swarm/peers.js | 4 +- test/custom-headers.spec.js | 2 +- test/exports.spec.js | 6 - test/files-mfs.spec.js | 105 ++++++++-------- test/get.spec.js | 28 +++-- test/interface.spec.js | 143 +++++++++------------ test/lib.stream-to-iterable.spec.js | 40 ------ test/log.spec.js | 3 +- test/node/swarm.js | 4 +- test/ping.spec.js | 42 +------ test/request-api.spec.js | 73 +++++------ test/stats.spec.js | 3 +- test/sub-modules.spec.js | 19 --- test/utils/expect-timeout.js | 16 --- 73 files changed, 601 insertions(+), 957 deletions(-) delete mode 100644 src/add-from-fs/index.browser.js delete mode 100644 src/add-from-fs/index.js delete mode 100644 src/add-from-url.js rename src/block/{rm-async-iterator.js => rm.js} (76%) delete mode 100644 src/lib/converters.js delete mode 100644 src/lib/stream-to-iterable.js delete mode 100644 test/lib.stream-to-iterable.spec.js delete mode 100644 test/utils/expect-timeout.js diff --git a/README.md b/README.md index 9c6d3a3c7..88ed592f8 100644 --- a/README.md +++ b/README.md @@ -53,6 +53,8 @@ - [Additional Options](#additional-options) - [Instance Utils](#instance-utils) - [Static Types and Utils](#static-types-and-utils) + - [Glob source](#glob-source) + - [URL source](#url-source) - [Development](#development) - [Testing](#testing) - [Contribute](#contribute) @@ -210,34 +212,20 @@ const ipfs = ipfsClient({ timeout: '2m' }) - [Regular Files API](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md) - [`ipfs.add(data, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#add) - - [`ipfs.addPullStream([options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#addpullstream) - - [`ipfs.addReadableStream([options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#addreadablestream) - - 
[`ipfs.addFromStream(stream)`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#addfromstream) - - [`ipfs.addFromFs(path, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#addfromfs) - - [`ipfs.addFromURL(url, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#addfromurl) - [`ipfs.cat(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#cat) - - [`ipfs.catPullStream(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#catpullstream) - - [`ipfs.catReadableStream(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#catreadablestream) - [`ipfs.get(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#get) - - [`ipfs.getPullStream(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#getpullstream) - - [`ipfs.getReadableStream(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#getreadablestream) - [`ipfs.ls(ipfsPath)`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#ls) - - [`ipfs.lsPullStream(ipfsPath)`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#lspullstream) - - [`ipfs.lsReadableStream(ipfsPath)`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#lsreadablestream) - [MFS (mutable file system) specific](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#mutable-file-system) - - _Explore the Mutable File System through interactive coding challenges in our [ProtoSchool tutorial](https://proto.school/#/mutable-file-system/)._ - [`ipfs.files.cp([from, to])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filescp) - [`ipfs.files.flush([path])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesflush) - [`ipfs.files.ls([path], [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesls) - [`ipfs.files.mkdir(path, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesmkdir) - [`ipfs.files.mv([from, to])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesmv) - [`ipfs.files.read(path, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesread) - - [`ipfs.files.readPullStream(path, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesreadpullstream) - - [`ipfs.files.readReadableStream(path, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesreadreadablestream) - [`ipfs.files.rm(path, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesrm) - [`ipfs.files.stat(path, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesstat) - [`ipfs.files.write(path, content, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#fileswrite) + _Explore the Mutable File System through interactive coding challenges in our [ProtoSchool tutorial](https://proto.school/#/mutable-file-system/)._ - [block](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/BLOCK.md) - [`ipfs.block.get(cid, 
[options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/BLOCK.md#blockget) @@ -246,20 +234,15 @@ const ipfs = ipfsClient({ timeout: '2m' }) - [refs](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/REFS.md) - [`ipfs.refs(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/REFS.md#refs) - - [`ipfs.refsReadableStream(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/REFS.md#refsreadablestream) - - [`ipfs.refsPullStream(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/REFS.md#refspullstream) - [`ipfs.refs.local()`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/REFS.md#refslocal) - - [`ipfs.refs.localReadableStream()`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/REFS.md#refslocalreadablestream) - - [`ipfs.refs.localPullStream()`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/REFS.md#refslocalpullstream) #### Graph - [dag](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/DAG.md) - - _Explore the DAG API through interactive coding challenges in our [ProtoSchool tutorial](https://proto.school/#/basics)._ - [`ipfs.dag.get(cid, [path], [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/DAG.md#dagget) - [`ipfs.dag.put(dagNode, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/DAG.md#dagput) - [`ipfs.dag.tree(cid, [path], [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/DAG.md#dagtree) + _Explore the DAG API through interactive coding challenges in our [ProtoSchool tutorial](https://proto.school/#/basics)._ - [object](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/OBJECT.md) - [`ipfs.object.data(multihash, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/OBJECT.md#objectdata) @@ -278,9 +261,6 @@ const ipfs = ipfsClient({ timeout: '2m' }) - [`ipfs.pin.ls([hash], [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/PIN.md#pinls) - [`ipfs.pin.rm(hash, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/PIN.md#pinrm) -- refs - - `ipfs.refs.local()` - #### Network - [bootstrap](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/BOOTSTRAP.md) @@ -326,8 +306,6 @@ const ipfs = ipfsClient({ timeout: '2m' }) - [`ipfs.dns(domain)`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/MISCELLANEOUS.md#dns) - [`ipfs.id()`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/MISCELLANEOUS.md#id) - [`ipfs.ping(id, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/MISCELLANEOUS.md#ping) - - [`ipfs.pingPullStream(id, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/MISCELLANEOUS.md#pingpullstream) - - [`ipfs.pingReadableStream(id, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/MISCELLANEOUS.md#pingreadablestream) - [`ipfs.stop()`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/MISCELLANEOUS.md#stop). Alias to `ipfs.shutdown`. 
- [`ipfs.version()`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/MISCELLANEOUS.md#version) @@ -341,8 +319,6 @@ const ipfs = ipfsClient({ timeout: '2m' }) - [stats](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/STATS.md) - [`ipfs.stats.bitswap()`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/STATS.md#statsbitswap) - [`ipfs.stats.bw([options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/STATS.md#statsbw) - - [`ipfs.stats.bwPullStream([options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/STATS.md#statsbwpullstream) - - [`ipfs.stats.bwReadableStream([options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/STATS.md#statsbwreadablestream) - [`ipfs.stats.repo([options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/STATS.md#statsrepo) - log @@ -382,15 +358,14 @@ Call this on your client instance to return an object containing the `host`, `po Aside from the default export, `ipfs-http-client` exports various types and utilities that are included in the bundle: -- [`isIPFS`](https://www.npmjs.com/package/is-ipfs) - [`Buffer`](https://www.npmjs.com/package/buffer) -- [`PeerId`](https://www.npmjs.com/package/peer-id) -- [`PeerInfo`](https://www.npmjs.com/package/peer-info) - [`multiaddr`](https://www.npmjs.com/package/multiaddr) - [`multibase`](https://www.npmjs.com/package/multibase) - [`multicodec`](https://www.npmjs.com/package/multicodec) -- [`multihash`](https://www.npmjs.com/package/multihash) +- [`multihash`](https://www.npmjs.com/package/multihashes) - [`CID`](https://www.npmjs.com/package/cids) +- [`globSource`](https://github.com/ipfs/js-ipfs-utils/blob/master/src/files/glob-source.js) (not available in the browser) +- [`urlSource`](https://github.com/ipfs/js-ipfs-utils/blob/master/src/files/url-source.js) These can be accessed like this, for example: @@ -400,6 +375,74 @@ const { CID } = require('ipfs-http-client') import { CID } from 'ipfs-http-client' ``` +##### Glob source + +A utility to allow files on the file system to be easily added to IPFS. + +###### `globSource(path, [options])` + +- `path`: A path to a single file or directory to glob from +- `options`: Optional options +- `options.recursive`: If `path` is a directory, use option `{ recursive: true }` to add the directory and all its sub-directories. +- `options.ignore`: To exclude file globs from the directory, use option `{ ignore: ['ignore/this/folder/**', 'and/this/file'] }`. +- `options.hidden`: Hidden/dot files (files or folders starting with a `.`, for example, `.git/`) are not included by default. To add them, use the option `{ hidden: true }`. + +Returns an async iterable that yields `{ path, content }` objects suitable for passing to `ipfs.add`. + +###### Example + +```js +const IpfsHttpClient = require('ipfs-http-client') +const { globSource } = IpfsHttpClient +const ipfs = IpfsHttpClient() + +for await (const file of ipfs.add(globSource('./docs', { recursive: true }))) { + console.log(file) +} +/* +{ + path: 'docs/assets/anchor.js', + cid: CID('QmVHxRocoWgUChLEvfEyDuuD6qJ4PhdDL2dTLcpUy3dSC2'), + size: 15347 +} +{ + path: 'docs/assets/bass-addons.css', + cid: CID('QmPiLWKd6yseMWDTgHegb8T7wVS7zWGYgyvfj7dGNt2viQ'), + size: 232 +} +... +*/ +``` + +##### URL source + +A utility to allow content from the internet to be easily added to IPFS. 
+ +###### `urlSource(url)` + +- `url`: A string URL or [`URL`](https://developer.mozilla.org/en-US/docs/Web/API/URL) instance to send HTTP GET request to + +Returns an async iterable that yields `{ path, content }` objects suitable for passing to `ipfs.add`. + +###### Example + +```js +const IpfsHttpClient = require('ipfs-http-client') +const { urlSource } = IpfsHttpClient +const ipfs = IpfsHttpClient() + +for await (const file of ipfs.add(urlSource('https://ipfs.io/images/ipfs-logo.svg'))) { + console.log(file) +} +/* +{ + path: 'ipfs-logo.svg', + cid: CID('QmTqZhR6f7jzdhLgPArDPnsbZpvvgxzCZycXK7ywkLxSyU'), + size: 3243 +} +*/ +``` + ## Development ### Testing diff --git a/package.json b/package.json index 033449aad..add5c2ebb 100644 --- a/package.json +++ b/package.json @@ -15,13 +15,9 @@ ], "main": "src/index.js", "browser": { - "glob": false, - "fs": false, - "stream": "readable-stream", - "ky-universal": "ky/umd", "./src/add/form-data.js": "./src/add/form-data.browser.js", - "./src/add-from-fs/index.js": "./src/add-from-fs/index.browser.js", - "./src/lib/buffer-to-form-data.js": "./src/lib/buffer-to-form-data.browser.js" + "./src/lib/buffer-to-form-data.js": "./src/lib/buffer-to-form-data.browser.js", + "ipfs-utils/src/files/glob-source": false }, "repository": "github:ipfs/js-ipfs-http-client", "scripts": { @@ -42,55 +38,42 @@ }, "dependencies": { "abort-controller": "^3.0.0", - "async-iterator-to-pull-stream": "^1.3.0", "bignumber.js": "^9.0.0", - "bl": "^4.0.0", "bs58": "^4.0.1", "buffer": "^5.4.2", - "callbackify": "^1.1.0", "cids": "~0.7.1", "debug": "^4.1.0", - "err-code": "^2.0.0", - "explain-error": "^1.0.4", "form-data": "^3.0.0", "ipfs-block": "~0.8.1", - "ipfs-utils": "^0.4.2", - "ipld-dag-cbor": "~0.15.0", - "ipld-dag-pb": "^0.18.1", - "ipld-raw": "^4.0.0", - "is-ipfs": "~0.6.1", - "it-all": "^1.0.1", - "it-glob": "0.0.7", + "ipfs-utils": "^0.7.1", + "ipld-dag-cbor": "^0.15.1", + "ipld-dag-pb": "^0.18.2", + "ipld-raw": "^4.0.1", "it-tar": "^1.1.1", "it-to-stream": "^0.1.1", "iterable-ndjson": "^1.1.0", "ky": "^0.15.0", "ky-universal": "^0.3.0", "merge-options": "^2.0.0", - "multiaddr": "^6.0.6", - "multiaddr-to-uri": "^5.0.0", + "multiaddr": "^7.2.1", + "multiaddr-to-uri": "^5.1.0", "multibase": "~0.6.0", "multicodec": "^1.0.0", "multihashes": "~0.4.14", "parse-duration": "^0.1.1", - "peer-id": "~0.12.3", - "peer-info": "~0.15.1", - "promise-nodeify": "^3.0.1" + "stream-to-it": "^0.2.0" }, "devDependencies": { "aegir": "^20.4.1", "async": "^3.1.0", "browser-process-platform": "~0.1.1", - "cross-env": "^6.0.0", - "detect-node": "^2.0.4", "go-ipfs-dep": "^0.4.22", - "interface-ipfs-core": "^0.128.0", - "ipfsd-ctl": "^1.0.0", - "ndjson": "^1.5.0", - "nock": "^11.4.0", - "pull-stream": "^3.6.14", - "pump": "^3.0.0", - "stream-equal": "^1.1.1" + "interface-ipfs-core": "^0.129.0", + "ipfsd-ctl": "^1.0.2", + "it-all": "^1.0.1", + "it-concat": "^1.0.0", + "it-pipe": "^1.1.0", + "nock": "^11.7.2" }, "engines": { "node": ">=10.3.0", diff --git a/src/add-from-fs/index.browser.js b/src/add-from-fs/index.browser.js deleted file mode 100644 index 81d551294..000000000 --- a/src/add-from-fs/index.browser.js +++ /dev/null @@ -1,3 +0,0 @@ -'use strict' - -module.exports = () => () => { throw new Error('unavailable in the browser') } diff --git a/src/add-from-fs/index.js b/src/add-from-fs/index.js deleted file mode 100644 index 7403f7e46..000000000 --- a/src/add-from-fs/index.js +++ /dev/null @@ -1,8 +0,0 @@ -'use strict' - -const globSource = require('ipfs-utils/src/files/glob-source') - 
-module.exports = (config) => { - const add = require('../add')(config) - return (path, options) => add(globSource(path, options), options) -} diff --git a/src/add-from-url.js b/src/add-from-url.js deleted file mode 100644 index deb3f4bad..000000000 --- a/src/add-from-url.js +++ /dev/null @@ -1,21 +0,0 @@ -'use strict' - -const kyDefault = require('ky-universal').default -const toIterable = require('./lib/stream-to-iterable') - -module.exports = (config) => { - const add = require('./add')(config) - - return async function * addFromURL (url, options) { - options = options || {} - - const { body } = await kyDefault.get(url) - - const input = { - path: decodeURIComponent(new URL(url).pathname.split('/').pop() || ''), - content: toIterable(body) - } - - yield * add(input, options) - } -} diff --git a/src/add/index.js b/src/add/index.js index e8b3c22f0..cd162c2f9 100644 --- a/src/add/index.js +++ b/src/add/index.js @@ -1,8 +1,9 @@ 'use strict' const ndjson = require('iterable-ndjson') +const CID = require('cids') const configure = require('../lib/configure') -const toIterable = require('../lib/stream-to-iterable') +const toIterable = require('stream-to-it/source') const { toFormData } = require('./form-data') const toCamel = require('../lib/object-to-camel') @@ -55,7 +56,7 @@ module.exports = configure(({ ky }) => { function toCoreInterface ({ name, hash, size, mode, mtime, mtimeNsecs }) { const output = { path: name, - hash, + cid: new CID(hash), size: parseInt(size) } diff --git a/src/bitswap/index.js b/src/bitswap/index.js index fb47d4da0..5c8354498 100644 --- a/src/bitswap/index.js +++ b/src/bitswap/index.js @@ -1,9 +1,7 @@ 'use strict' -const callbackify = require('callbackify') - -module.exports = (config) => ({ - wantlist: callbackify.variadic(require('./wantlist')(config)), - stat: callbackify.variadic(require('./stat')(config)), - unwant: callbackify.variadic(require('./unwant')(config)) +module.exports = config => ({ + wantlist: require('./wantlist')(config), + stat: require('./stat')(config), + unwant: require('./unwant')(config) }) diff --git a/src/bitswap/stat.js b/src/bitswap/stat.js index fee13c355..e041f5d9d 100644 --- a/src/bitswap/stat.js +++ b/src/bitswap/stat.js @@ -2,6 +2,7 @@ const configure = require('../lib/configure') const Big = require('bignumber.js') +const CID = require('cids') module.exports = configure(({ ky }) => { return async (options) => { @@ -21,8 +22,8 @@ module.exports = configure(({ ky }) => { function toCoreInterface (res) { return { provideBufLen: res.ProvideBufLen, - wantlist: res.Wantlist || [], - peers: res.Peers || [], + wantlist: (res.Wantlist || []).map(k => new CID(k['/'])), + peers: (res.Peers || []).map(p => new CID(p)), blocksReceived: new Big(res.BlocksReceived), dataReceived: new Big(res.DataReceived), blocksSent: new Big(res.BlocksSent), diff --git a/src/bitswap/wantlist.js b/src/bitswap/wantlist.js index 6b087db1b..d917d6747 100644 --- a/src/bitswap/wantlist.js +++ b/src/bitswap/wantlist.js @@ -24,6 +24,6 @@ module.exports = configure(({ ky }) => { searchParams }).json() - return res + return (res.Keys || []).map(k => new CID(k['/'])) } }) diff --git a/src/block/index.js b/src/block/index.js index 8c683dd83..f25c73366 100644 --- a/src/block/index.js +++ b/src/block/index.js @@ -1,23 +1,8 @@ 'use strict' -const nodeify = require('promise-nodeify') -const callbackify = require('callbackify') -const { collectify } = require('../lib/converters') - -module.exports = config => { - const rm = require('./rm-async-iterator')(config) - - return { - 
get: callbackify.variadic(require('./get')(config)), - stat: callbackify.variadic(require('./stat')(config)), - put: callbackify.variadic(require('./put')(config)), - rm: (input, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} - } - return nodeify(collectify(rm)(input, options), callback) - }, - _rmAsyncIterator: rm - } -} +module.exports = config => ({ + get: require('./get')(config), + stat: require('./stat')(config), + put: require('./put')(config), + rm: require('./rm')(config) +}) diff --git a/src/block/rm-async-iterator.js b/src/block/rm.js similarity index 76% rename from src/block/rm-async-iterator.js rename to src/block/rm.js index e44aedcd2..f8fc8c103 100644 --- a/src/block/rm-async-iterator.js +++ b/src/block/rm.js @@ -3,8 +3,7 @@ const CID = require('cids') const ndjson = require('iterable-ndjson') const configure = require('../lib/configure') -const toIterable = require('../lib/stream-to-iterable') -const toCamel = require('../lib/object-to-camel') +const toIterable = require('stream-to-it/source') module.exports = configure(({ ky }) => { return async function * rm (cid, options) { @@ -31,7 +30,19 @@ module.exports = configure(({ ky }) => { }) for await (const removed of ndjson(toIterable(res.body))) { - yield toCamel(removed) + yield toCoreInterface(removed) } } }) + +function toCoreInterface (removed) { + const out = { + cid: new CID(removed.Hash) + } + + if (removed.Error) { + out.error = new Error(removed.Error) + } + + return out +} diff --git a/src/block/stat.js b/src/block/stat.js index 9063d137c..24b4256fc 100644 --- a/src/block/stat.js +++ b/src/block/stat.js @@ -3,7 +3,6 @@ const CID = require('cids') const { Buffer } = require('buffer') const configure = require('../lib/configure') -const toCamel = require('../lib/object-to-camel') module.exports = configure(({ ky }) => { return async (cid, options) => { @@ -23,6 +22,6 @@ module.exports = configure(({ ky }) => { searchParams }).json() - return toCamel(res) + return { cid: new CID(res.Key), size: res.Size } } }) diff --git a/src/bootstrap/index.js b/src/bootstrap/index.js index 032265803..519a7161e 100644 --- a/src/bootstrap/index.js +++ b/src/bootstrap/index.js @@ -1,9 +1,7 @@ 'use strict' -const callbackify = require('callbackify') - module.exports = config => ({ - add: callbackify.variadic(require('./add')(config)), - rm: callbackify.variadic(require('./rm')(config)), - list: callbackify.variadic(require('./list')(config)) + add: require('./add')(config), + rm: require('./rm')(config), + list: require('./list')(config) }) diff --git a/src/cat.js b/src/cat.js index 32bccf59e..3d4971a1c 100644 --- a/src/cat.js +++ b/src/cat.js @@ -3,7 +3,7 @@ const CID = require('cids') const { Buffer } = require('buffer') const configure = require('./lib/configure') -const toIterable = require('./lib/stream-to-iterable') +const toIterable = require('stream-to-it/source') module.exports = configure(({ ky }) => { return async function * cat (path, options) { diff --git a/src/config/index.js b/src/config/index.js index 36621fd39..cb56190a2 100644 --- a/src/config/index.js +++ b/src/config/index.js @@ -1,10 +1,8 @@ 'use strict' -const callbackify = require('callbackify') - module.exports = config => ({ - get: callbackify.variadic(require('./get')(config)), - set: callbackify.variadic(require('./set')(config)), - replace: callbackify.variadic(require('./replace')(config)), + get: require('./get')(config), + set: require('./set')(config), + replace: require('./replace')(config), 
profiles: require('./profiles')(config) }) diff --git a/src/config/profiles/index.js b/src/config/profiles/index.js index 57cd1ad7f..77b68de41 100644 --- a/src/config/profiles/index.js +++ b/src/config/profiles/index.js @@ -1,8 +1,6 @@ 'use strict' -const callbackify = require('callbackify') - module.exports = config => ({ - apply: callbackify.variadic(require('./apply')(config)), - list: callbackify.variadic(require('./list')(config)) + apply: require('./apply')(config), + list: require('./list')(config) }) diff --git a/src/dag/index.js b/src/dag/index.js index 06194e4a8..3cc4d410e 100644 --- a/src/dag/index.js +++ b/src/dag/index.js @@ -1,9 +1,7 @@ 'use strict' -const callbackify = require('callbackify') - module.exports = config => ({ - get: callbackify.variadic(require('./get')(config)), - put: callbackify.variadic(require('./put')(config)), - resolve: callbackify.variadic(require('./resolve')(config)) + get: require('./get')(config), + put: require('./put')(config), + resolve: require('./resolve')(config) }) diff --git a/src/dht/find-peer.js b/src/dht/find-peer.js index 71779952c..9d8f517e5 100644 --- a/src/dht/find-peer.js +++ b/src/dht/find-peer.js @@ -1,18 +1,18 @@ 'use strict' -const PeerId = require('peer-id') -const PeerInfo = require('peer-info') +const { Buffer } = require('buffer') +const CID = require('cids') const multiaddr = require('multiaddr') const ndjson = require('iterable-ndjson') const configure = require('../lib/configure') -const toIterable = require('../lib/stream-to-iterable') +const toIterable = require('stream-to-it/source') module.exports = configure(({ ky }) => { - return async function * findPeer (peerId, options) { + return async function findPeer (peerId, options) { options = options || {} const searchParams = new URLSearchParams(options.searchParams) - searchParams.set('arg', `${peerId}`) + searchParams.set('arg', `${Buffer.isBuffer(peerId) ? 
new CID(peerId) : peerId}`) if (options.verbose != null) searchParams.set('verbose', options.verbose) const res = await ky.post('dht/findpeer', { @@ -23,15 +23,27 @@ module.exports = configure(({ ky }) => { }) for await (const message of ndjson(toIterable(res.body))) { + // 3 = QueryError + // https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L18 + // https://github.com/ipfs/go-ipfs/blob/eb11f569b064b960d1aba4b5b8ca155a3bd2cb21/core/commands/dht.go#L388-L389 + if (message.Type === 3) { + throw new Error(message.Extra) + } + // 2 = FinalPeer // https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L18 if (message.Type === 2 && message.Responses) { + // There will only be 1: + // https://github.com/ipfs/go-ipfs/blob/eb11f569b064b960d1aba4b5b8ca155a3bd2cb21/core/commands/dht.go#L395-L396 for (const { ID, Addrs } of message.Responses) { - const peerInfo = new PeerInfo(PeerId.createFromB58String(ID)) - if (Addrs) Addrs.forEach(a => peerInfo.multiaddrs.add(multiaddr(a))) - yield peerInfo + return { + id: new CID(ID), + addrs: (Addrs || []).map(a => multiaddr(a)) + } } } } + + throw new Error('not found') } }) diff --git a/src/dht/find-provs.js b/src/dht/find-provs.js index a0fc35722..7adbaf38c 100644 --- a/src/dht/find-provs.js +++ b/src/dht/find-provs.js @@ -1,18 +1,17 @@ 'use strict' -const PeerId = require('peer-id') -const PeerInfo = require('peer-info') +const CID = require('cids') const multiaddr = require('multiaddr') const ndjson = require('iterable-ndjson') const configure = require('../lib/configure') -const toIterable = require('../lib/stream-to-iterable') +const toIterable = require('stream-to-it/source') module.exports = configure(({ ky }) => { return async function * findProvs (cid, options) { options = options || {} const searchParams = new URLSearchParams(options.searchParams) - searchParams.set('arg', `${cid}`) + searchParams.set('arg', `${new CID(cid)}`) if (options.numProviders) searchParams.set('num-providers', options.numProviders) if (options.verbose != null) searchParams.set('verbose', options.verbose) @@ -24,13 +23,21 @@ module.exports = configure(({ ky }) => { }) for await (const message of ndjson(toIterable(res.body))) { + // 3 = QueryError + // https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L18 + // https://github.com/libp2p/go-libp2p-kad-dht/blob/master/routing.go#L525-L526 + if (message.Type === 3) { + throw new Error(message.Extra) + } + // 4 = Provider // https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L20 if (message.Type === 4 && message.Responses) { for (const { ID, Addrs } of message.Responses) { - const peerInfo = new PeerInfo(PeerId.createFromB58String(ID)) - if (Addrs) Addrs.forEach(a => peerInfo.multiaddrs.add(multiaddr(a))) - yield peerInfo + yield { + id: new CID(ID), + addrs: (Addrs || []).map(a => multiaddr(a)) + } } } } diff --git a/src/dht/get.js b/src/dht/get.js index d2cd0db22..4be7b80c2 100644 --- a/src/dht/get.js +++ b/src/dht/get.js @@ -1,30 +1,43 @@ 'use strict' +const { Buffer } = require('buffer') const ndjson = require('iterable-ndjson') +const toIterable = require('stream-to-it/source') +const encodeBufferURIComponent = require('../lib/encode-buffer-uri-component') const configure = require('../lib/configure') -const toIterable = require('../lib/stream-to-iterable') module.exports 
= configure(({ ky }) => { - return async function * get (key, options) { + return async function get (key, options) { options = options || {} const searchParams = new URLSearchParams(options.searchParams) - searchParams.set('arg', `${key}`) if (options.verbose != null) searchParams.set('verbose', options.verbose) - const res = await ky.post('dht/get', { + if (!Buffer.isBuffer(key)) { + throw new Error('invalid key') + } + + const res = await ky.post(`dht/get?key=${encodeBufferURIComponent(key)}&${searchParams}`, { timeout: options.timeout, signal: options.signal, - headers: options.headers, - searchParams + headers: options.headers }) for await (const message of ndjson(toIterable(res.body))) { + // 3 = QueryError + // https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L18 + // https://github.com/ipfs/go-ipfs/blob/eb11f569b064b960d1aba4b5b8ca155a3bd2cb21/core/commands/dht.go#L472-L473 + if (message.Type === 3) { + throw new Error(message.Extra) + } + // 5 = Value // https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L21 if (message.Type === 5) { - yield message.Extra + return message.Extra } } + + throw new Error('not found') } }) diff --git a/src/dht/index.js b/src/dht/index.js index 5478876ff..754e57b83 100644 --- a/src/dht/index.js +++ b/src/dht/index.js @@ -1,30 +1,11 @@ 'use strict' -const callbackify = require('callbackify') -const errCode = require('err-code') -const { collectify } = require('../lib/converters') - -module.exports = config => { - const get = require('./get')(config) - const findPeer = require('./find-peer')(config) - - return { - get: callbackify.variadic(async (key, options) => { - for await (const value of get(key, options)) { - return value - } - throw errCode(new Error('value not found'), 'ERR_TYPE_5_NOT_FOUND') - }), - put: callbackify.variadic(collectify(require('./put')(config))), - findProvs: callbackify.variadic(collectify(require('./find-provs')(config))), - findPeer: callbackify.variadic(async (peerId, options) => { - for await (const peerInfo of findPeer(peerId, options)) { - return peerInfo - } - throw errCode(new Error('final peer not found'), 'ERR_TYPE_2_NOT_FOUND') - }), - provide: callbackify.variadic(collectify(require('./provide')(config))), - // find closest peerId to given peerId - query: callbackify.variadic(collectify(require('./query')(config))) - } -} +module.exports = config => ({ + get: require('./get')(config), + put: require('./put')(config), + findProvs: require('./find-provs')(config), + findPeer: require('./find-peer')(config), + provide: require('./provide')(config), + // find closest peerId to given peerId + query: require('./query')(config) +}) diff --git a/src/dht/provide.js b/src/dht/provide.js index cb72f9c6d..f9013bfed 100644 --- a/src/dht/provide.js +++ b/src/dht/provide.js @@ -1,11 +1,10 @@ 'use strict' -const PeerId = require('peer-id') -const PeerInfo = require('peer-info') +const CID = require('cids') const multiaddr = require('multiaddr') const ndjson = require('iterable-ndjson') const configure = require('../lib/configure') -const toIterable = require('../lib/stream-to-iterable') +const toIterable = require('stream-to-it/source') const toCamel = require('../lib/object-to-camel') module.exports = configure(({ ky }) => { @@ -14,7 +13,7 @@ module.exports = configure(({ ky }) => { options = options || {} const searchParams = new URLSearchParams(options.searchParams) - cids.forEach(cid => 
searchParams.append('arg', `${cid}`)) + cids.forEach(cid => searchParams.append('arg', `${new CID(cid)}`)) if (options.recursive != null) searchParams.set('recursive', options.recursive) if (options.verbose != null) searchParams.set('verbose', options.verbose) @@ -26,13 +25,22 @@ module.exports = configure(({ ky }) => { }) for await (let message of ndjson(toIterable(res.body))) { + // 3 = QueryError + // https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L18 + // https://github.com/ipfs/go-ipfs/blob/eb11f569b064b960d1aba4b5b8ca155a3bd2cb21/core/commands/dht.go#L283-L284 + if (message.Type === 3) { + throw new Error(message.Extra) + } + message = toCamel(message) + message.id = new CID(message.id) if (message.responses) { - message.responses = message.responses.map(({ ID, Addrs }) => { - const peerInfo = new PeerInfo(PeerId.createFromB58String(ID)) - if (Addrs) Addrs.forEach(a => peerInfo.multiaddrs.add(multiaddr(a))) - return peerInfo - }) + message.responses = message.responses.map(({ ID, Addrs }) => ({ + id: new CID(ID), + addrs: (Addrs || []).map(a => multiaddr(a)) + })) + } else { + message.responses = [] } yield message } diff --git a/src/dht/put.js b/src/dht/put.js index 49a6947aa..6d0ecf6fc 100644 --- a/src/dht/put.js +++ b/src/dht/put.js @@ -1,11 +1,11 @@ 'use strict' -const PeerId = require('peer-id') -const PeerInfo = require('peer-info') +const { Buffer } = require('buffer') +const CID = require('cids') const multiaddr = require('multiaddr') const ndjson = require('iterable-ndjson') const configure = require('../lib/configure') -const toIterable = require('../lib/stream-to-iterable') +const toIterable = require('stream-to-it/source') const encodeBufferURIComponent = require('../lib/encode-buffer-uri-component') const toCamel = require('../lib/object-to-camel') @@ -27,13 +27,20 @@ module.exports = configure(({ ky }) => { }) for await (let message of ndjson(toIterable(res.body))) { + // 3 = QueryError + // https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L18 + // https://github.com/ipfs/go-ipfs/blob/eb11f569b064b960d1aba4b5b8ca155a3bd2cb21/core/commands/dht.go#L472-L473 + if (message.Type === 3) { + throw new Error(message.Extra) + } + message = toCamel(message) + message.id = new CID(message.id) if (message.responses) { - message.responses = message.responses.map(({ ID, Addrs }) => { - const peerInfo = new PeerInfo(PeerId.createFromB58String(ID)) - if (Addrs) Addrs.forEach(a => peerInfo.multiaddrs.add(multiaddr(a))) - return peerInfo - }) + message.responses = message.responses.map(({ ID, Addrs }) => ({ + id: new CID(ID), + addrs: (Addrs || []).map(a => multiaddr(a)) + })) } yield message } diff --git a/src/dht/query.js b/src/dht/query.js index 5aefaf90d..1628c0cc8 100644 --- a/src/dht/query.js +++ b/src/dht/query.js @@ -1,17 +1,18 @@ 'use strict' -const PeerId = require('peer-id') -const PeerInfo = require('peer-info') +const CID = require('cids') const ndjson = require('iterable-ndjson') +const multiaddr = require('multiaddr') +const toIterable = require('stream-to-it/source') const configure = require('../lib/configure') -const toIterable = require('../lib/stream-to-iterable') +const toCamel = require('../lib/object-to-camel') module.exports = configure(({ ky }) => { return async function * query (peerId, options) { options = options || {} const searchParams = new URLSearchParams(options.searchParams) - searchParams.set('arg', `${peerId}`) + 
searchParams.set('arg', `${Buffer.isBuffer(peerId) ? new CID(peerId) : peerId}`) if (options.verbose != null) searchParams.set('verbose', options.verbose) const res = await ky.post('dht/query', { @@ -21,8 +22,14 @@ module.exports = configure(({ ky }) => { searchParams }) - for await (const message of ndjson(toIterable(res.body))) { - yield new PeerInfo(PeerId.createFromB58String(message.ID)) + for await (let message of ndjson(toIterable(res.body))) { + message = toCamel(message) + message.id = new CID(message.id) + message.responses = (message.responses || []).map(({ ID, Addrs }) => ({ + id: new CID(ID), + addrs: (Addrs || []).map(a => multiaddr(a)) + })) + yield message } } }) diff --git a/src/diag/index.js b/src/diag/index.js index 7e48d576d..d3ac85d55 100644 --- a/src/diag/index.js +++ b/src/diag/index.js @@ -1,9 +1,7 @@ 'use strict' -const callbackify = require('callbackify') - module.exports = config => ({ - net: callbackify.variadic(require('./net')(config)), - sys: callbackify.variadic(require('./sys')(config)), - cmds: callbackify.variadic(require('./cmds')(config)) + net: require('./net')(config), + sys: require('./sys')(config), + cmds: require('./cmds')(config) }) diff --git a/src/files/index.js b/src/files/index.js index ee6b7d8d8..5dbf395a2 100644 --- a/src/files/index.js +++ b/src/files/index.js @@ -1,27 +1,15 @@ 'use strict' -const callbackify = require('callbackify') -const { collectify, streamify, pullify, concatify } = require('../lib/converters') - -module.exports = config => { - const ls = require('./ls')(config) - const read = require('./read')(config) - - return { - chmod: callbackify.variadic(require('./chmod')(config)), - cp: callbackify.variadic(require('./cp')(config)), - mkdir: callbackify.variadic(require('./mkdir')(config)), - flush: callbackify.variadic(require('./flush')(config)), - stat: callbackify.variadic(require('./stat')(config)), - rm: callbackify.variadic(require('./rm')(config)), - ls: callbackify.variadic(collectify(ls)), - lsReadableStream: streamify.readable(ls), - lsPullStream: pullify.source(ls), - read: callbackify.variadic(concatify(read)), - readReadableStream: streamify.readable(read), - readPullStream: pullify.source(read), - touch: callbackify.variadic(require('./touch')(config)), - write: callbackify.variadic(require('./write')(config)), - mv: callbackify.variadic(require('./mv')(config)) - } -} +module.exports = config => ({ + chmod: require('./chmod')(config), + cp: require('./cp')(config), + mkdir: require('./mkdir')(config), + flush: require('./flush')(config), + stat: require('./stat')(config), + rm: require('./rm')(config), + ls: require('./ls')(config), + read: require('./read')(config), + touch: require('./touch')(config), + write: require('./write')(config), + mv: require('./mv')(config) +}) diff --git a/src/files/ls.js b/src/files/ls.js index 51ee33912..f07c65c09 100644 --- a/src/files/ls.js +++ b/src/files/ls.js @@ -2,7 +2,7 @@ const CID = require('cids') const ndjson = require('iterable-ndjson') -const toIterable = require('../lib/stream-to-iterable') +const toIterable = require('stream-to-it/source') const configure = require('../lib/configure') const toCamelWithMetadata = require('../lib/object-to-camel-with-metadata') @@ -17,9 +17,11 @@ module.exports = configure(({ ky }) => { const searchParams = new URLSearchParams(options.searchParams) searchParams.set('arg', CID.isCID(path) ? `/ipfs/${path}` : path) - searchParams.set('stream', true) + searchParams.set('stream', options.stream == null ? 
true : options.stream) if (options.cidBase) searchParams.set('cid-base', options.cidBase) - if (options.long != null) searchParams.set('long', options.long) + searchParams.set('long', options.long == null ? true : options.long) + // TODO: remove after go-ipfs 0.5 is released + searchParams.set('l', options.long == null ? true : options.long) const res = await ky.post('files/ls', { timeout: options.timeout, @@ -32,12 +34,17 @@ module.exports = configure(({ ky }) => { // go-ipfs does not yet support the "stream" option if ('Entries' in result) { for (const entry of result.Entries || []) { - yield toCamelWithMetadata(entry) + yield toCoreInterface(toCamelWithMetadata(entry)) } - return + } else { + yield toCoreInterface(toCamelWithMetadata(result)) } - - yield toCamelWithMetadata(result) } } }) + +function toCoreInterface (entry) { + if (entry.hash) entry.cid = new CID(entry.hash) + delete entry.hash + return entry +} diff --git a/src/files/read.js b/src/files/read.js index 5a6a14acb..1800609d0 100644 --- a/src/files/read.js +++ b/src/files/read.js @@ -2,7 +2,7 @@ const { Buffer } = require('buffer') const configure = require('../lib/configure') -const toIterable = require('../lib/stream-to-iterable') +const toIterable = require('stream-to-it/source') module.exports = configure(({ ky }) => { return async function * read (path, options) { diff --git a/src/files/stat.js b/src/files/stat.js index 1b4af061b..0d8c9caa1 100644 --- a/src/files/stat.js +++ b/src/files/stat.js @@ -1,5 +1,6 @@ 'use strict' +const CID = require('cids') const configure = require('../lib/configure') const toCamelWithMetadata = require('../lib/object-to-camel-with-metadata') @@ -27,7 +28,12 @@ module.exports = configure(({ ky }) => { }).json() res.WithLocality = res.WithLocality || false - - return toCamelWithMetadata(res) + return toCoreInterface(toCamelWithMetadata(res)) } }) + +function toCoreInterface (entry) { + entry.cid = new CID(entry.hash) + delete entry.hash + return entry +} diff --git a/src/get.js b/src/get.js index 1a5ebfd93..635f8b34f 100644 --- a/src/get.js +++ b/src/get.js @@ -4,7 +4,7 @@ const configure = require('./lib/configure') const Tar = require('it-tar') const { Buffer } = require('buffer') const CID = require('cids') -const toIterable = require('./lib/stream-to-iterable') +const toIterable = require('stream-to-it/source') module.exports = configure(({ ky }) => { return async function * get (path, options) { diff --git a/src/index.js b/src/index.js index 939f5986f..320a5a9a8 100644 --- a/src/index.js +++ b/src/index.js @@ -1,159 +1,51 @@ 'use strict' -const isIPFS = require('is-ipfs') const { Buffer } = require('buffer') const CID = require('cids') const multiaddr = require('multiaddr') const multibase = require('multibase') const multicodec = require('multicodec') const multihash = require('multihashes') -const PeerId = require('peer-id') -const PeerInfo = require('peer-info') -const nodeify = require('promise-nodeify') -const callbackify = require('callbackify') -const all = require('it-all') -const toPullStream = require('async-iterator-to-pull-stream') -const toStream = require('it-to-stream') -const BufferList = require('bl/BufferList') -const { concatify, collectify, pullify, streamify } = require('./lib/converters') +const globSource = require('ipfs-utils/src/files/glob-source') +const urlSource = require('ipfs-utils/src/files/url-source') function ipfsClient (config) { - const add = require('./add')(config) - const addFromFs = require('./add-from-fs')(config) - const addFromURL = 
require('./add-from-url')(config) - const cat = require('./cat')(config) - const get = require('./get')(config) - const ls = require('./ls')(config) - const ping = require('./ping')(config) - const refs = require('./refs')(config) - - const api = { - add: (input, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} - } - return nodeify(collectify(add)(input, options), callback) - }, - addReadableStream: streamify.transform(add), - addPullStream: pullify.transform(add), - addFromFs: (path, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} - } - return nodeify(collectify(addFromFs)(path, options), callback) - }, - addFromURL: (url, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} - } - return nodeify(collectify(addFromURL)(url, options), callback) - }, - addFromStream: (input, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} - } - return nodeify(collectify(add)(input, options), callback) - }, - _addAsyncIterator: add, + return { + add: require('./add')(config), bitswap: require('./bitswap')(config), block: require('./block')(config), bootstrap: require('./bootstrap')(config), - cat: callbackify.variadic((path, options) => concatify(cat)(path, options)), - catReadableStream: streamify.readable(cat), - catPullStream: pullify.source(cat), - _catAsyncIterator: cat, - commands: callbackify.variadic(require('./commands')(config)), + cat: require('./cat')(config), + commands: require('./commands')(config), config: require('./config')(config), dag: require('./dag')(config), dht: require('./dht')(config), diag: require('./diag')(config), - dns: callbackify.variadic(require('./dns')(config)), + dns: require('./dns')(config), files: require('./files')(config), - get: callbackify.variadic(async (path, options) => { - const output = [] - - for await (const entry of get(path, options)) { - if (entry.content) { - entry.content = new BufferList(await all(entry.content)).slice() - } - - output.push(entry) - } - - return output - }), + get: require('./get')(config), getEndpointConfig: require('./get-endpoint-config')(config), - getReadableStream: streamify.readable(async function * (path, options) { - for await (const file of get(path, options)) { - if (file.content) { - const { content } = file - file.content = toStream((async function * () { - for await (const chunk of content) { - yield chunk.slice() // Convert bl to Buffer - } - })()) - } - - yield file - } - }), - getPullStream: pullify.source(async function * (path, options) { - for await (const file of get(path, options)) { - if (file.content) { - const { content } = file - file.content = toPullStream((async function * () { - for await (const chunk of content) { - yield chunk.slice() // Convert bl to Buffer - } - })()) - } - - yield file - } - }), - _getAsyncIterator: get, - id: callbackify.variadic(require('./id')(config)), + id: require('./id')(config), key: require('./key')(config), log: require('./log')(config), - ls: callbackify.variadic((path, options) => collectify(ls)(path, options)), - lsReadableStream: streamify.readable(ls), - lsPullStream: pullify.source(ls), - _lsAsyncIterator: ls, - mount: callbackify.variadic(require('./mount')(config)), + ls: require('./ls')(config), + mount: require('./mount')(config), name: require('./name')(config), object: require('./object')(config), pin: require('./pin')(config), - ping: 
callbackify.variadic(collectify(ping)), - pingReadableStream: streamify.readable(ping), - pingPullStream: pullify.source(ping), + ping: require('./ping')(config), pubsub: require('./pubsub')(config), - refs: callbackify.variadic((path, options) => collectify(refs)(path, options)), - refsReadableStream: streamify.readable(refs), - refsPullStream: pullify.source(refs), - _refsAsyncIterator: refs, + refs: require('./refs')(config), repo: require('./repo')(config), - resolve: callbackify.variadic(require('./resolve')(config)), + resolve: require('./resolve')(config), stats: require('./stats')(config), - stop: callbackify.variadic(require('./stop')(config)), - shutdown: callbackify.variadic(require('./stop')(config)), + stop: require('./stop')(config), + shutdown: require('./stop')(config), swarm: require('./swarm')(config), - version: callbackify.variadic(require('./version')(config)) + version: require('./version')(config) } - - Object.assign(api.refs, { - local: callbackify.variadic(options => collectify(refs.local)(options)), - localReadableStream: streamify.readable(refs.local), - localPullStream: pullify.source(refs.local), - _localAsyncIterator: refs.local - }) - - return api } -Object.assign(ipfsClient, { isIPFS, Buffer, CID, multiaddr, multibase, multicodec, multihash, PeerId, PeerInfo }) +Object.assign(ipfsClient, { Buffer, CID, multiaddr, multibase, multicodec, multihash, globSource, urlSource }) module.exports = ipfsClient diff --git a/src/key/index.js b/src/key/index.js index 7293236f1..de12c732c 100644 --- a/src/key/index.js +++ b/src/key/index.js @@ -1,12 +1,10 @@ 'use strict' -const callbackify = require('callbackify') - module.exports = config => ({ - gen: callbackify.variadic(require('./gen')(config)), - list: callbackify.variadic(require('./list')(config)), - rename: callbackify.variadic(require('./rename')(config)), - rm: callbackify.variadic(require('./rm')(config)), - export: callbackify.variadic(require('./export')(config)), - import: callbackify.variadic(require('./import')(config)) + gen: require('./gen')(config), + list: require('./list')(config), + rename: require('./rename')(config), + rm: require('./rm')(config), + export: require('./export')(config), + import: require('./import')(config) }) diff --git a/src/lib/converters.js b/src/lib/converters.js deleted file mode 100644 index 4f42b3175..000000000 --- a/src/lib/converters.js +++ /dev/null @@ -1,20 +0,0 @@ -'use strict' - -const toPull = require('async-iterator-to-pull-stream') -const all = require('it-all') -const toStream = require('it-to-stream') -const { Buffer } = require('buffer') - -exports.collectify = fn => (...args) => all(fn(...args)) - -exports.concatify = fn => async (...args) => Buffer.concat(await all(fn(...args))) - -exports.pullify = { - source: fn => (...args) => toPull(fn(...args)), - transform: fn => (...args) => toPull.transform(source => fn(source, ...args)) -} - -exports.streamify = { - readable: fn => (...args) => toStream(fn(...args), { objectMode: true }), - transform: fn => (...args) => toStream.transform(source => fn(source, ...args), { objectMode: true }) -} diff --git a/src/lib/stream-to-iterable.js b/src/lib/stream-to-iterable.js deleted file mode 100644 index 5e06a99c6..000000000 --- a/src/lib/stream-to-iterable.js +++ /dev/null @@ -1,25 +0,0 @@ -'use strict' - -module.exports = function toIterable (body) { - // Node.js stream - if (body[Symbol.asyncIterator]) return body - - // Browser ReadableStream - if (body.getReader) { - return (async function * () { - const reader = 
body.getReader() - - try { - while (true) { - const { done, value } = await reader.read() - if (done) return - yield value - } - } finally { - reader.releaseLock() - } - })() - } - - throw new Error('unknown stream') -} diff --git a/src/log/index.js b/src/log/index.js index f7d94f910..3eea39fd5 100644 --- a/src/log/index.js +++ b/src/log/index.js @@ -1,9 +1,7 @@ 'use strict' -const callbackify = require('callbackify') - module.exports = config => ({ tail: require('./tail')(config), - ls: callbackify.variadic(require('./ls')(config)), - level: callbackify.variadic(require('./level')(config)) + ls: require('./ls')(config), + level: require('./level')(config) }) diff --git a/src/log/tail.js b/src/log/tail.js index 00708e9b9..74b72b2c2 100644 --- a/src/log/tail.js +++ b/src/log/tail.js @@ -2,7 +2,7 @@ const ndjson = require('iterable-ndjson') const configure = require('../lib/configure') -const toIterable = require('../lib/stream-to-iterable') +const toIterable = require('stream-to-it/source') module.exports = configure(({ ky }) => { return async function * tail (options) { diff --git a/src/ls.js b/src/ls.js index 43e92a54a..ec7e37dfb 100644 --- a/src/ls.js +++ b/src/ls.js @@ -2,6 +2,8 @@ const { Buffer } = require('buffer') const CID = require('cids') +const ndjson = require('iterable-ndjson') +const toIterable = require('stream-to-it/source') const configure = require('./lib/configure') module.exports = configure(({ ky }) => { @@ -10,18 +12,11 @@ module.exports = configure(({ ky }) => { const searchParams = new URLSearchParams() searchParams.set('arg', `${Buffer.isBuffer(path) ? new CID(path) : path}`) + searchParams.set('stream', options.stream == null ? true : options.stream) - if (options.long !== undefined) { - searchParams.set('long', options.long) - } - - if (options.unsorted !== undefined) { - searchParams.set('unsorted', options.unsorted) - } - - if (options.recursive !== undefined) { - searchParams.set('recursive', options.recursive) - } + if (options.long != null) searchParams.set('long', options.long) + if (options.unsorted != null) searchParams.set('unsorted', options.unsorted) + if (options.recursive != null) searchParams.set('recursive', options.recursive) const res = await ky.post('ls', { timeout: options.timeout, @@ -30,48 +25,49 @@ module.exports = configure(({ ky }) => { searchParams }) - let result = await res.json() - - result = result.Objects - if (!result) { - throw new Error('expected .Objects in results') - } - - result = result[0] - if (!result) { - throw new Error('expected one array in results.Objects') - } + for await (let result of ndjson(toIterable(res.body))) { + result = result.Objects - result = result.Links - if (!Array.isArray(result)) { - throw new Error('expected one array in results.Objects[0].Links') - } + if (!result) { + throw new Error('expected .Objects in results') + } - for (const link of result) { - const entry = { - name: link.Name, - path: path + '/' + link.Name, - size: link.Size, - hash: link.Hash, - type: typeOf(link), - depth: link.Depth || 1 + result = result[0] + if (!result) { + throw new Error('expected one array in results.Objects') } - if (link.Mode) { - entry.mode = parseInt(link.Mode, 8) + result = result.Links + if (!Array.isArray(result)) { + throw new Error('expected one array in results.Objects[0].Links') } - if (link.Mtime !== undefined && link.Mtime !== null) { - entry.mtime = { - secs: link.Mtime + for (const link of result) { + const entry = { + name: link.Name, + path: path + '/' + link.Name, + size: link.Size, + cid: new 
CID(link.Hash), + type: typeOf(link), + depth: link.Depth || 1 } - if (link.MtimeNsecs !== undefined && link.MtimeNsecs !== null) { - entry.mtime.nsecs = link.MtimeNsecs + if (link.Mode) { + entry.mode = parseInt(link.Mode, 8) } - } - yield entry + if (link.Mtime !== undefined && link.Mtime !== null) { + entry.mtime = { + secs: link.Mtime + } + + if (link.MtimeNsecs !== undefined && link.MtimeNsecs !== null) { + entry.mtime.nsecs = link.MtimeNsecs + } + } + + yield entry + } } } }) diff --git a/src/name/index.js b/src/name/index.js index 7a6837b82..65d1acca2 100644 --- a/src/name/index.js +++ b/src/name/index.js @@ -1,9 +1,7 @@ 'use strict' -const callbackify = require('callbackify') - module.exports = config => ({ - publish: callbackify.variadic(require('./publish')(config)), - resolve: callbackify.variadic(require('./resolve')(config)), + publish: require('./publish')(config), + resolve: require('./resolve')(config), pubsub: require('./pubsub')(config) }) diff --git a/src/name/pubsub/index.js b/src/name/pubsub/index.js index 70db19874..14ffd8852 100644 --- a/src/name/pubsub/index.js +++ b/src/name/pubsub/index.js @@ -1,9 +1,7 @@ 'use strict' -const callbackify = require('callbackify') - module.exports = config => ({ - cancel: callbackify.variadic(require('./cancel')(config)), - state: callbackify.variadic(require('./state')(config)), - subs: callbackify.variadic(require('./subs')(config)) + cancel: require('./cancel')(config), + state: require('./state')(config), + subs: require('./subs')(config) }) diff --git a/src/name/resolve.js b/src/name/resolve.js index b6e8db47e..e7eb20b4f 100644 --- a/src/name/resolve.js +++ b/src/name/resolve.js @@ -1,13 +1,16 @@ 'use strict' +const ndjson = require('iterable-ndjson') const configure = require('../lib/configure') +const toIterable = require('stream-to-it/source') module.exports = configure(({ ky }) => { - return async (path, options) => { + return async function * (path, options) { options = options || {} const searchParams = new URLSearchParams(options.searchParams) searchParams.set('arg', path) + searchParams.set('stream', options.stream == null ? true : options.stream) if (options.dhtRecordCount != null) searchParams.set('dht-record-count', options.dhtRecordCount) if (options.dhtTimeout != null) searchParams.set('dht-timeout', options.dhtTimeout) if (options.noCache != null) searchParams.set('nocache', options.noCache) @@ -18,8 +21,10 @@ module.exports = configure(({ ky }) => { signal: options.signal, headers: options.headers, searchParams - }).json() + }) - return res.Path + for await (const result of ndjson(toIterable(res.body))) { + yield result.Path + } } }) diff --git a/src/object/data.js b/src/object/data.js index 48291f722..5f40e0450 100644 --- a/src/object/data.js +++ b/src/object/data.js @@ -3,24 +3,21 @@ const { Buffer } = require('buffer') const CID = require('cids') const configure = require('../lib/configure') -const toIterable = require('../lib/stream-to-iterable') module.exports = configure(({ ky }) => { - return async function * data (cid, options) { + return async function data (cid, options) { options = options || {} const searchParams = new URLSearchParams(options.searchParams) searchParams.set('arg', `${Buffer.isBuffer(cid) ? 
new CID(cid) : cid}`) - const res = await ky.post('object/data', { + const data = await ky.post('object/data', { timeout: options.timeout, signal: options.signal, headers: options.headers, searchParams - }) + }).arrayBuffer() - for await (const chunk of toIterable(res.body)) { - yield Buffer.from(chunk) - } + return Buffer.from(data) } }) diff --git a/src/object/index.js b/src/object/index.js index dac85598c..ee7c85dd6 100644 --- a/src/object/index.js +++ b/src/object/index.js @@ -1,14 +1,11 @@ 'use strict' -const callbackify = require('callbackify') -const { concatify } = require('../lib/converters') - module.exports = config => ({ - data: callbackify.variadic(concatify(require('./data')(config))), - get: callbackify.variadic(require('./get')(config)), - links: callbackify.variadic(require('./links')(config)), - new: callbackify.variadic(require('./new')(config)), + data: require('./data')(config), + get: require('./get')(config), + links: require('./links')(config), + new: require('./new')(config), patch: require('./patch')(config), - put: callbackify.variadic(require('./put')(config)), - stat: callbackify.variadic(require('./stat')(config)) + put: require('./put')(config), + stat: require('./stat')(config) }) diff --git a/src/object/patch/index.js b/src/object/patch/index.js index 5711200ec..f886e288f 100644 --- a/src/object/patch/index.js +++ b/src/object/patch/index.js @@ -1,10 +1,8 @@ 'use strict' -const callbackify = require('callbackify') - module.exports = config => ({ - addLink: callbackify.variadic(require('./add-link')(config)), - appendData: callbackify.variadic(require('./append-data')(config)), - rmLink: callbackify.variadic(require('./rm-link')(config)), - setData: callbackify.variadic(require('./set-data')(config)) + addLink: require('./add-link')(config), + appendData: require('./append-data')(config), + rmLink: require('./rm-link')(config), + setData: require('./set-data')(config) }) diff --git a/src/pin/add.js b/src/pin/add.js index 74eb47a2d..41b119fe8 100644 --- a/src/pin/add.js +++ b/src/pin/add.js @@ -1,13 +1,15 @@ 'use strict' +const CID = require('cids') const configure = require('../lib/configure') module.exports = configure(({ ky }) => { - return async (path, options) => { + return async (paths, options) => { + paths = Array.isArray(paths) ? 
paths : [paths] options = options || {} const searchParams = new URLSearchParams(options.searchParams) - searchParams.set('arg', `${path}`) + paths.forEach(path => searchParams.append('arg', `${path}`)) if (options.recursive != null) searchParams.set('recursive', options.recursive) const res = await ky.post('pin/add', { @@ -17,6 +19,6 @@ module.exports = configure(({ ky }) => { searchParams }).json() - return (res.Pins || []).map(hash => ({ hash })) + return (res.Pins || []).map(cid => ({ cid: new CID(cid) })) } }) diff --git a/src/pin/index.js b/src/pin/index.js index c62d3a46a..ad43057ec 100644 --- a/src/pin/index.js +++ b/src/pin/index.js @@ -1,9 +1,7 @@ 'use strict' -const callbackify = require('callbackify') - module.exports = config => ({ - add: callbackify.variadic(require('./add')(config)), - rm: callbackify.variadic(require('./rm')(config)), - ls: callbackify.variadic(require('./ls')(config)) + add: require('./add')(config), + rm: require('./rm')(config), + ls: require('./ls')(config) }) diff --git a/src/pin/ls.js b/src/pin/ls.js index 9c42fd71d..f9e0968ac 100644 --- a/src/pin/ls.js +++ b/src/pin/ls.js @@ -1,9 +1,12 @@ 'use strict' +const ndjson = require('iterable-ndjson') +const CID = require('cids') const configure = require('../lib/configure') +const toIterable = require('stream-to-it/source') module.exports = configure(({ ky }) => { - return async (path, options) => { + return async function * ls (path, options) { if (path && path.type) { options = path path = null @@ -14,16 +17,25 @@ module.exports = configure(({ ky }) => { options = options || {} const searchParams = new URLSearchParams(options.searchParams) + searchParams.set('stream', options.stream == null ? true : options.stream) path.forEach(p => searchParams.append('arg', `${p}`)) if (options.type) searchParams.set('type', options.type) - const { Keys } = await ky.post('pin/ls', { + const res = await ky.post('pin/ls', { timeout: options.timeout, signal: options.signal, headers: options.headers, searchParams - }).json() + }) - return Object.keys(Keys).map(hash => ({ hash, type: Keys[hash].Type })) + for await (const pin of ndjson(toIterable(res.body))) { + if (pin.Keys) { // non-streaming response + for (const cid of Object.keys(pin.Keys)) { + yield { cid: new CID(cid), type: pin.Keys[cid].Type } + } + return + } + yield { cid: new CID(pin.Cid), type: pin.Type } + } } }) diff --git a/src/pin/rm.js b/src/pin/rm.js index 9f75307ff..83fbca93c 100644 --- a/src/pin/rm.js +++ b/src/pin/rm.js @@ -1,5 +1,6 @@ 'use strict' +const CID = require('cids') const configure = require('../lib/configure') module.exports = configure(({ ky }) => { @@ -17,6 +18,6 @@ module.exports = configure(({ ky }) => { searchParams }).json() - return (res.Pins || []).map(hash => ({ hash })) + return (res.Pins || []).map(cid => ({ cid: new CID(cid) })) } }) diff --git a/src/ping.js b/src/ping.js index 33b275617..332120934 100644 --- a/src/ping.js +++ b/src/ping.js @@ -2,7 +2,7 @@ const ndjson = require('iterable-ndjson') const configure = require('./lib/configure') -const toIterable = require('./lib/stream-to-iterable') +const toIterable = require('stream-to-it/source') const toCamel = require('./lib/object-to-camel') module.exports = configure(({ ky }) => { diff --git a/src/pubsub/index.js b/src/pubsub/index.js index 2738bd5ac..e369bb6f3 100644 --- a/src/pubsub/index.js +++ b/src/pubsub/index.js @@ -1,50 +1,9 @@ 'use strict' -const nodeify = require('promise-nodeify') - -// This file is temporary and for compatibility with legacy usage -module.exports 
= (send, options) => { - if (typeof send !== 'function') { - options = send - } - - const ls = require('./ls')(options) - const peers = require('./peers')(options) - const publish = require('./publish')(options) - const subscribe = require('./subscribe')(options) - const unsubscribe = require('./unsubscribe')(options) - - return { - ls: (options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} - } - return nodeify(ls(options), callback) - }, - peers: (topic, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} - } - return nodeify(peers(topic, options), callback) - }, - publish: (topic, data, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} - } - return nodeify(publish(topic, data, options), callback) - }, - subscribe: (topic, handler, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} - } - return nodeify(subscribe(topic, handler, options), callback) - }, - unsubscribe: (topic, handler, callback) => { - return nodeify(unsubscribe(topic, handler), callback) - } - } -} +module.exports = config => ({ + ls: require('./ls')(config), + peers: require('./peers')(config), + publish: require('./publish')(config), + subscribe: require('./subscribe')(config), + unsubscribe: require('./unsubscribe')(config) +}) diff --git a/src/pubsub/subscribe.js b/src/pubsub/subscribe.js index 7950a274a..188a91664 100644 --- a/src/pubsub/subscribe.js +++ b/src/pubsub/subscribe.js @@ -1,12 +1,11 @@ 'use strict' const ndjson = require('iterable-ndjson') -const explain = require('explain-error') const bs58 = require('bs58') const { Buffer } = require('buffer') const log = require('debug')('ipfs-http-client:pubsub:subscribe') const configure = require('../lib/configure') -const toIterable = require('../lib/stream-to-iterable') +const toIterable = require('stream-to-it/source') const SubscriptionTracker = require('./subscription-tracker') module.exports = configure((config) => { @@ -71,7 +70,8 @@ async function readMessages (msgStream, { onMessage, onEnd, onError }) { topicIDs: msg.topicIDs }) } catch (err) { - onError(explain(err, 'Failed to parse pubsub message'), false, msg) // Not fatal + err.message = `Failed to parse pubsub message: ${err.message}` + onError(err, false, msg) // Not fatal } } } catch (err) { diff --git a/src/refs/index.js b/src/refs/index.js index b15f2cd0c..05a636feb 100644 --- a/src/refs/index.js +++ b/src/refs/index.js @@ -4,7 +4,7 @@ const configure = require('../lib/configure') const { Buffer } = require('buffer') const CID = require('cids') const ndjson = require('iterable-ndjson') -const toIterable = require('../lib/stream-to-iterable') +const toIterable = require('stream-to-it/source') const toCamel = require('../lib/object-to-camel') module.exports = config => { diff --git a/src/refs/local.js b/src/refs/local.js index afa1630ea..98e0fce40 100644 --- a/src/refs/local.js +++ b/src/refs/local.js @@ -2,7 +2,7 @@ const configure = require('../lib/configure') const ndjson = require('iterable-ndjson') -const toIterable = require('../lib/stream-to-iterable') +const toIterable = require('stream-to-it/source') const toCamel = require('../lib/object-to-camel') module.exports = configure(({ ky }) => { diff --git a/src/repo/gc.js b/src/repo/gc.js index 3d92dee4c..fc60a46bc 100644 --- a/src/repo/gc.js +++ b/src/repo/gc.js @@ -3,7 +3,7 @@ const CID = require('cids') const ndjson = require('iterable-ndjson') const 
configure = require('../lib/configure') -const toIterable = require('../lib/stream-to-iterable') +const toIterable = require('stream-to-it/source') module.exports = configure(({ ky }) => { return async function * gc (peerId, options) { diff --git a/src/repo/index.js b/src/repo/index.js index fe58504ad..4785f55db 100644 --- a/src/repo/index.js +++ b/src/repo/index.js @@ -1,10 +1,7 @@ 'use strict' -const callbackify = require('callbackify') -const { collectify } = require('../lib/converters') - module.exports = config => ({ - gc: callbackify.variadic(collectify(require('./gc')(config))), - stat: callbackify.variadic(require('./stat')(config)), - version: callbackify.variadic(require('./version')(config)) + gc: require('./gc')(config), + stat: require('./stat')(config), + version: require('./version')(config) }) diff --git a/src/stats/bw.js b/src/stats/bw.js index f68ad23ba..12bc6d44a 100644 --- a/src/stats/bw.js +++ b/src/stats/bw.js @@ -3,7 +3,7 @@ const ndjson = require('iterable-ndjson') const Big = require('bignumber.js') const configure = require('../lib/configure') -const toIterable = require('../lib/stream-to-iterable') +const toIterable = require('stream-to-it/source') module.exports = configure(({ ky }) => { return async function * bw (options) { diff --git a/src/stats/index.js b/src/stats/index.js index 4351d79e2..d13a534a7 100644 --- a/src/stats/index.js +++ b/src/stats/index.js @@ -1,19 +1,7 @@ 'use strict' -const callbackify = require('callbackify') -const { streamify, pullify } = require('../lib/converters') - -module.exports = config => { - const bw = require('./bw')(config) - return { - bitswap: callbackify.variadic(require('../bitswap/stat')(config)), - bw: callbackify.variadic(async options => { - for await (const stats of bw(options)) { - return stats - } - }), - bwReadableStream: streamify.readable(bw), - bwPullStream: pullify.source(bw), - repo: callbackify.variadic(require('../repo/stat')(config)) - } -} +module.exports = config => ({ + bitswap: require('../bitswap/stat')(config), + bw: require('./bw')(config), + repo: require('../repo/stat')(config) +}) diff --git a/src/swarm/addrs.js b/src/swarm/addrs.js index a786d32cb..25bd98d02 100644 --- a/src/swarm/addrs.js +++ b/src/swarm/addrs.js @@ -1,7 +1,6 @@ 'use strict' -const PeerInfo = require('peer-info') -const PeerId = require('peer-id') +const CID = require('cids') const multiaddr = require('multiaddr') const configure = require('../lib/configure') @@ -16,10 +15,9 @@ module.exports = configure(({ ky }) => { searchParams: options.searchParams }).json() - return Object.keys(res.Addrs).map(id => { - const peerInfo = new PeerInfo(PeerId.createFromB58String(id)) - res.Addrs[id].forEach(addr => peerInfo.multiaddrs.add(multiaddr(addr))) - return peerInfo - }) + return Object.keys(res.Addrs).map(id => ({ + id: new CID(id), + addrs: (res.Addrs[id] || []).map(a => multiaddr(a)) + })) } }) diff --git a/src/swarm/disconnect.js b/src/swarm/disconnect.js index a3d60d172..e83ca0dd4 100644 --- a/src/swarm/disconnect.js +++ b/src/swarm/disconnect.js @@ -8,7 +8,7 @@ module.exports = configure(({ ky }) => { options = options || {} const searchParams = new URLSearchParams(options.searchParams) - addrs.forEach(addr => searchParams.append('arg', addr)) + addrs.forEach(addr => searchParams.append('arg', `${addr}`)) const res = await ky.post('swarm/disconnect', { timeout: options.timeout, diff --git a/src/swarm/index.js b/src/swarm/index.js index e86a7c22c..84648439d 100644 --- a/src/swarm/index.js +++ b/src/swarm/index.js @@ -1,11 +1,9 @@ 
'use strict' -const callbackify = require('callbackify') - module.exports = config => ({ - addrs: callbackify.variadic(require('./addrs')(config)), - connect: callbackify.variadic(require('./connect')(config)), - disconnect: callbackify.variadic(require('./disconnect')(config)), - localAddrs: callbackify.variadic(require('./localAddrs')(config)), - peers: callbackify.variadic(require('./peers')(config)) + addrs: require('./addrs')(config), + connect: require('./connect')(config), + disconnect: require('./disconnect')(config), + localAddrs: require('./localAddrs')(config), + peers: require('./peers')(config) }) diff --git a/src/swarm/peers.js b/src/swarm/peers.js index 3b897348c..f10035e06 100644 --- a/src/swarm/peers.js +++ b/src/swarm/peers.js @@ -1,7 +1,7 @@ 'use strict' const multiaddr = require('multiaddr') -const PeerId = require('peer-id') +const CID = require('cids') const configure = require('../lib/configure') module.exports = configure(({ ky }) => { @@ -25,7 +25,7 @@ module.exports = configure(({ ky }) => { const info = {} try { info.addr = multiaddr(peer.Addr) - info.peer = PeerId.createFromB58String(peer.Peer) + info.peer = new CID(peer.Peer) } catch (error) { info.error = error info.rawPeerInfo = peer diff --git a/test/custom-headers.spec.js b/test/custom-headers.spec.js index ce14a01e5..5e88b72a6 100644 --- a/test/custom-headers.spec.js +++ b/test/custom-headers.spec.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const isNode = require('detect-node') +const { isNode } = require('ipfs-utils/src/env') const { expect } = require('interface-ipfs-core/src/utils/mocha') const ipfsClient = require('../src') diff --git a/test/exports.spec.js b/test/exports.spec.js index f97707e13..0df770da6 100644 --- a/test/exports.spec.js +++ b/test/exports.spec.js @@ -1,28 +1,22 @@ /* eslint-env mocha, browser */ 'use strict' -const isIPFS = require('is-ipfs') const CID = require('cids') const multiaddr = require('multiaddr') const multibase = require('multibase') const multicodec = require('multicodec') const multihash = require('multihashes') -const PeerId = require('peer-id') -const PeerInfo = require('peer-info') const { expect } = require('interface-ipfs-core/src/utils/mocha') const IpfsHttpClient = require('../') describe('exports', () => { it('should export the expected types and utilities', () => { - expect(IpfsHttpClient.isIPFS).to.equal(isIPFS) expect(IpfsHttpClient.Buffer).to.equal(Buffer) expect(IpfsHttpClient.CID).to.equal(CID) expect(IpfsHttpClient.multiaddr).to.equal(multiaddr) expect(IpfsHttpClient.multibase).to.equal(multibase) expect(IpfsHttpClient.multicodec).to.equal(multicodec) expect(IpfsHttpClient.multihash).to.equal(multihash) - expect(IpfsHttpClient.PeerId).to.equal(PeerId) - expect(IpfsHttpClient.PeerInfo).to.equal(PeerInfo) }) }) diff --git a/test/files-mfs.spec.js b/test/files-mfs.spec.js index 0e3d6b1af..edc7698e1 100644 --- a/test/files-mfs.spec.js +++ b/test/files-mfs.spec.js @@ -5,13 +5,11 @@ const { expect } = require('interface-ipfs-core/src/utils/mocha') const loadFixture = require('aegir/fixtures') const mh = require('multihashes') -const CID = require('cids') -const values = require('pull-stream/sources/values') -const pull = require('pull-stream/pull') -const collect = require('pull-stream/sinks/collect') +const all = require('it-all') +const pipe = require('it-pipe') +const { TimeoutError } = require('ky-universal') const f = require('./utils/factory') -const expectTimeout = require('./utils/expect-timeout') const testfile = 
loadFixture('test/fixtures/testfile.txt') @@ -41,10 +39,10 @@ describe('.files (the MFS API part)', function () { after(() => f.clean()) it('.add file for testing', async () => { - const res = await ipfs.add(testfile) + const res = await all(ipfs.add(testfile)) expect(res).to.have.length(1) - expect(res[0].hash).to.equal(expectedMultihash) + expect(res[0].cid.toString()).to.equal(expectedMultihash) expect(res[0].path).to.equal(expectedMultihash) }) @@ -54,10 +52,10 @@ describe('.files (the MFS API part)', function () { const expectedBufferMultihash = 'QmWfVY9y3xjsixTgbd9AorQxH7VtMpzfx2HaWtsoUYecaX' const file = Buffer.from('hello') - const res = await ipfs.add(file) + const res = await all(ipfs.add(file)) expect(res).to.have.length(1) - expect(res[0].hash).to.equal(expectedBufferMultihash) + expect(res[0].cid.toString()).to.equal(expectedBufferMultihash) expect(res[0].path).to.equal(expectedBufferMultihash) }) @@ -65,10 +63,10 @@ describe('.files (the MFS API part)', function () { const expectedHash = 'QmWfVY9y3xjsixTgbd9AorQxH7VtMpzfx2HaWtsoUYecaX' const content = Buffer.from('hello') - const res = await ipfs.add([{ path: '', content }]) + const res = await all(ipfs.add([{ path: '', content }])) expect(res).to.have.length(1) - expect(res[0].hash).to.equal(expectedHash) + expect(res[0].cid.toString()).to.equal(expectedHash) expect(res[0].path).to.equal(expectedHash) }) @@ -76,39 +74,40 @@ describe('.files (the MFS API part)', function () { const expectedCid = 'bafybeifogzovjqrcxvgt7g36y7g63hvwvoakledwk4b2fr2dl4wzawpnny' const options = { cidVersion: 1, rawLeaves: false } - const res = await ipfs.add(testfile, options) + const res = await all(ipfs.add(testfile, options)) expect(res).to.have.length(1) - expect(res[0].hash).to.equal(expectedCid) + expect(res[0].cid.toString()).to.equal(expectedCid) expect(res[0].path).to.equal(expectedCid) }) it('.add with only-hash=true', async () => { const content = String(Math.random() + Date.now()) - const files = await ipfs.add(Buffer.from(content), { onlyHash: true }) + const files = await all(ipfs.add(Buffer.from(content), { onlyHash: true })) expect(files).to.have.length(1) // 'ipfs.object.get()' should timeout because content wasn't actually added - await expectTimeout(ipfs.object.get(files[0].hash), 4000) + return expect(ipfs.object.get(files[0].cid, { timeout: 2000 })) + .to.be.rejectedWith(TimeoutError) }) it('.add with options', async () => { - const res = await ipfs.add(testfile, { pin: false }) + const res = await all(ipfs.add(testfile, { pin: false })) expect(res).to.have.length(1) - expect(res[0].hash).to.equal(expectedMultihash) + expect(res[0].cid.toString()).to.equal(expectedMultihash) expect(res[0].path).to.equal(expectedMultihash) }) it('.add pins by default', async () => { const newContent = Buffer.from(String(Math.random())) - const initialPins = await ipfs.pin.ls() + const initialPins = await all(ipfs.pin.ls()) - await ipfs.add(newContent) + await all(ipfs.add(newContent)) - const pinsAfterAdd = await ipfs.pin.ls() + const pinsAfterAdd = await all(ipfs.pin.ls()) expect(pinsAfterAdd.length).to.eql(initialPins.length + 1) }) @@ -116,11 +115,11 @@ describe('.files (the MFS API part)', function () { it('.add with pin=false', async () => { const newContent = Buffer.from(String(Math.random())) - const initialPins = await ipfs.pin.ls() + const initialPins = await all(ipfs.pin.ls()) - await ipfs.add(newContent, { pin: false }) + await all(ipfs.add(newContent, { pin: false })) - const pinsAfterAdd = await ipfs.pin.ls() + const pinsAfterAdd = 
await all(ipfs.pin.ls()) expect(pinsAfterAdd.length).to.eql(initialPins.length) }) @@ -134,10 +133,10 @@ describe('.files (the MFS API part)', function () { } const options = { hashAlg: name, rawLeaves: false } - const res = await ipfs.add([file], options) + const res = await all(ipfs.add([file], options)) expect(res).to.have.length(1) - const cid = new CID(res[0].hash) + const { cid } = res[0] expect(mh.decode(cid.multihash).name).to.equal(name) }) }) @@ -151,7 +150,7 @@ describe('.files (the MFS API part)', function () { progress = p } - const res = await ipfs.add(testfile, { progress: progressHandler }) + const res = await all(ipfs.add(testfile, { progress: progressHandler })) expect(res).to.have.length(1) expect(progress).to.be.equal(testfile.byteLength) @@ -168,7 +167,7 @@ describe('.files (the MFS API part)', function () { } // TODO: needs to be using a big file - const res = await ipfs.add(testfile, { progress: progressHandler }) + const res = await all(ipfs.add(testfile, { progress: progressHandler })) expect(res).to.have.length(1) expect(progress).to.be.equal(testfile.byteLength) @@ -185,7 +184,7 @@ describe('.files (the MFS API part)', function () { } // TODO: needs to be using a directory - const res = await ipfs.add(testfile, { progress: progressHandler }) + const res = await all(ipfs.add(testfile, { progress: progressHandler })) expect(res).to.have.length(1) expect(progress).to.be.equal(testfile.byteLength) @@ -193,7 +192,7 @@ describe('.files (the MFS API part)', function () { }) it('.add without progress options', async () => { - const res = await ipfs.add(testfile) + const res = await all(ipfs.add(testfile)) expect(res).to.have.length(1) }) @@ -207,44 +206,35 @@ describe('.files (the MFS API part)', function () { } const options = { hashAlg: name, rawLeaves: false } - const res = await ipfs.add([file], options) + const res = await all(ipfs.add([file], options)) expect(res).to.have.length(1) - const cid = new CID(res[0].hash) + const { cid } = res[0] expect(mh.decode(cid.multihash).name).to.equal(name) }) }) - it('.addPullStream with object chunks and pull stream content', (done) => { + it('.add with object chunks and iterable content', async () => { const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm' - pull( - values([{ content: values([Buffer.from('test')]) }]), - ipfs.addPullStream(), - collect((err, res) => { - expect(err).to.not.exist() - - expect(res).to.have.length(1) - expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 }) - done() - }) + const res = await pipe( + [{ content: [Buffer.from('test')] }], + ipfs.add, + all ) - }) - - it('.add with pull stream', async () => { - const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm' - const res = await ipfs.add(values([Buffer.from('test')])) expect(res).to.have.length(1) - expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 }) + res[0].cid = res[0].cid.toString() + expect(res[0]).to.deep.equal({ path: expectedCid, cid: expectedCid, size: 12 }) }) - it('.add with array of objects with pull stream content', async () => { + it('.add with iterable', async () => { const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm' - const res = await ipfs.add([{ content: values([Buffer.from('test')]) }]) + const res = await all(ipfs.add([Buffer.from('test')])) expect(res).to.have.length(1) - expect(res[0]).to.eql({ path: expectedCid, hash: expectedCid, size: 12 }) + res[0].cid = res[0].cid.toString() + expect(res[0]).to.deep.equal({ path: 
expectedCid, cid: expectedCid, size: 12 }) }) it('files.mkdir', async () => { @@ -315,7 +305,7 @@ describe('.files (the MFS API part)', function () { await ipfs.files.write(file, Buffer.from('Hello, world'), { create: true }) - const files = await ipfs.files.ls(folder) + const files = await all(ipfs.files.ls(folder)) expect(files.length).to.equal(1) }) @@ -324,7 +314,7 @@ describe('.files (the MFS API part)', function () { const folder = `test-folder-${Math.random()}` await ipfs.files.mkdir(`/${folder}`) - const files = await ipfs.files.ls() + const files = await all(ipfs.files.ls()) expect(files.find(file => file.name === folder)).to.be.ok() }) @@ -334,7 +324,7 @@ describe('.files (the MFS API part)', function () { create: true }) - const buf = await ipfs.files.read('/test-folder/test-file-2.txt') + const buf = Buffer.concat(await all(ipfs.files.read('/test-folder/test-file-2.txt'))) expect(buf.toString()).to.be.equal('hello world') }) @@ -342,7 +332,7 @@ describe('.files (the MFS API part)', function () { it('files.write without options', async () => { await ipfs.files.write('/test-folder/test-file-2.txt', Buffer.from('hello world')) - const buf = await ipfs.files.read('/test-folder/test-file-2.txt') + const buf = Buffer.concat(await all(ipfs.files.read('/test-folder/test-file-2.txt'))) expect(buf.toString()).to.be.equal('hello world') }) @@ -357,9 +347,10 @@ describe('.files (the MFS API part)', function () { }) const stats = await ipfs.files.stat(file) + stats.cid = stats.cid.toString() expect(stats).to.deep.equal({ - hash: 'QmQhouoDPAnzhVM148yCa9CbUXK65wSEAZBtgrLGHtmdmP', + cid: 'QmQhouoDPAnzhVM148yCa9CbUXK65wSEAZBtgrLGHtmdmP', size: 12, cumulativeSize: 70, blocks: 1, @@ -383,7 +374,7 @@ describe('.files (the MFS API part)', function () { await ipfs.files.write(file, testfile, { create: true }) - const buf = await ipfs.files.read(file) + const buf = Buffer.concat(await all(ipfs.files.read(file))) expect(Buffer.from(buf)).to.deep.equal(testfile) }) diff --git a/test/get.spec.js b/test/get.spec.js index a6fb3aaa4..c303179c8 100644 --- a/test/get.spec.js +++ b/test/get.spec.js @@ -5,6 +5,8 @@ const { expect } = require('interface-ipfs-core/src/utils/mocha') const loadFixture = require('aegir/fixtures') +const all = require('it-all') +const concat = require('it-concat') const f = require('./utils/factory') @@ -24,35 +26,37 @@ describe('.get (specific go-ipfs features)', function () { before(async () => { ipfs = (await f.spawn()).api - await ipfs.add(smallFile.data) + await all(ipfs.add(smallFile.data)) }) after(() => f.clean()) it('no compression args', async () => { - const files = await ipfs.get(smallFile.cid) + const files = await all(ipfs.get(smallFile.cid)) expect(files).to.be.length(1) - expect(files[0].content.toString()).to.contain(smallFile.data.toString()) + const content = await concat(files[0].content) + expect(content.toString()).to.contain(smallFile.data.toString()) }) it('archive true', async () => { - const files = await ipfs.get(smallFile.cid, { archive: true }) + const files = await all(ipfs.get(smallFile.cid, { archive: true })) expect(files).to.be.length(1) - expect(files[0].content.toString()).to.contain(smallFile.data.toString()) + const content = await concat(files[0].content) + expect(content.toString()).to.contain(smallFile.data.toString()) }) it('err with out of range compression level', async () => { - await expect(ipfs.get(smallFile.cid, { + await expect(all(ipfs.get(smallFile.cid, { compress: true, compressionLevel: 10 - })).to.be.rejectedWith('compression 
level must be between 1 and 9') + }))).to.be.rejectedWith('compression level must be between 1 and 9') }) // TODO Understand why this test started failing it.skip('with compression level', async () => { - await ipfs.get(smallFile.cid, { compress: true, 'compression-level': 1 }) + await all(ipfs.get(smallFile.cid, { compress: true, 'compression-level': 1 })) }) it('add path containing "+"s (for testing get)', async () => { @@ -60,17 +64,17 @@ describe('.get (specific go-ipfs features)', function () { const subdir = 'tmp/c++files' const expectedCid = 'QmPkmARcqjo5fqK1V1o8cFsuaXxWYsnwCNLJUYS4KeZyff' const path = `${subdir}/${filename}` - const files = await ipfs.add([{ + const files = await all(ipfs.add([{ path, content: Buffer.from(path) - }]) + }])) - expect(files[2].hash).to.equal(expectedCid) + expect(files[2].cid.toString()).to.equal(expectedCid) }) it('get path containing "+"s', async () => { const cid = 'QmPkmARcqjo5fqK1V1o8cFsuaXxWYsnwCNLJUYS4KeZyff' - const files = await ipfs.get(cid) + const files = await all(ipfs.get(cid)) expect(files).to.be.an('array').with.lengthOf(3) expect(files[0]).to.have.property('path', cid) diff --git a/test/interface.spec.js b/test/interface.spec.js index 796c1ae86..ebec1b0e9 100644 --- a/test/interface.spec.js +++ b/test/interface.spec.js @@ -3,7 +3,6 @@ const tests = require('interface-ipfs-core') const merge = require('merge-options') -const { isNode } = require('ipfs-utils/src/env') const { createFactory } = require('ipfsd-ctl') const { findBin } = require('ipfsd-ctl/src/utils') const isWindows = process.platform && process.platform === 'win32' @@ -25,6 +24,43 @@ describe('interface-ipfs-core tests', () => { } const commonFactory = createFactory(commonOptions) + tests.root(commonFactory, { + skip: [ + { + name: 'should add with mode as string', + reason: 'TODO not implemented in go-ipfs yet' + }, + { + name: 'should add with mode as number', + reason: 'TODO not implemented in go-ipfs yet' + }, + { + name: 'should add with mtime as Date', + reason: 'TODO not implemented in go-ipfs yet' + }, + { + name: 'should add with mtime as { nsecs, secs }', + reason: 'TODO not implemented in go-ipfs yet' + }, + { + name: 'should add with mtime as timespec', + reason: 'TODO not implemented in go-ipfs yet' + }, + { + name: 'should add with mtime as hrtime', + reason: 'TODO not implemented in go-ipfs yet' + }, + { + name: 'should export a chunk of a file', + reason: 'TODO not implemented in go-ipfs yet' + }, + { + name: 'should ls with metadata', + reason: 'TODO not implemented in go-ipfs yet' + } + ] + }) + tests.bitswap(commonFactory) tests.block(commonFactory, { @@ -34,7 +70,18 @@ describe('interface-ipfs-core tests', () => { }] }) - tests.bootstrap(commonFactory) + tests.bootstrap(commonFactory, { + skip: [{ + name: 'should return a list containing the bootstrap peer when called with a valid arg (ip4)', + reason: 'TODO unskip when go-ipfs switches to p2p for libp2p keys' + }, { + name: 'should prevent duplicate inserts of bootstrap peers', + reason: 'TODO unskip when go-ipfs switches to p2p for libp2p keys' + }, { + name: 'should return a list containing the peer removed when called with a valid arg (ip4)', + reason: 'TODO unskip when go-ipfs switches to p2p for libp2p keys' + }] + }) tests.config(commonFactory, { skip: [ @@ -81,31 +128,21 @@ describe('interface-ipfs-core tests', () => { ] }) - tests.dht(commonFactory, { + tests.dht(commonFactory) + + tests.files(commonFactory, { skip: [ - // dht.findpeer { - name: 'should fail to find other peer if peer does 
not exist', - reason: 'FIXME checking what is exactly go-ipfs returning https://github.com/ipfs/go-ipfs/issues/3862#issuecomment-294168090' + name: 'should ls directory', + reason: 'TODO unskip when go-ipfs supports --long https://github.com/ipfs/go-ipfs/pull/6528' }, - // dht.findprovs { - name: 'should take options to override timeout config', - reason: 'FIXME go-ipfs does not support a timeout option' + name: 'should list a file directly', + reason: 'TODO unskip when go-ipfs supports --long https://github.com/ipfs/go-ipfs/pull/6528' }, - // dht.get - { - name: 'should get a value after it was put on another node', - reason: 'FIXME go-ipfs errors with Error: key was not found (type 6) https://github.com/ipfs/go-ipfs/issues/3862' - } - ] - }) - - tests.filesMFS(commonFactory, { - skip: [ { - name: 'should ls directory with long option', - reason: 'TODO unskip when go-ipfs supports --long https://github.com/ipfs/go-ipfs/pull/6528' + name: 'should ls directory and include metadata', + reason: 'TODO not implemented in go-ipfs yet' }, { name: 'should read from outside of mfs', @@ -167,10 +204,6 @@ describe('interface-ipfs-core tests', () => { name: 'should respect metadata when copying from outside of mfs', reason: 'TODO not implemented in go-ipfs yet' }, - { - name: 'ls directory with long option should include metadata', - reason: 'TODO not implemented in go-ipfs yet' - }, { name: 'should have default mtime', reason: 'TODO not implemented in go-ipfs yet' @@ -270,57 +303,6 @@ describe('interface-ipfs-core tests', () => { ] }) - tests.filesRegular(commonFactory, { - skip: [ - // .addFromFs - isNode ? null : { - name: 'addFromFs', - reason: 'Not designed to run in the browser' - }, - { - name: 'should add with mode as string', - reason: 'TODO not implemented in go-ipfs yet' - }, - { - name: 'should add with mode as number', - reason: 'TODO not implemented in go-ipfs yet' - }, - { - name: 'should add with mtime as Date', - reason: 'TODO not implemented in go-ipfs yet' - }, - { - name: 'should add with mtime as { nsecs, secs }', - reason: 'TODO not implemented in go-ipfs yet' - }, - { - name: 'should add with mtime as timespec', - reason: 'TODO not implemented in go-ipfs yet' - }, - { - name: 'should add with mtime as hrtime', - reason: 'TODO not implemented in go-ipfs yet' - }, - // .catPullStream - { - name: 'should export a chunk of a file', - reason: 'TODO not implemented in go-ipfs yet' - }, - { - name: 'should export a chunk of a file in a Pull Stream', - reason: 'TODO not implemented in go-ipfs yet' - }, - { - name: 'should export a chunk of a file in a Readable Stream', - reason: 'TODO not implemented in go-ipfs yet' - }, - { - name: 'should ls with metadata', - reason: 'TODO not implemented in go-ipfs yet' - } - ] - }) - tests.key(commonFactory, { skip: [ // key.export @@ -346,7 +328,6 @@ describe('interface-ipfs-core tests', () => { } )), { skip: [ - // stop { name: 'should resolve a record from peerid as cidv1 in base32', reason: 'TODO not implemented in go-ipfs yet: https://github.com/ipfs/go-ipfs/issues/5287' @@ -383,14 +364,6 @@ describe('interface-ipfs-core tests', () => { tests.ping(commonFactory, { skip: [ - { - name: 'should fail when pinging an unknown peer over pull stream', - reason: 'FIXME go-ipfs return success with text: Looking up peer ' - }, - { - name: 'should fail when pinging peer that is not available over readable stream', - reason: 'FIXME go-ipfs return success with text: Looking up peer ' - }, { name: 'should fail when 
pinging a peer that is not available', reason: 'FIXME go-ipfs return success with text: Looking up peer ' diff --git a/test/lib.stream-to-iterable.spec.js b/test/lib.stream-to-iterable.spec.js deleted file mode 100644 index 3ad326480..000000000 --- a/test/lib.stream-to-iterable.spec.js +++ /dev/null @@ -1,40 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const { expect } = require('interface-ipfs-core/src/utils/mocha') -const toIterable = require('../src/lib/stream-to-iterable') - -describe('lib/stream-to-iterable', () => { - it('should return input if already async iterable', () => { - const input = { [Symbol.asyncIterator] () { return this } } - expect(toIterable(input)).to.equal(input) - }) - - it('should convert reader to async iterable', async () => { - const inputData = [2, 31, 3, 4] - const input = { - getReader () { - let i = 0 - return { - read () { - return i === inputData.length - ? { done: true } - : { value: inputData[i++] } - }, - releaseLock () {} - } - } - } - - const chunks = [] - for await (const chunk of toIterable(input)) { - chunks.push(chunk) - } - - expect(chunks).to.eql(inputData) - }) - - it('should throw on unknown stream', () => { - expect(() => toIterable({})).to.throw('unknown stream') - }) -}) diff --git a/test/log.spec.js b/test/log.spec.js index 1a885028d..c56660d07 100644 --- a/test/log.spec.js +++ b/test/log.spec.js @@ -3,6 +3,7 @@ 'use strict' const { expect } = require('interface-ipfs-core/src/utils/mocha') +const all = require('it-all') const f = require('./utils/factory') describe('.log', function () { @@ -19,7 +20,7 @@ describe('.log', function () { it('.log.tail', async () => { const i = setInterval(async () => { try { - await ipfs.add(Buffer.from('just adding some data to generate logs')) + await all(ipfs.add(Buffer.from('just adding some data to generate logs'))) } catch (_) { // this can error if the test has finished and we're shutting down the node } diff --git a/test/node/swarm.js b/test/node/swarm.js index 9e0252a2c..89315f2fe 100644 --- a/test/node/swarm.js +++ b/test/node/swarm.js @@ -25,7 +25,7 @@ describe('.swarm.peers', function () { expect(res.length).to.equal(1) expect(res[0].error).to.not.exist() expect(res[0].addr.toString()).to.equal(response.Peers[0].Addr) - expect(res[0].peer.toB58String()).to.equal(response.Peers[0].Peer) + expect(res[0].peer.toString()).to.equal(response.Peers[0].Peer) expect(scope.isDone()).to.equal(true) }) @@ -43,7 +43,7 @@ describe('.swarm.peers', function () { expect(res.length).to.equal(1) expect(res[0].error).to.not.exist() expect(res[0].addr.toString()).to.equal(response.Peers[0].Addr) - expect(res[0].peer.toB58String()).to.equal(response.Peers[0].Peer) + expect(res[0].peer.toString()).to.equal(response.Peers[0].Peer) expect(scope.isDone()).to.equal(true) }) diff --git a/test/ping.spec.js b/test/ping.spec.js index 01636ed60..7076f208b 100644 --- a/test/ping.spec.js +++ b/test/ping.spec.js @@ -2,8 +2,7 @@ 'use strict' const { expect } = require('interface-ipfs-core/src/utils/mocha') -const pull = require('pull-stream/pull') -const collect = require('pull-stream/sinks/collect') +const all = require('it-all') const f = require('./utils/factory') @@ -34,7 +33,7 @@ describe('.ping', function () { after(() => f.clean()) it('.ping with default count', async () => { - const res = await ipfs.ping(otherId) + const res = await all(ipfs.ping(otherId)) expect(res).to.be.an('array') expect(res.filter(isPong)).to.have.lengthOf(10) res.forEach(packet => { @@ -46,7 +45,7 @@ describe('.ping', function () { }) it('.ping 
with count = 2', async () => { - const res = await ipfs.ping(otherId, { count: 2 }) + const res = await all(ipfs.ping(otherId, { count: 2 })) expect(res).to.be.an('array') expect(res.filter(isPong)).to.have.lengthOf(2) res.forEach(packet => { @@ -56,39 +55,4 @@ describe('.ping', function () { const resultMsg = res.find(packet => packet.text.includes('Average latency')) expect(resultMsg).to.exist() }) - - it('.pingPullStream', (done) => { - pull( - ipfs.pingPullStream(otherId, { count: 2 }), - collect((err, data) => { - expect(err).to.not.exist() - expect(data).to.be.an('array') - expect(data.filter(isPong)).to.have.lengthOf(2) - data.forEach(packet => { - expect(packet).to.have.keys('success', 'time', 'text') - expect(packet.time).to.be.a('number') - }) - const resultMsg = data.find(packet => packet.text.includes('Average latency')) - expect(resultMsg).to.exist() - done() - }) - ) - }) - - it('.pingReadableStream', (done) => { - let packetNum = 0 - ipfs.pingReadableStream(otherId, { count: 2 }) - .on('data', data => { - expect(data).to.be.an('object') - expect(data).to.have.keys('success', 'time', 'text') - if (isPong(data)) packetNum++ - }) - .on('error', err => { - expect(err).not.to.exist() - }) - .on('end', () => { - expect(packetNum).to.equal(2) - done() - }) - }) }) diff --git a/test/request-api.spec.js b/test/request-api.spec.js index c213193bb..1767b5914 100644 --- a/test/request-api.spec.js +++ b/test/request-api.spec.js @@ -2,16 +2,12 @@ 'use strict' const { expect } = require('interface-ipfs-core/src/utils/mocha') -const isNode = require('detect-node') +const { isNode } = require('ipfs-utils/src/env') const ipfsClient = require('../src/index.js') -const ndjson = require('ndjson') -const pump = require('pump') describe('\'deal with HTTP weirdness\' tests', () => { - it('does not crash if no content-type header is provided', (done) => { - if (!isNode) { - return done() - } + it('does not crash if no content-type header is provided', async function () { + if (!isNode) return this.skip() // go-ipfs always (currently) adds a content-type header, even if no content is present, // the standard behaviour for an http-api is to omit this header if no content is present @@ -24,12 +20,10 @@ describe('\'deal with HTTP weirdness\' tests', () => { }) }) - server.listen(6001, () => { - ipfsClient('/ip4/127.0.0.1/tcp/6001').config.replace('test/fixtures/r-config.json', (err) => { - expect(err).to.not.exist() - server.close(done) - }) - }) + await new Promise(resolve => server.listen(6001, resolve)) + await ipfsClient('/ip4/127.0.0.1/tcp/6001').config.replace('test/fixtures/r-config.json') + + server.close() }) }) @@ -39,12 +33,11 @@ describe('trailer headers', () => { if (!isNode) { return done() } const server = require('http').createServer((req, res) => { - const resStream = pump(res, ndjson.stringify()) res.setHeader('x-chunked-output', '1') res.setHeader('content-type', 'application/json') res.setHeader('Trailer', 'X-Stream-Error') res.addTrailers({ 'X-Stream-Error': JSON.stringify({ Message: 'ups, something went wrong', Code: 500 }) }) - resStream.write({ Bytes: 1 }) + res.write(JSON.stringify({ Bytes: 1 })) res.end() }) @@ -64,7 +57,7 @@ describe('trailer headers', () => { }) describe('error handling', () => { - it('should handle plain text error response', function (done) { + it('should handle plain text error response', async function () { if (!isNode) return this.skip() const server = require('http').createServer((req, res) => { @@ -78,17 +71,16 @@ describe('error handling', () => { }) 
}) - server.listen(6001, () => { - ipfsClient('/ip4/127.0.0.1/tcp/6001').config.replace('test/fixtures/r-config.json', (err) => { - expect(err).to.exist() - expect(err.response.status).to.equal(403) - expect(err.message).to.equal('ipfs method not allowed') - server.close(done) - }) - }) + await new Promise(resolve => server.listen(6001, resolve)) + + await expect(ipfsClient('/ip4/127.0.0.1/tcp/6001').config.replace('test/fixtures/r-config.json')) + .to.eventually.be.rejectedWith('ipfs method not allowed') + .and.to.have.nested.property('response.status').that.equals(403) + + server.close() }) - it('should handle JSON error response', function (done) { + it('should handle JSON error response', async function () { if (!isNode) return this.skip() const server = require('http').createServer((req, res) => { @@ -102,17 +94,16 @@ describe('error handling', () => { }) }) - server.listen(6001, () => { - ipfsClient('/ip4/127.0.0.1/tcp/6001').config.replace('test/fixtures/r-config.json', (err) => { - expect(err).to.exist() - expect(err.response.status).to.equal(400) - expect(err.message).to.equal('client error') - server.close(done) - }) - }) + await new Promise(resolve => server.listen(6001, resolve)) + + await expect(ipfsClient('/ip4/127.0.0.1/tcp/6001').config.replace('test/fixtures/r-config.json')) + .to.eventually.be.rejectedWith('client error') + .and.to.have.nested.property('response.status').that.equals(400) + + server.close() }) - it('should handle JSON error response with invalid JSON', function (done) { + it('should handle JSON error response with invalid JSON', async function () { if (!isNode) return this.skip() const server = require('http').createServer((req, res) => { @@ -126,12 +117,12 @@ describe('error handling', () => { }) }) - server.listen(6001, () => { - ipfsClient('/ip4/127.0.0.1/tcp/6001').config.replace('test/fixtures/r-config.json', (err) => { - expect(err).to.exist() - expect(err.message).to.include('Unexpected token M in JSON at position 2') - server.close(done) - }) - }) + await new Promise(resolve => server.listen(6001, resolve)) + + await expect(ipfsClient('/ip4/127.0.0.1/tcp/6001').config.replace('test/fixtures/r-config.json')) + .to.eventually.be.rejected() + .and.to.have.property('message').that.includes('Unexpected token M in JSON at position 2') + + server.close() }) }) diff --git a/test/stats.spec.js b/test/stats.spec.js index d60aaa330..1e5d7da25 100644 --- a/test/stats.spec.js +++ b/test/stats.spec.js @@ -2,6 +2,7 @@ 'use strict' const { expect } = require('interface-ipfs-core/src/utils/mocha') +const all = require('it-all') const f = require('./utils/factory') describe('stats', function () { @@ -31,7 +32,7 @@ describe('stats', function () { }) it('.stats.bw', async () => { - const res = await ipfs.stats.bw() + const res = (await all(ipfs.stats.bw()))[0] expect(res).to.exist() expect(res).to.have.a.property('totalIn') diff --git a/test/sub-modules.spec.js b/test/sub-modules.spec.js index ee8f6d93b..a5db456df 100644 --- a/test/sub-modules.spec.js +++ b/test/sub-modules.spec.js @@ -64,12 +64,8 @@ describe('submodules', () => { it('ping', () => { const ping = require('../src')().ping - const pingPullStream = require('../src')().pingPullStream - const pingReadableStream = require('../src')().pingReadableStream expect(ping).to.be.a('function') - expect(pingPullStream).to.be.a('function') - expect(pingReadableStream).to.be.a('function') }) it('log', () => { @@ -164,26 +160,11 @@ describe('submodules', () => { const filesRegular = require('../src')() 
expect(filesRegular.add).to.be.a('function') - expect(filesRegular.addReadableStream).to.be.a('function') - expect(filesRegular.addPullStream).to.be.a('function') - expect(filesRegular.addFromStream).to.be.a('function') - expect(filesRegular.addFromFs).to.be.a('function') - expect(filesRegular.addFromURL).to.be.a('function') expect(filesRegular.get).to.be.a('function') - expect(filesRegular.getReadableStream).to.be.a('function') - expect(filesRegular.getPullStream).to.be.a('function') expect(filesRegular.cat).to.be.a('function') - expect(filesRegular.catReadableStream).to.be.a('function') - expect(filesRegular.catPullStream).to.be.a('function') expect(filesRegular.ls).to.be.a('function') - expect(filesRegular.lsReadableStream).to.be.a('function') - expect(filesRegular.lsPullStream).to.be.a('function') expect(filesRegular.refs).to.be.a('function') - expect(filesRegular.refsReadableStream).to.be.a('function') - expect(filesRegular.refsPullStream).to.be.a('function') expect(filesRegular.refs.local).to.be.a('function') - expect(filesRegular.refs.localReadableStream).to.be.a('function') - expect(filesRegular.refs.localPullStream).to.be.a('function') }) it('files MFS API', () => { diff --git a/test/utils/expect-timeout.js b/test/utils/expect-timeout.js deleted file mode 100644 index 51c733075..000000000 --- a/test/utils/expect-timeout.js +++ /dev/null @@ -1,16 +0,0 @@ -'use strict' - -/** - * Resolve if @param promise hangs for at least @param ms, throw otherwise - * @param {Promise} promise promise that you expect to hang - * @param {Number} ms millis to wait - * @return {Promise} - */ -module.exports = (promise, ms) => { - return Promise.race([ - promise.then((out) => { - throw new Error('Expected Promise to timeout but it was successful.') - }), - new Promise((resolve, reject) => setTimeout(resolve, ms)) - ]) -}
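
Note: the updated tests above consume the new async iterable APIs with small utilities such as `it-all` and `it-concat`. A minimal sketch of that consumption pattern (the daemon address and file contents here are hypothetical, not part of this change):

```js
const all = require('it-all')
const concat = require('it-concat')
const ipfsClient = require('ipfs-http-client')

const ipfs = ipfsClient('/ip4/127.0.0.1/tcp/5001') // hypothetical daemon address

async function main () {
  // `add` now returns an async iterable; collect every result into an array
  const results = await all(ipfs.add(Buffer.from('hello world')))
  console.log(results[0].cid.toString()) // `cid` is a CID instance, not a string `hash`

  // `get` yields entries whose `content` is itself an async iterable of BufferList chunks
  for await (const entry of ipfs.get(results[0].cid)) {
    if (entry.content) {
      const data = await concat(entry.content) // concatenated into a single BufferList
      console.log(data.toString())
    }
  }
}

main().catch(console.error)
```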
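
The client now statically exports `globSource` and `urlSource` (see the `Object.assign(ipfsClient, ...)` hunk above). A sketch of feeding a glob source into `add`, assuming the `ipfs-utils` glob-source signature (`globSource(path, options)`) and a hypothetical `./docs` directory:

```js
const ipfsClient = require('ipfs-http-client')
const { globSource } = ipfsClient

const ipfs = ipfsClient('/ip4/127.0.0.1/tcp/5001') // hypothetical daemon address

async function addDir () {
  // each file matched on disk is yielded as a separate add result
  for await (const file of ipfs.add(globSource('./docs', { recursive: true }))) {
    console.log(file.path, file.cid.toString())
  }
}

addDir().catch(console.error)
```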