This repository has been archived by the owner on Mar 10, 2020. It is now read-only.

Adds API for ipfs.files.get. #28

Closed · wants to merge 2 commits
README.md (5 changes: 4 additions & 1 deletion)
@@ -20,6 +20,7 @@
- [Files](/API/files)
- [`add`](/API/files#add)
- [`createAddStream`](/files#createaddstream)
- [`get`](/API/files#get)
Contributor:
@noffle seems that this was never added to the files.md. Maybe a missing commit?

Contributor Author:
Not sure where it got dropped. Thanks for noticing! I'll re-add.

- [`cat`](/API/files#cat)
- [Object](/API/object)
- [`object.new`](/API/object#objectnew)
@@ -99,7 +100,7 @@ test.all(common)

## API

A valid (read: that follows this interface) IPFS core implementation, must expose the API described in [/API](/API)
A valid (read: that follows this interface) IPFS core implementation must expose the API described in [/API](/API).

## Contribute

@@ -114,3 +115,5 @@ This repository falls under the IPFS [Code of Conduct](https://github.com/ipfs/c
## License

MIT

[UnixFS]: https://github.com/ipfs/specs/tree/master/unixfs
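
The `get` entry discussed in the first thread above never made it into files.md. A minimal sketch of the usage such an entry might document, inferred from the tests in this PR (the `example` wrapper and its arguments are illustrative, not part of the interface):

```js
// Hypothetical usage for a files.md `get` entry; `ipfs` is any
// implementation of this interface.
function example (ipfs, done) {
  const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
  ipfs.files.get(hash, (err, stream) => {
    if (err) return done(err)
    // The stream emits { path, content } objects: `content` is itself
    // a readable stream for files and is absent for directories.
    stream.on('data', (file) => console.log(file.path))
    stream.on('end', done)
  })
}
```

A promise flavour (`ipfs.files.get(hash).then(...)`) is exercised by the tests as well.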
package.json (1 change: 1 addition & 0 deletions)
@@ -31,6 +31,7 @@
"bl": "^1.1.2",
"bs58": "^3.0.0",
"chai": "^3.5.0",
"concat-stream": "^1.5.1",
"detect-node": "^2.0.3",
"ipfs-merkle-dag": "^0.6.0",
"readable-stream": "1.1.13"
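The new concat-stream dependency drives most of the assertions in the tests below. A small sketch, not taken from this PR, of the behaviour those tests rely on: when objects are written to it, concat-stream calls back with an array of those objects rather than a single Buffer.

```js
const concat = require('concat-stream')
const through = require('through2')

const source = through.obj() // an object-mode pass-through stream
source.pipe(concat((items) => {
  // For object input, the callback receives an array of the written
  // objects; for byte streams it would receive one joined Buffer.
  console.log(items.length) // => 2
}))
source.write({ path: 'a' })
source.write({ path: 'b' })
source.end()
```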
src/files.js (198 changes: 173 additions & 25 deletions)
@@ -6,20 +6,28 @@ const bs58 = require('bs58')
const Readable = require('readable-stream')
const path = require('path')
const fs = require('fs')
const isNode = require('detect-node')
const bl = require('bl')
const concat = require('concat-stream')
const through = require('through2')

module.exports = (common) => {
describe('.files', () => {
describe.only('.files', () => {
Contributor:
Will need to change this. But I can also do it on merge :)

Contributor Author:
Agh, I'm guilty of continually doing this. Will correct. :)

let smallFile
let bigFile
let directoryContent
let ipfs

before((done) => {
smallFile = fs.readFileSync(path.join(__dirname, './data/testfile.txt')
)
bigFile = fs.readFileSync(path.join(__dirname, './data/15mb.random')
)
smallFile = fs.readFileSync(path.join(__dirname, './data/testfile.txt'))
bigFile = fs.readFileSync(path.join(__dirname, './data/15mb.random'))
directoryContent = {
'pp.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/pp.txt')),
'holmes.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/holmes.txt')),
'jungle.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/jungle.txt')),
'alice.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/alice.txt')),
'files/hello.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/files/hello.txt')),
'files/ipfs.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/files/ipfs.txt'))
}

common.setup((err, _ipfs) => {
expect(err).to.not.exist
@@ -100,15 +108,9 @@ module.exports = (common) => {
})

it('add a nested dir as array', (done) => {
if (!isNode) {
return done()
// can't run this test cause browserify
// can't shim readFileSync in runtime
}
const base = path.join(__dirname, 'data/test-folder')
const content = (name) => ({
path: `test-folder/${name}`,
content: fs.readFileSync(path.join(base, name))
content: directoryContent[name]
})
const emptyDir = (name) => ({
path: `test-folder/${name}`
@@ -138,21 +140,13 @@

describe('.createAddStream', () => {
it('stream of valid files and dirs', (done) => {
if (!isNode) {
return done()
// can't run this test cause browserify
// can't shim readFileSync in runtime
}

const base = path.join(__dirname, 'data/test-folder')
const content = (name) => ({
path: `test-folder/${name}`,
content: fs.readFileSync(path.join(base, name))
content: directoryContent[name]
})
const emptyDir = (name) => ({
path: `test-folder/${name}`
})

const files = [
content('pp.txt'),
content('holmes.txt'),
@@ -241,7 +235,7 @@
})

describe('.cat', () => {
it('with a bas58 multihash encoded string', () => {
it('with a base58 multihash encoded string', () => {
const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'

return ipfs.cat(hash)
@@ -273,11 +267,165 @@
const hash = new Buffer(bs58.decode('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'))
return ipfs.cat(hash)
.then((stream) => {
stream.pipe(bl((err, bldata) => {
stream.pipe(bl((err, data) => {
expect(err).to.not.exist
expect(bldata.toString()).to.contain('Check out some of the other files in this directory:')
expect(data.toString()).to.contain('Check out some of the other files in this directory:')
}))
})
})
})
})

describe('.get', () => {
it('with a base58 encoded multihash', (done) => {
const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
ipfs.files.get(hash, (err, stream) => {
expect(err).to.not.exist
stream.pipe(concat((files) => {
expect(err).to.not.exist
expect(files).to.be.length(1)
expect(files[0].path).to.equal(hash)
Contributor:
Seems that here you expect that the path is a string b58 encoded multihash, while... ->

Contributor Author:
The correct thing to do is to check for the bs58-encoded value. If you're seeing anything else it's definitely an error to accommodate pre-this-PR. Thanks for noticing.

files[0].content.pipe(concat((content) => {
expect(content.toString()).to.contain('Check out some of the other files in this directory:')
done()
}))
}))
})
})
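
As the thread above settles, emitted paths are expected to use the base58-encoded string form no matter how the hash was passed in. A quick sketch of the round trip, using the bs58 module this suite already depends on (the encode call is an assumption mirroring the decode usage in these tests):

```js
const bs58 = require('bs58')

const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
const mhBuf = new Buffer(bs58.decode(hash))

// An implementation can normalise a Buffer multihash back to the
// base58 string form before emitting paths:
console.log(bs58.encode(mhBuf) === hash) // => true
```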

it('with a multihash', (done) => {
const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
const mhBuf = new Buffer(bs58.decode(hash))
ipfs.files.get(mhBuf, (err, stream) => {
expect(err).to.not.exist
stream.pipe(concat((files) => {
expect(files).to.be.length(1)
expect(files[0].path).to.deep.equal(hash)
files[0].content.pipe(concat((content) => {
expect(content.toString()).to.contain('Check out some of the other files in this directory:')
done()
}))
}))
})
})

it('large file', (done) => {
const hash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq'
ipfs.files.get(hash, (err, stream) => {
expect(err).to.not.exist

// accumulate the files and their content
var files = []
stream.pipe(through.obj((file, enc, next) => {
file.content.pipe(concat((content) => {
files.push({
path: file.path,
content: content
})
next()
}))
}, () => {
expect(files.length).to.equal(1)
expect(files[0].path).to.equal(hash)
expect(files[0].content).to.deep.equal(bigFile)
done()
}))
})
})

it('directory', (done) => {
const hash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP'
ipfs.files.get(hash, (err, stream) => {
expect(err).to.not.exist

// accumulate the files and their content
var files = []
stream.pipe(through.obj((file, enc, next) => {
if (file.content) {
file.content.pipe(concat((content) => {
files.push({
path: file.path,
content: content
})
next()
}))
} else {
files.push(file)
next()
}
}, () => {
// Check paths
var paths = files.map((file) => {
return file.path
})
expect(paths).to.deep.equal([
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP',
Contributor:
Should the path also include the hash?

Contributor Author:
Yes.

Contributor:
why?

Contributor Author (@hackergrrl, Jun 23, 2016):
This is how js-ipfs-unixfs-engine works. Is there a specific concern you have?

Contributor:
I'm just trying to understand if there is a good reason why we should always prepend the hash to the path or not. What will the user expect?

Contributor Author:
I think since this is the Core API we have to worry less about what the user expects and more about what would be the most useful to implementation developers, since they'll be the ones who ultimately decide what the users see. They could choose to cull the root hash from the result if they wanted.

My thinking is that, as an IPFS implementation developer, I'd want the root hash when getting a directory: if I didn't have it, I'd need to do the extra legwork of inferring it from the paths of its contents in order to figure out what root directory to create on the user's filesystem (in the case of ipfs get). This seems like a reasonable expectation to me, since the output of get will always match the directory structure that gets created by it (on the CLI, at least). Please let me know if you have any strong reasons against, but if not then I think this is a strong enough reason for.

Contributor:
That idea grew on me :) Thank you!

'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt',
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder',
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files',
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/empty',
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/hello.txt',
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/ipfs.txt',
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt',
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt',
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt'
])

// Check contents
var contents = files.map((file) => {
return file.content ? file.content : null
})
expect(contents).to.deep.equal([
null,
directoryContent['alice.txt'],
null,
null,
null,
directoryContent['files/hello.txt'],
directoryContent['files/ipfs.txt'],
directoryContent['holmes.txt'],
directoryContent['jungle.txt'],
directoryContent['pp.txt']
])
Contributor:
Let's also verify the content (one chunk out of order is enough to screw the file up).

Contributor Author:
Sure. I added the directory data to the brfs file reading so we can test this in the browser too. 👍

Contributor:
sweet!

done()
}))
})
})
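
Following the thread above about keeping the root hash in every emitted path: a hypothetical sketch (toLocalPath and outDir are illustrative, not part of this interface) of the legwork this saves a consumer such as a CLI get writing the tree to disk:

```js
const path = require('path')

// Because every entry is prefixed with the root hash, the on-disk
// layout falls straight out of the emitted paths: the root directory
// is simply named after the hash, matching what `ipfs get` creates.
function toLocalPath (outDir, file) {
  return path.join(outDir, file.path)
}

console.log(toLocalPath('/tmp', {
  path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt'
}))
// => /tmp/QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt
```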

describe('promise', () => {
it('with a base58 encoded string', (done) => {
const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
ipfs.files.get(hash)
.then((stream) => {
stream.pipe(concat((files) => {
expect(files).to.be.length(1)
expect(files[0].path).to.equal(hash)
files[0].content.pipe(concat((content) => {
expect(content.toString()).to.contain('Check out some of the other files in this directory:')
done()
}))
}))
})
.catch((err) => {
expect(err).to.not.exist
})
})

it('errors on invalid key', (done) => {
const hash = 'somethingNotMultihash'
ipfs.files.get(hash)
.then((stream) => {})
.catch((err) => {
expect(err).to.exist
const errString = err.toString()
if (errString === 'Error: invalid ipfs ref path') {
expect(err.toString()).to.contain('Error: invalid ipfs ref path')
}
if (errString === 'Error: Invalid Key') {
expect(err.toString()).to.contain('Error: Invalid Key')
}
done()
})
})
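
The two accepted messages cover different implementations ('invalid ipfs ref path' versus 'Invalid Key'), but as written the test still passes if neither string matches. A tighter formulation, offered as a suggestion rather than as part of this PR, might be:

```js
it('errors on invalid key', (done) => {
  ipfs.files.get('somethingNotMultihash')
    .catch((err) => {
      // Either implementation's message is acceptable; anything else fails.
      expect(err.toString()).to.match(/invalid ipfs ref path|Invalid Key/)
      done()
    })
})
```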
})
})
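
On the brfs remark above: brfs is a browserify transform that inlines `fs.readFileSync(...)` calls at bundle time, which is what lets the file reads in the `before()` hook run in the browser. A sketch of the idea using browserify's programmatic API (this repo's actual build wiring may differ):

```js
const browserify = require('browserify')

browserify('src/files.js')
  .transform('brfs') // statically inlines fs.readFileSync(...) contents
  .bundle()
  .pipe(process.stdout)
```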