Commit
feat: upgrade to new ipfs-block and blockservice
dignifiedquire authored and daviddias committed Mar 21, 2017
1 parent eb9b9f1 commit 1dd4dd2
Showing 11 changed files with 177 additions and 156 deletions.
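In short, the new ipfs-block pairs a block's data with its CID at construction time, and the new ipfs-block-service works directly with such blocks. A minimal sketch of the API shape this commit migrates to, based only on the test changes below (`repo` is assumed to be an opened ipfs-repo instance and `cid` the CID of `data`, both prepared elsewhere):

    const Block = require('ipfs-block')
    const BlockService = require('ipfs-block-service')

    const bs = new BlockService(repo)
    const block = new Block(data, cid) // data and CID now travel together

    bs.put(block, (err) => {
      if (err) { return console.error(err) }
      // the CID is read straight off the block instead of being passed separately
      bs.get(block.cid, (err, retrieved) => {
        if (err) { return console.error(err) }
        console.log(retrieved.data.equals(block.data)) // true
      })
    })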
14 changes: 6 additions & 8 deletions package.json
@@ -45,28 +45,26 @@
   },
   "dependencies": {
     "async": "^2.1.5",
+    "bs58": "^4.0.0",
     "buffer-loader": "0.0.1",
     "cids": "~0.4.2",
-    "ipfs-block": "~0.5.5",
+    "ipfs-block": "~0.6.0",
     "is-ipfs": "~0.3.0",
     "multihashes": "~0.4.4",
     "multihashing-async": "~0.4.4",
     "protocol-buffers": "^3.2.1",
     "pull-stream": "^3.5.0",
     "pull-traverse": "^1.0.3",
-    "stable": "^0.1.6",
-    "bs58": "^4.0.0"
+    "stable": "^0.1.6"
   },
   "devDependencies": {
     "aegir": "^11.0.0",
-    "buffer-loader": "0.0.1",
     "chai": "^3.5.0",
     "chai-checkmark": "^1.0.1",
+    "detect-node": "^2.0.3",
     "dirty-chai": "^1.2.2",
-    "fs-pull-blob-store": "~0.4.1",
-    "idb-pull-blob-store": "~0.5.1",
-    "ipfs-block-service": "~0.8.3",
-    "ipfs-repo": "~0.11.3",
+    "ipfs-block-service": "~0.9.0",
+    "ipfs-repo": "~0.12.0",
     "lodash": "^4.17.4",
     "ncp": "^2.0.0",
     "pre-commit": "^1.2.2",
101 changes: 50 additions & 51 deletions src/resolver.js
@@ -1,5 +1,7 @@
 'use strict'
 
+const waterfall = require('async/waterfall')
+
 const util = require('./util')
 
 exports = module.exports
@@ -10,65 +12,62 @@ exports.multicodec = 'dag-pb'
  * throw if not possible. `block` is an IPFS Block instance (contains data+key)
  */
 exports.resolve = (block, path, callback) => {
-  util.deserialize(block.data, gotNode)
-
-  function gotNode (err, node) {
-    if (err) {
-      return callback(err)
-    }
-
-    const split = path.split('/')
-
-    if (split[0] === 'Links') {
-      let remainderPath = ''
-
-      // all links
-      if (!split[1]) {
-        return callback(null, {
-          value: node.links.map((l) => l.toJSON()),
-          remainderPath: ''
-        })
-      }
-
-      // select one link
-
-      const values = {}
-
-      // populate both index number and name to enable both cases
-      // for the resolver
-      node.links.forEach((l, i) => {
-        const link = l.toJSON()
-        values[i] = {
-          hash: link.multihash,
-          name: link.name,
-          size: link.size
-        }
-        // TODO by enabling something to resolve through link name, we are
-        // applying a transformation (a view) to the data, confirm if this
-        // is exactly what we want
-        values[link.name] = link.multihash
-      })
-
-      let value = values[split[1]]
-
-      // if remainderPath exists, value needs to be CID
-      if (split[2] === 'Hash') {
-        value = { '/': value.hash }
-      } else if (split[2] === 'Tsize') {
-        value = { '/': value.size }
-      } else if (split[2] === 'Name') {
-        value = { '/': value.name }
-      }
-
-      remainderPath = split.slice(3).join('/')
-
-      callback(null, { value: value, remainderPath: remainderPath })
-    } else if (split[0] === 'Data') {
-      callback(null, { value: node.data, remainderPath: '' })
-    } else {
-      callback(new Error('path not available'))
-    }
-  }
+  waterfall([
+    (cb) => util.deserialize(block.data, cb),
+    (node, cb) => {
+      const split = path.split('/')
+
+      if (split[0] === 'Links') {
+        let remainderPath = ''
+
+        // all links
+        if (!split[1]) {
+          return cb(null, {
+            value: node.links.map((l) => l.toJSON()),
+            remainderPath: ''
+          })
+        }
+
+        // select one link
+
+        const values = {}
+
+        // populate both index number and name to enable both cases
+        // for the resolver
+        node.links.forEach((l, i) => {
+          const link = l.toJSON()
+          values[i] = {
+            hash: link.multihash,
+            name: link.name,
+            size: link.size
+          }
+          // TODO by enabling something to resolve through link name, we are
+          // applying a transformation (a view) to the data, confirm if this
+          // is exactly what we want
+          values[link.name] = link.multihash
+        })
+
+        let value = values[split[1]]
+
+        // if remainderPath exists, value needs to be CID
+        if (split[2] === 'Hash') {
+          value = { '/': value.hash }
+        } else if (split[2] === 'Tsize') {
+          value = { '/': value.size }
+        } else if (split[2] === 'Name') {
+          value = { '/': value.name }
+        }
+
+        remainderPath = split.slice(3).join('/')
+
+        cb(null, { value: value, remainderPath: remainderPath })
+      } else if (split[0] === 'Data') {
+        cb(null, { value: node.data, remainderPath: '' })
+      } else {
+        cb(new Error('path not available'))
+      }
+    }
+  ], callback)
 }
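For orientation, a rough usage sketch of the reworked resolver (names are illustrative; `block` is an ipfs-block instance wrapping a serialized dag-pb node):

    const resolver = require('./src/resolver')

    // walk into the first link; the whole path is consumed, so remainderPath is ''
    resolver.resolve(block, 'Links/0/Hash', (err, result) => {
      if (err) { throw err }
      console.log(result.value)         // e.g. { '/': <multihash of link 0> }
      console.log(result.remainderPath) // ''
    })

    // 'Data' yields the node's data buffer directly
    resolver.resolve(block, 'Data', (err, result) => {
      if (err) { throw err }
      console.log(result.value)
    })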
56 changes: 20 additions & 36 deletions test/browser.js
@@ -1,58 +1,42 @@
 /* eslint-env mocha */
 /* global self */
 
 'use strict'
 
 const series = require('async/series')
-const Store = require('idb-pull-blob-store')
-const _ = require('lodash')
 const IPFSRepo = require('ipfs-repo')
-const repoContext = require.context('buffer!./test-repo', true)
-const pull = require('pull-stream')
+
+const basePath = 'ipfs' + Math.random()
 
 const idb = self.indexedDB ||
   self.mozIndexedDB ||
   self.webkitIndexedDB ||
   self.msIndexedDB
 
-idb.deleteDatabase('ipfs')
-idb.deleteDatabase('ipfs/blocks')
+idb.deleteDatabase(basePath)
+idb.deleteDatabase(basePath + '/blocks')
 
-describe('Browser tests', () => {
-  const path = 'ipfs' + Math.random()
+describe('Browser', () => {
+  const repo = new IPFSRepo(basePath)
 
   before((done) => {
-    const repoData = []
-
-    repoContext.keys().forEach((key) => {
-      repoData.push({
-        key: key.replace('./', ''),
-        value: repoContext(key)
-      })
-    })
-
-    const mainBlob = new Store(path)
-    const blocksBlob = new Store(path + '/blocks')
-
-    series(repoData.map((file) => (cb) => {
-      if (_.startsWith(file.key, 'datastore/')) {
-        return cb()
-      }
-
-      const blocks = _.startsWith(file.key, 'blocks/')
-      const blob = blocks ? blocksBlob : mainBlob
-      const key = blocks ? file.key.replace(/^blocks\//, '') : file.key
-
-      pull(
-        pull(
-          pull.values([file.value]),
-          blob.write(key, cb)
-        )
-      )
-    }), done)
+    series([
+      (cb) => repo.init({}, cb),
+      (cb) => repo.open(cb)
+    ], done)
   })
 
-  const repo = new IPFSRepo(path, {stores: Store})
+  after((done) => {
+    series([
+      (cb) => repo.close(cb),
+      (cb) => {
+        idb.deleteDatabase(basePath)
+        idb.deleteDatabase(basePath + '/blocks')
+        cb()
+      }
+    ], done)
+  })
 
   require('./dag-node-test')(repo)
   require('./dag-link-test')(repo)
 })
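The reworked tests lean on the new ipfs-repo lifecycle: construct a repo with a path, init (for a fresh repo) and open it before the suite, and close it afterwards. A minimal standalone sketch, assuming ipfs-repo ~0.12.0 (the path is illustrative):

    const IPFSRepo = require('ipfs-repo')
    const series = require('async/series')

    const repo = new IPFSRepo('/tmp/example-repo')

    series([
      (cb) => repo.init({}, cb), // only needed when the repo does not exist yet
      (cb) => repo.open(cb)
    ], (err) => {
      if (err) { throw err }
      // ... hand `repo` to the shared test modules, then tear down:
      repo.close((err) => {
        if (err) { throw err }
      })
    })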
51 changes: 17 additions & 34 deletions test/dag-node-test.js
@@ -15,6 +15,8 @@ const DAGNode = dagPB.DAGNode
 const toDAGLink = require('../src/dag-node/util').toDAGLink
 const util = dagPB.util
 const series = require('async/series')
+const waterfall = require('async/waterfall')
+const isNode = require('detect-node')
 
 const BlockService = require('ipfs-block-service')
 const Block = require('ipfs-block')
@@ -26,6 +28,8 @@ const testBlockNamedLinks = loadFixture(__dirname, '/fixtures/test-block-named-links')
 const testBlockUnnamedLinks = loadFixture(__dirname, '/fixtures/test-block-unnamed-links')
 
 module.exports = (repo) => {
+  const bs = new BlockService(repo)
+
   describe('DAGNode', () => {
     it('create a node', (done) => {
       expect(7).checks(done)
@@ -131,6 +135,7 @@ module.exports = (repo) => {
     })
 
     it('create an empty node', (done) => {
+      // this node is not in the repo as we don't copy node data to the browser
      expect(7).checks(done)
      const fromGoIPFS = 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n'
 
@@ -406,51 +411,29 @@ module.exports = (repo) => {
     })
 
     it('marshal a node and store it with block-service', (done) => {
-      const bs = new BlockService(repo)
-
       DAGNode.create(new Buffer('some data'), (err, node) => {
         expect(err).to.not.exist()
-        let cid
         let block
 
-        series([
-          (cb) => {
-            dagPB.util.serialize(node, (err, serialized) => {
-              expect(err).to.not.exist()
-              block = new Block(serialized)
-              cb()
-            })
-          },
-          (cb) => {
-            util.cid(node, (err, _cid) => {
-              expect(err).to.not.exist()
-              cid = _cid
-              cb()
-            })
-          },
-          (cb) => {
-            bs.put({
-              block: block,
-              cid: cid
-            }, cb)
-          },
-          (cb) => {
-            bs.get(cid, (err, retrievedBlock) => {
-              expect(err).to.not.exist()
-              expect(retrievedBlock.data).to.eql(block.data)
-              retrievedBlock.key((err, key) => {
-                expect(err).to.not.exist()
-                expect(key).to.eql(cid.multihash)
-                cb()
-              })
-            })
-          }
+        waterfall([
+          (cb) => dagPB.util.serialize(node, cb),
+          (s, cb) => {
+            block = new Block(s, new CID(node.multihash))
+            bs.put(block, cb)
+          },
+          (cb) => bs.get(block.cid, cb),
+          (retrievedBlock, cb) => {
+            expect(retrievedBlock).to.eql(block)
+            cb()
+          }
         ], done)
       })
     })
 
     it('deserialize go-ipfs block from ipldResolver', (done) => {
-      const bs = new BlockService(repo)
+      if (!isNode) {
+        return done()
+      }
 
       const cidStr = 'QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG'
       const cid = new CID(cidStr)
31 changes: 31 additions & 0 deletions test/node.js
@@ -0,0 +1,31 @@
/* eslint-env mocha */
'use strict'

const ncp = require('ncp').ncp
const rimraf = require('rimraf')
const IPFSRepo = require('ipfs-repo')
const series = require('async/series')
const os = require('os')

describe('Node.js', () => {
const repoExample = process.cwd() + '/test/test-repo'
const repoTests = os.tmpDir() + '/t-r-' + Date.now()
const repo = new IPFSRepo(repoTests)

before((done) => {
series([
(cb) => ncp(repoExample, repoTests, cb),
(cb) => repo.open(cb)
], done)
})

after((done) => {
series([
(cb) => repo.close(cb),
(cb) => rimraf(repoTests, cb)
], done)
})

require('./dag-link-test')(repo)
require('./dag-node-test')(repo)
})