Skip to content
This repository was archived by the owner on Aug 12, 2020. It is now read-only.

Commit 211dfb6

Browse files
authored
feat: update to latest ipld-resolver (#137)
* feat: update to latest ipld-resolver
1 parent 1f0d760 commit 211dfb6

File tree

9 files changed

+42
-45
lines changed

9 files changed

+42
-45
lines changed

package.json

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -39,7 +39,7 @@
3939
},
4040
"homepage": "https://github.com/ipfs/js-ipfs-unixfs-engine#readme",
4141
"devDependencies": {
42-
"aegir": "^9.4.0",
42+
"aegir": "^10.0.0",
4343
"buffer-loader": "0.0.1",
4444
"chai": "^3.5.0",
4545
"fs-pull-blob-store": "^0.4.1",
@@ -58,7 +58,7 @@
5858
"deep-extend": "^0.4.1",
5959
"ipfs-unixfs": "^0.1.9",
6060
"ipld-dag-pb": "^0.9.4",
61-
"ipld-resolver": "^0.6.0",
61+
"ipld-resolver": "^0.8.0",
6262
"is-ipfs": "^0.3.0",
6363
"lodash": "^4.17.4",
6464
"multihashes": "^0.3.2",
@@ -85,4 +85,4 @@
8585
"jbenet <[email protected]>",
8686
"nginnever <[email protected]>"
8787
]
88-
}
88+
}

src/builder/builder.js

Lines changed: 4 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -62,8 +62,7 @@ module.exports = function (createChunker, ipldResolver, createReducer, _options)
6262
waterfall([
6363
(cb) => DAGNode.create(d.marshal(), cb),
6464
(node, cb) => {
65-
ipldResolver.put({
66-
node: node,
65+
ipldResolver.put(node, {
6766
cid: new CID(node.multihash)
6867
}, (err) => cb(err, node))
6968
}
@@ -104,12 +103,9 @@ module.exports = function (createChunker, ipldResolver, createReducer, _options)
104103
})
105104
}),
106105
pull.asyncMap((leaf, callback) => {
107-
ipldResolver.put(
108-
{
109-
node: leaf.DAGNode,
110-
cid: new CID(leaf.DAGNode.multihash)
111-
},
112-
err => callback(err, leaf)
106+
ipldResolver.put(leaf.DAGNode, {
107+
cid: new CID(leaf.DAGNode.multihash)
108+
}, (err) => callback(err, leaf)
113109
)
114110
}),
115111
pull.map((leaf) => {

src/builder/reduce.js

Lines changed: 1 addition & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -34,8 +34,7 @@ module.exports = function (file, ipldResolver, options) {
3434
waterfall([
3535
(cb) => DAGNode.create(f.marshal(), links, cb),
3636
(node, cb) => {
37-
ipldResolver.put({
38-
node: node,
37+
ipldResolver.put(node, {
3938
cid: new CID(node.multihash)
4039
}, (err) => cb(err, node))
4140
}

src/exporter/dir.js

Lines changed: 6 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -30,7 +30,7 @@ function dirExporter (node, name, ipldResolver) {
3030
path: path.join(name, link.name),
3131
hash: link.multihash
3232
})),
33-
paramap((item, cb) => ipldResolver.get(new CID(item.hash), (err, n) => {
33+
paramap((item, cb) => ipldResolver.get(new CID(item.hash), (err, result) => {
3434
if (err) {
3535
return cb(err)
3636
}
@@ -40,10 +40,12 @@ function dirExporter (node, name, ipldResolver) {
4040
size: item.size
4141
}
4242

43+
const node = result.value
44+
4345
cb(null, switchType(
44-
n,
45-
() => cat([pull.values([dir]), dirExporter(n, item.path, ipldResolver)]),
46-
() => fileExporter(n, item.path, ipldResolver)
46+
node,
47+
() => cat([pull.values([dir]), dirExporter(node, item.path, ipldResolver)]),
48+
() => fileExporter(node, item.path, ipldResolver)
4749
))
4850
})),
4951
pull.flatten()

src/exporter/file.js

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -20,7 +20,8 @@ module.exports = (node, name, ipldResolver) => {
2020
function visitor (node) {
2121
return pull(
2222
pull.values(node.links),
23-
paramap((link, cb) => ipldResolver.get(new CID(link.multihash), cb))
23+
paramap((link, cb) => ipldResolver.get(new CID(link.multihash), cb)),
24+
pull.map((result) => result.value)
2425
)
2526
}
2627

src/exporter/index.js

Lines changed: 2 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -34,6 +34,7 @@ module.exports = (hash, ipldResolver, options) => {
3434
}
3535
return pull(
3636
ipldResolver.getStream(new CID(item.hash)),
37+
pull.map((result) => result.value),
3738
pull.map((node) => switchType(
3839
node,
3940
() => dirExporter(node, item.path, ipldResolver),
@@ -46,6 +47,7 @@ module.exports = (hash, ipldResolver, options) => {
4647
// Traverse the DAG
4748
return pull(
4849
ipldResolver.getStream(new CID(hash)),
50+
pull.map((result) => result.value),
4951
pull.map((node) => switchType(
5052
node,
5153
() => traverse.widthFirst({path: hash, hash}, visitor),

src/importer/flush-tree.js

Lines changed: 1 addition & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -135,8 +135,7 @@ function traverse (tree, sizeIndex, path, ipldResolver, source, done) {
135135
(node, cb) => {
136136
sizeIndex[mh.toB58String(node.multihash)] = node.size
137137

138-
ipldResolver.put({
139-
node: node,
138+
ipldResolver.put(node, {
140139
cid: new CID(node.multihash)
141140
}, (err) => cb(err, node))
142141
}

test/browser.js

Lines changed: 5 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -1,15 +1,16 @@
11
/* eslint-env mocha */
2+
/* global self */
23
'use strict'
34

45
const Store = require('idb-pull-blob-store')
56
const IPFSRepo = require('ipfs-repo')
67
const repoContext = require.context('buffer!./repo-example', true)
78
const pull = require('pull-stream')
89

9-
const idb = window.indexedDB ||
10-
window.mozIndexedDB ||
11-
window.webkitIndexedDB ||
12-
window.msIndexedDB
10+
const idb = self.indexedDB ||
11+
self.mozIndexedDB ||
12+
self.webkitIndexedDB ||
13+
self.msIndexedDB
1314

1415
idb.deleteDatabase('ipfs')
1516
idb.deleteDatabase('ipfs/blocks')

test/test-exporter.js

Lines changed: 18 additions & 21 deletions
Original file line number | Diff line number | Diff line change
@@ -28,29 +28,26 @@ module.exports = (repo) => {
2828
it('ensure hash inputs are sanitized', (done) => {
2929
const hash = 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8'
3030
const mhBuf = new Buffer(bs58.decode(hash))
31+
const cid = new CID(hash)
3132

32-
pull(
33-
ipldResolver.getStream(new CID(hash)),
34-
pull.map((node) => UnixFS.unmarshal(node.data)),
35-
pull.collect((err, nodes) => {
36-
expect(err).to.not.exist
37-
38-
const unmarsh = nodes[0]
33+
ipldResolver.get(cid, (err, result) => {
34+
expect(err).to.not.exist
35+
const node = result.value
36+
const unmarsh = UnixFS.unmarshal(node.data)
3937

40-
pull(
41-
exporter(mhBuf, ipldResolver),
42-
pull.collect(onFiles)
43-
)
38+
pull(
39+
exporter(mhBuf, ipldResolver),
40+
pull.collect(onFiles)
41+
)
4442

45-
function onFiles (err, files) {
46-
expect(err).to.not.exist
47-
expect(files).to.have.length(1)
48-
expect(files[0]).to.have.property('path', hash)
43+
function onFiles (err, files) {
44+
expect(err).to.not.exist
45+
expect(files).to.have.length(1)
46+
expect(files[0]).to.have.property('path', hash)
4947

50-
fileEql(files[0], unmarsh.data, done)
51-
}
52-
})
53-
)
48+
fileEql(files[0], unmarsh.data, done)
49+
}
50+
})
5451
})
5552

5653
it('export a file with no links', (done) => {
@@ -59,7 +56,7 @@ module.exports = (repo) => {
5956
pull(
6057
zip(
6158
pull(
62-
ipldResolver.getStream(new CID(hash)),
59+
ipldResolver._getStream(new CID(hash)),
6360
pull.map((node) => UnixFS.unmarshal(node.data))
6461
),
6562
exporter(hash, ipldResolver)
@@ -176,7 +173,7 @@ function fileEql (f1, f2, done) {
176173

177174
try {
178175
if (f2) {
179-
expect(Buffer.concat(data)).to.be.eql(f2)
176+
expect(Buffer.concat(data)).to.eql(f2)
180177
} else {
181178
expect(data).to.exist
182179
}

0 commit comments

Comments (0)