From f964eee2b30c36598cb7c4b4d77367aa60583aec Mon Sep 17 00:00:00 2001
From: Volker Mische <volker.mische@gmail.com>
Date: Fri, 14 Dec 2018 15:17:16 +0100
Subject: [PATCH] refactor: update to new IPLD API

This is part of the Awesome Endeavour: Async Iterators:
https://github.com/ipfs/js-ipfs/issues/1670
---
 package.json                  |   6 +-
 src/file.js                   |  44 +++++++++++-------
 src/resolve.js                |   7 ++-
 test/exporter-sharded.spec.js |  34 ++++++-----
 test/exporter-subtree.spec.js |  10 +++-
 test/exporter.spec.js         | 103 +++++++++++++++------------------
 6 files changed, 111 insertions(+), 93 deletions(-)

diff --git a/package.json b/package.json
index c4e18f8..cc5ac07 100644
--- a/package.json
+++ b/package.json
@@ -40,12 +40,12 @@
     "chai": "^4.2.0",
     "detect-node": "^2.0.4",
     "dirty-chai": "^2.0.1",
-    "ipld": "~0.21.1",
+    "ipld": "~0.22.0",
     "ipld-dag-pb": "~0.15.2",
     "ipld-in-memory": "^2.0.0",
+    "multicodec": "~0.5.0",
     "pull-pushable": "^2.2.0",
     "pull-stream-to-stream": "^1.3.4",
-    "pull-zip": "^2.0.1",
     "sinon": "^7.1.0",
     "stream-to-pull-stream": "^1.7.2"
   },
@@ -54,7 +54,7 @@
     "cids": "~0.5.5",
     "hamt-sharding": "0.0.2",
     "ipfs-unixfs": "~0.1.16",
-    "ipfs-unixfs-importer": "~0.38.0",
+    "ipfs-unixfs-importer": "git+https://github.com/ipfs/js-ipfs-unixfs-importer.git#new-ipld-api",
     "pull-cat": "^1.1.11",
     "pull-defer": "~0.2.3",
     "pull-paramap": "^1.2.2",
diff --git a/src/file.js b/src/file.js
index 0078a9b..9a65e06 100644
--- a/src/file.js
+++ b/src/file.js
@@ -150,23 +150,33 @@ function getChildren (dag, offset, end) {
 
   return pull(
     once(filteredLinks),
-    paramap((children, cb) => {
-      dag.getMany(children.map(child => child.link.cid), (err, results) => {
-        if (err) {
-          return cb(err)
-        }
-
-        cb(null, results.map((result, index) => {
-          const child = children[index]
-
-          return {
-            start: child.start,
-            end: child.end,
-            node: result,
-            size: child.size
-          }
-        }))
-      })
+    paramap(async (children, cb) => {
+      try {
+        // `getMany()` returns an async iterator in the new IPLD API
+        const results = dag.getMany(children.map(child => child.link.cid))
+        const final = []
+
+        for (
+          let index = 0, result = await results.next();
+          !result.done;
+          index++, result = await results.next()
+        ) {
+          const child = children[index]
+          const node = result.value
+
+          final.push({
+            start: child.start,
+            end: child.end,
+            node: node,
+            size: child.size
+          })
+        }
+
+        cb(null, final)
+      } catch (error) {
+        // surface iterator failures instead of leaving an unhandled rejection
+        cb(error)
+      }
     }),
     flatten()
  )
diff --git a/src/resolve.js b/src/resolve.js
index e0c10b5..b7ea742 100644
--- a/src/resolve.js
+++ b/src/resolve.js
@@ -42,8 +42,11 @@ function createResolver (dag, options, depth, parent) {
       }
 
       waterfall([
-        (done) => dag.get(item.cid, done),
-        (node, done) => done(null, resolveItem(item.cid, node.value, item, options))
+        (done) => dag.get(item.cid).then(
+          (node) => done(null, node),
+          (error) => done(error)
+        ),
+        (node, done) => done(null, resolveItem(item.cid, node, item, options))
       ], cb)
     }),
     flatten(),
diff --git a/test/exporter-sharded.spec.js b/test/exporter-sharded.spec.js
index 57bb18b..a9fc907 100644
--- a/test/exporter-sharded.spec.js
+++ b/test/exporter-sharded.spec.js
@@ -20,6 +20,7 @@ const {
   DAGLink,
   DAGNode
 } = require('ipld-dag-pb')
+const multicodec = require('multicodec')
 
 const SHARD_SPLIT_THRESHOLD = 10
 
@@ -96,10 +97,13 @@ describe('exporter sharded', function () {
         files[imported.path].cid = new CID(imported.multihash)
       })
 
-      ipld.get(directory, cb)
+      ipld.get(directory).then(
+        (node) => cb(null, node),
+        (error) => cb(error)
+      )
     },
-    ({ value, cid }, cb) => {
-      const dir = UnixFS.unmarshal(value.data)
+    ({ data }, cb) => {
+      const dir = UnixFS.unmarshal(data)
 
       expect(dir.type).to.equal('hamt-sharded-directory')
 
@@ -375,11 +379,13 @@ describe('exporter sharded', function () {
           ], cb)
         },
         (node, cb) => {
-          ipld.put(node, {
-            version: 0,
-            format: 'dag-pb',
-            hashAlg: 'sha2-256'
-          }, cb)
+          ipld.put(node, multicodec.DAG_PB, {
+            cidVersion: 0,
+            hashAlg: multicodec.SHA2_256
+          }).then(
+            (cid) => cb(null, cid),
+            (error) => cb(error)
+          )
         },
         (cid, cb) => {
           DAGNode.create(new UnixFS('hamt-sharded-directory').marshal(), [
@@ -387,11 +393,13 @@ describe('exporter sharded', function () {
           ], cb)
         },
         (node, cb) => {
-          ipld.put(node, {
-            version: 1,
-            format: 'dag-pb',
-            hashAlg: 'sha2-256'
-          }, cb)
+          ipld.put(node, multicodec.DAG_PB, {
+            cidVersion: 1,
+            hashAlg: multicodec.SHA2_256
+          }).then(
+            (cid) => cb(null, cid),
+            (error) => cb(error)
+          )
         },
         (dir, cb) => {
           pull(
diff --git a/test/exporter-subtree.spec.js b/test/exporter-subtree.spec.js
index a4577ef..f9d9e54 100644
--- a/test/exporter-subtree.spec.js
+++ b/test/exporter-subtree.spec.js
@@ -11,6 +11,7 @@ const pull = require('pull-stream')
 const randomBytes = require('./helpers/random-bytes')
 const waterfall = require('async/waterfall')
 const importer = require('ipfs-unixfs-importer')
+const multicodec = require('multicodec')
 
 const ONE_MEG = Math.pow(1024, 2)
 
@@ -132,7 +133,14 @@ describe('exporter subtree', () => {
       ),
       (files, cb) => cb(null, files.pop().multihash),
       (buf, cb) => cb(null, new CID(buf)),
-      (cid, cb) => ipld.put({ a: { file: cid } }, { format: 'dag-cbor' }, cb),
+      (cid, cb) => {
+        ipld.put(
+          { a: { file: cid } }, multicodec.DAG_CBOR
+        ).then(
+          (cborNodeCid) => cb(null, cborNodeCid),
+          (error) => cb(error)
+        )
+      },
       (cborNodeCid, cb) => pull(
         exporter(`${cborNodeCid.toBaseEncodedString()}/a/file/level-1/200Bytes.txt`, ipld),
         pull.collect(cb)
diff --git a/test/exporter.spec.js b/test/exporter.spec.js
index 124b709..e11606c 100644
--- a/test/exporter.spec.js
+++ b/test/exporter.spec.js
@@ -8,7 +8,6 @@ const IPLD = require('ipld')
 const inMemory = require('ipld-in-memory')
 const UnixFS = require('ipfs-unixfs')
 const pull = require('pull-stream')
-const zip = require('pull-zip')
 const CID = require('cids')
 const doUntil = require('async/doUntil')
 const waterfall = require('async/waterfall')
@@ -25,6 +24,7 @@ const {
 } = require('ipld-dag-pb')
 const isNode = require('detect-node')
 const randomBytes = require('./helpers/random-bytes')
+const multicodec = require('multicodec')
 
 const exporter = require('../src')
 const importer = require('ipfs-unixfs-importer')
@@ -51,13 +51,11 @@ describe('exporter', () => {
     DAGNode.create(file.marshal(), options.links, (err, node) => {
       expect(err).to.not.exist()
 
-      ipld.put(node, {
-        version: 0,
-        hashAlg: 'sha2-256',
-        format: 'dag-pb'
-      }, (err, cid) => {
-        cb(err, { file: file, node: node, cid: cid })
-      })
+      ipld.put(node, multicodec.DAG_PB, {
+        cidVersion: 0,
+        hashAlg: multicodec.SHA2_256
+      }).then((cid) => cb(null, { file: file, node: node, cid: cid }))
+        .catch((error) => cb(error))
     })
   }
 
@@ -182,47 +180,41 @@ describe('exporter', () => {
   })
 
   it('ensure hash inputs are sanitized', (done) => {
-    dagPut((err, result) => {
+    dagPut(async (err, result) => {
       expect(err).to.not.exist()
 
-      ipld.get(result.cid, (err, res) => {
-        expect(err).to.not.exist()
-        const unmarsh = UnixFS.unmarshal(result.node.data)
+      const node = await ipld.get(result.cid)
+      const unmarsh = UnixFS.unmarshal(node.data)
 
-        expect(unmarsh.data).to.deep.equal(result.file.data)
+      expect(unmarsh.data).to.deep.equal(result.file.data)
 
-        pull(
-          exporter(result.cid, ipld),
-          pull.collect(onFiles)
-        )
+      pull(
+        exporter(result.cid, ipld),
+        pull.collect(onFiles)
+      )
 
-        function onFiles (err, files) {
-          expect(err).to.equal(null)
-          expect(files).to.have.length(1)
-          expect(files[0]).to.have.property('cid')
-          expect(files[0]).to.have.property('path', result.cid.toBaseEncodedString())
-          fileEql(files[0], unmarsh.data, done)
-        }
-      })
+      function onFiles (err, files) {
+        expect(err).to.equal(null)
+        expect(files).to.have.length(1)
+        expect(files[0]).to.have.property('cid')
+        expect(files[0]).to.have.property('path', result.cid.toBaseEncodedString())
+        fileEql(files[0], unmarsh.data, done)
+      }
     })
   })
 
   it('exports a file with no links', (done) => {
-    dagPut((err, result) => {
+    dagPut(async (err, result) => {
       expect(err).to.not.exist()
 
+      const node = await ipld.get(result.cid)
+      const unmarsh = UnixFS.unmarshal(node.data)
+
       pull(
-        zip(
-          pull(
-            ipld.getStream(result.cid),
-            pull.map((res) => UnixFS.unmarshal(res.value.data))
-          ),
-          exporter(result.cid, ipld)
-        ),
+        exporter(result.cid, ipld),
         pull.collect((err, values) => {
           expect(err).to.not.exist()
-          const unmarsh = values[0][0]
-          const file = values[0][1]
+          const file = values[0]
 
           fileEql(file, unmarsh.data, done)
         })
@@ -292,25 +284,20 @@ describe('exporter', () => {
 
     dagPut({
       content: randomBytes(100)
-    }, (err, result) => {
+    }, async (err, result) => {
       expect(err).to.not.exist()
 
+      const node = await ipld.get(result.cid)
+      const unmarsh = UnixFS.unmarshal(node.data)
+
       pull(
-        zip(
-          pull(
-            ipld.getStream(result.cid),
-            pull.map((res) => UnixFS.unmarshal(res.value.data))
-          ),
-          exporter(result.cid, ipld, {
-            offset,
-            length
-          })
-        ),
+        exporter(result.cid, ipld, {
+          offset,
+          length
+        }),
         pull.collect((err, values) => {
           expect(err).to.not.exist()
-
-          const unmarsh = values[0][0]
-          const file = values[0][1]
+          const file = values[0]
 
           fileEql(file, unmarsh.data.slice(offset, offset + length), done)
         })
@@ -1153,13 +1140,15 @@ function createAndPersistNode (ipld, type, data, children, callback) {
       return callback(error)
     }
 
-    ipld.put(node, {
-      version: 1,
-      hashAlg: 'sha2-256',
-      format: 'dag-pb'
-    }, (error, cid) => callback(error, {
-      node,
-      cid
-    }))
+    ipld.put(node, multicodec.DAG_PB, {
+      cidVersion: 1,
+      hashAlg: multicodec.SHA2_256
+    }).then(
+      (cid) => callback(null, {
+        cid,
+        node
+      }),
+      (error) => callback(error)
+    )
   })
 }
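-- 

Note for reviewers: every hunk above applies the same mechanical translation
from the callback-based IPLD API (ipld ~0.21.x) to the promise-based one in
ipld ~0.22.0. The sketch below is a minimal illustration of the before/after
calling conventions, assuming only what the patch itself shows (an `ipld`
instance, the `multicodec` constants, and `getMany()` consumed via `next()`);
the `roundtrip` helper and its arguments are hypothetical names for
illustration, not code from this repository.

    // Old (~0.21.x) style, for contrast:
    //   ipld.get(cid, (err, result) => { /* node is result.value */ })
    //   ipld.put(node, { format: 'dag-pb', hashAlg: 'sha2-256', version: 0 }, cb)

    const multicodec = require('multicodec')

    async function roundtrip (ipld, node) {
      // put() now takes the codec as a multicodec constant and
      // resolves to the CID of the serialized node
      const cid = await ipld.put(node, multicodec.DAG_PB, {
        cidVersion: 0,
        hashAlg: multicodec.SHA2_256
      })

      // get() resolves directly to the deserialized node; there is
      // no { value, cid } wrapper any more
      const fetched = await ipld.get(cid)

      // getMany() returns an async iterator that is consumed with next(),
      // exactly as src/file.js does above
      const results = ipld.getMany([cid])
      for (let r = await results.next(); !r.done; r = await results.next()) {
        console.log(r.value)
      }

      return fetched
    }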