From 8273e41327fe3b9dba7d7c5bc455d5779d236132 Mon Sep 17 00:00:00 2001
From: achingbrain
Date: Sat, 11 Jan 2020 19:12:57 +0100
Subject: [PATCH] feat: support passing DAGNodes as content

Sometimes you just want to do some DAG manipulations and don't
necessarily want to chunk and create files.

This PR allows you to pass `DAGNode`s as `.content` for entries being
imported.

Also removes the `multihashes` dep in favour of the one exported by
`multihashing-async`.
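As a rough sketch of the intent (not taken verbatim from the tests
below - `ipld` is assumed to be an instantiated IPLD instance, e.g.
from `ipld-in-memory` as the tests use, and the loop is assumed to run
inside an async function):

```js
const importer = require('ipfs-unixfs-importer')
const UnixFs = require('ipfs-unixfs')
const { DAGNode } = require('ipld-dag-pb')

// a DAGNode whose Data is a marshaled UnixFS entry can now be passed
// straight through as `.content` - it is persisted as-is, no chunking
const node = new DAGNode(new UnixFs().marshal())

for await (const entry of importer([{
  path: '/foo',
  content: node
}], ipld)) {
  console.log(entry.path, entry.cid.toString())
}
```

The importer persists the node untouched and yields its CID, so no
chunking, layout or file builder logic runs for these entries.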
---
 README.md                   |  2 +-
 package.json                |  1 -
 src/dag-builder/dag-node.js | 17 +++++++++++++++++
 src/dag-builder/index.js    | 30 +++++++++++++++++++-----------
 src/utils/persist.js        |  8 +++++---
 test/builder.spec.js        | 12 +++++++-----
 test/importer.spec.js       | 25 +++++++++++++++++++++++++
 7 files changed, 74 insertions(+), 21 deletions(-)
 create mode 100644 src/dag-builder/dag-node.js

diff --git a/README.md b/README.md
index 674f3e4..8964f1b 100644
--- a/README.md
+++ b/README.md
@@ -105,7 +105,7 @@ The `import` function returns an async iterator takes a source async iterator th
 ```js
 {
   path: 'a name',
-  content: (Buffer or iterator emitting Buffers),
+  content: (Buffer, (async) iterator emitting Buffers, or a DAGNode with a marshaled UnixFS entry as its Data property),
   mtime: (Number representing seconds since (positive) or before (negative) the Unix Epoch),
   mode: (Number representing ugo-rwx, setuid, setguid and sticky bit)
 }
diff --git a/package.json b/package.json
index 0f46e42..e5d5c90 100644
--- a/package.json
+++ b/package.json
@@ -48,7 +48,6 @@
     "ipld-in-memory": "^3.0.0",
     "it-buffer-stream": "^1.0.0",
     "it-last": "^1.0.0",
-    "multihashes": "^0.4.14",
     "nyc": "^15.0.0",
     "sinon": "^8.0.4"
   },
diff --git a/src/dag-builder/dag-node.js b/src/dag-builder/dag-node.js
new file mode 100644
index 0000000..e4c8d8d
--- /dev/null
+++ b/src/dag-builder/dag-node.js
@@ -0,0 +1,17 @@
+'use strict'
+
+const UnixFS = require('ipfs-unixfs')
+const persist = require('../utils/persist')
+
+const dagNodeBuilder = async (path, node, ipld, options) => {
+  const cid = await persist(node, ipld, options)
+
+  return {
+    cid,
+    path,
+    unixfs: UnixFS.unmarshal(node.Data),
+    node
+  }
+}
+
+module.exports = dagNodeBuilder
diff --git a/src/dag-builder/index.js b/src/dag-builder/index.js
index bc2f4b2..a43d137 100644
--- a/src/dag-builder/index.js
+++ b/src/dag-builder/index.js
@@ -2,8 +2,12 @@
 
 const dirBuilder = require('./dir')
 const fileBuilder = require('./file')
+const dagNodeBuilder = require('./dag-node')
 const createChunker = require('../chunker')
 const validateChunks = require('./validate-chunks')
+const {
+  DAGNode
+} = require('ipld-dag-pb')
 
 async function * dagBuilder (source, ipld, options) {
   for await (const entry of source) {
@@ -19,21 +23,25 @@
     }
 
     if (entry.content) {
-      let source = entry.content
-
-      // wrap in iterator if it is array-like or not an iterator
-      if ((!source[Symbol.asyncIterator] && !source[Symbol.iterator]) || source.length !== undefined) {
-        source = {
-          [Symbol.iterator]: function * () {
-            yield entry.content
+      if (DAGNode.isDAGNode(entry.content)) {
+        yield () => dagNodeBuilder(entry.path, entry.content, ipld, options)
+      } else {
+        let source = entry.content
+
+        // wrap in iterator if it is array-like or not an iterator
+        if ((!source[Symbol.asyncIterator] && !source[Symbol.iterator]) || source.length !== undefined) {
+          source = {
+            [Symbol.iterator]: function * () {
+              yield entry.content
+            }
           }
         }
-      }
 
-      const chunker = createChunker(options.chunker, validateChunks(source), options)
+        const chunker = createChunker(options.chunker, validateChunks(source), options)
 
-      // item is a file
-      yield () => fileBuilder(entry, chunker, ipld, options)
+        // item is a file
+        yield () => fileBuilder(entry, chunker, ipld, options)
+      }
     } else {
       // item is a directory
       yield () => dirBuilder(entry, ipld, options)
diff --git a/src/utils/persist.js b/src/utils/persist.js
index 69e5af7..8d422ff 100644
--- a/src/utils/persist.js
+++ b/src/utils/persist.js
@@ -1,6 +1,8 @@
 'use strict'
 
-const mh = require('multihashes')
+const {
+  multihash
+} = require('multihashing-async')
 const mc = require('multicodec')
 
 const persist = (node, ipld, options) => {
@@ -14,10 +16,10 @@ const persist = (node, ipld, options) => {
   }
 
   if (isNaN(options.hashAlg)) {
-    options.hashAlg = mh.names[options.hashAlg]
+    options.hashAlg = multihash.names[options.hashAlg]
   }
 
-  if (options.hashAlg !== mh.names['sha2-256']) {
+  if (options.hashAlg !== multihash.names['sha2-256']) {
     options.cidVersion = 1
   }
 
diff --git a/test/builder.spec.js b/test/builder.spec.js
index bbeb9d0..e84eca2 100644
--- a/test/builder.spec.js
+++ b/test/builder.spec.js
@@ -4,7 +4,9 @@ const chai = require('chai')
 chai.use(require('dirty-chai'))
 const expect = chai.expect
 
-const mh = require('multihashes')
+const {
+  multihash
+} = require('multihashing-async')
 const IPLD = require('ipld')
 const inMemory = require('ipld-in-memory')
 const UnixFS = require('ipfs-unixfs')
@@ -18,7 +20,7 @@ describe('builder', () => {
     ipld = await inMemory(IPLD)
   })
 
-  const testMultihashes = Object.keys(mh.names).slice(1, 40)
+  const testMultihashes = Object.keys(multihash.names).slice(1, 40)
   const opts = {
     strategy: 'flat',
     chunker: 'fixed',
@@ -48,7 +50,7 @@
       expect(imported).to.exist()
 
       // Verify multihash has been encoded using hashAlg
-      expect(mh.decode(imported.cid.multihash).name).to.equal(hashAlg)
+      expect(multihash.decode(imported.cid.multihash).name).to.equal(hashAlg)
 
       // Fetch using hashAlg encoded multihash
       const node = await ipld.get(imported.cid)
@@ -77,7 +79,7 @@
       const imported = await (await first(builder([inputFile], ipld, options)))()
 
       expect(imported).to.exist()
-      expect(mh.decode(imported.cid.multihash).name).to.equal(hashAlg)
+      expect(multihash.decode(imported.cid.multihash).name).to.equal(hashAlg)
     }
   })
 
@@ -96,7 +98,7 @@
 
      const imported = await (await first(builder([Object.assign({}, inputFile)], ipld, options)))()
 
-      expect(mh.decode(imported.cid.multihash).name).to.equal(hashAlg)
+      expect(multihash.decode(imported.cid.multihash).name).to.equal(hashAlg)
 
       // Fetch using hashAlg encoded multihash
       const node = await ipld.get(imported.cid)
diff --git a/test/importer.spec.js b/test/importer.spec.js
index 50bef67..7bbb1b2 100644
--- a/test/importer.spec.js
+++ b/test/importer.spec.js
@@ -19,6 +19,13 @@ const bigFile = loadFixture((isNode ? __dirname : 'test') + '/fixtures/1.2MiB.tx
 const smallFile = loadFixture((isNode ? __dirname : 'test') + '/fixtures/200Bytes.txt')
 const all = require('it-all')
 const first = require('it-first')
+const {
+  DAGNode
+} = require('ipld-dag-pb')
+const mc = require('multicodec')
+const {
+  multihash
+} = require('multihashing-async')
 
 function stringifyMh (files) {
   return files.map((file) => {
@@ -922,5 +929,23 @@ strategies.forEach((strategy) => {
       const node2 = await exporter(entries[1].cid, ipld)
       expect(node2).to.have.nested.property('unixfs.mode', 0o0755)
     })
+
+    it('supports DAGNodes as content', async () => {
+      const entry = new UnixFs()
+      const node = new DAGNode(entry.marshal())
+      const cid = await ipld.put(node, mc.DAG_PB, {
+        hashAlg: multihash.names['sha2-256'],
+        cidVersion: 0
+      })
+
+      const entries = await all(importer([{
+        path: '/foo',
+        content: node
+      }], ipld, {
+        shardSplitThreshold: 0
+      }))
+
+      expect(entries).to.have.nested.deep.property('[0].cid', cid)
+    })
   })
 })