This repository was archived by the owner on Apr 29, 2020. It is now read-only.

feat: support passing DAGNodes as content #47

Closed · wants to merge 1 commit into from
2 changes: 1 addition & 1 deletion README.md
@@ -105,7 +105,7 @@ The `import` function returns an async iterator takes a source async iterator th
```js
{
path: 'a name',
content: (Buffer or iterator emitting Buffers),
  content: (Buffer, (async) iterator emitting Buffers, or a DAGNode with a marshaled UnixFS entry as its Data property),
mtime: (Number representing seconds since (positive) or before (negative) the Unix Epoch),
mode: (Number representing ugo-rwx, setuid, setguid and sticky bit)
}
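For reference, a minimal sketch of passing a pre-built DAGNode as content, mirroring the new test added below (the `ipld` instance is assumed, e.g. one created with `ipld-in-memory`):

```js
const importer = require('ipfs-unixfs-importer')
const UnixFS = require('ipfs-unixfs')
const { DAGNode } = require('ipld-dag-pb')
const all = require('it-all')

async function importDagNode (ipld) {
  // a DAGNode whose Data is a marshaled UnixFS entry can be passed directly as content
  const node = new DAGNode(new UnixFS().marshal())

  const entries = await all(importer([{
    path: '/foo',
    content: node
  }], ipld))

  // the resulting entry's cid points at the persisted node
  return entries[0].cid
}
```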
1 change: 0 additions & 1 deletion package.json
@@ -48,7 +48,6 @@
"ipld-in-memory": "^3.0.0",
"it-buffer-stream": "^1.0.0",
"it-last": "^1.0.0",
"multihashes": "^0.4.14",
"nyc": "^15.0.0",
"sinon": "^8.0.4"
},
17 changes: 17 additions & 0 deletions src/dag-builder/dag-node.js
@@ -0,0 +1,17 @@
'use strict'

const UnixFS = require('ipfs-unixfs')
const persist = require('../utils/persist')

const dagNodeBuilder = async (path, node, ipld, options) => {
const cid = await persist(node, ipld, options)

return {
cid,
path,
unixfs: UnixFS.unmarshal(node.Data),
node
}
}

module.exports = dagNodeBuilder
30 changes: 19 additions & 11 deletions src/dag-builder/index.js
@@ -2,8 +2,12 @@

const dirBuilder = require('./dir')
const fileBuilder = require('./file')
const dagNodeBuilder = require('./dag-node')
const createChunker = require('../chunker')
const validateChunks = require('./validate-chunks')
const {
DAGNode
} = require('ipld-dag-pb')

async function * dagBuilder (source, ipld, options) {
for await (const entry of source) {
@@ -19,21 +23,25 @@ async function * dagBuilder (source, ipld, options) {
}

if (entry.content) {
let source = entry.content

// wrap in iterator if it is array-like or not an iterator
if ((!source[Symbol.asyncIterator] && !source[Symbol.iterator]) || source.length !== undefined) {
source = {
[Symbol.iterator]: function * () {
yield entry.content
if (DAGNode.isDAGNode(entry.content)) {
yield () => dagNodeBuilder(entry.path, entry.content, ipld, options)
} else {
let source = entry.content

// wrap in iterator if it is array-like or not an iterator
if ((!source[Symbol.asyncIterator] && !source[Symbol.iterator]) || source.length !== undefined) {
source = {
[Symbol.iterator]: function * () {
yield entry.content
}
}
}
}

const chunker = createChunker(options.chunker, validateChunks(source), options)
const chunker = createChunker(options.chunker, validateChunks(source), options)

// item is a file
yield () => fileBuilder(entry, chunker, ipld, options)
// item is a file
yield () => fileBuilder(entry, chunker, ipld, options)
}
} else {
// item is a directory
yield () => dirBuilder(entry, ipld, options)
8 changes: 5 additions & 3 deletions src/utils/persist.js
@@ -1,6 +1,8 @@
'use strict'

const mh = require('multihashes')
const {
multihash
} = require('multihashing-async')
const mc = require('multicodec')

const persist = (node, ipld, options) => {
@@ -14,10 +16,10 @@ const persist = (node, ipld, options) => {
}

if (isNaN(options.hashAlg)) {
options.hashAlg = mh.names[options.hashAlg]
options.hashAlg = multihash.names[options.hashAlg]
}

if (options.hashAlg !== mh.names['sha2-256']) {
if (options.hashAlg !== multihash.names['sha2-256']) {
options.cidVersion = 1
}

12 changes: 7 additions & 5 deletions test/builder.spec.js
@@ -4,7 +4,9 @@
const chai = require('chai')
chai.use(require('dirty-chai'))
const expect = chai.expect
const mh = require('multihashes')
const {
multihash
} = require('multihashing-async')
const IPLD = require('ipld')
const inMemory = require('ipld-in-memory')
const UnixFS = require('ipfs-unixfs')
@@ -18,7 +20,7 @@ describe('builder', () => {
ipld = await inMemory(IPLD)
})

const testMultihashes = Object.keys(mh.names).slice(1, 40)
const testMultihashes = Object.keys(multihash.names).slice(1, 40)
const opts = {
strategy: 'flat',
chunker: 'fixed',
@@ -48,7 +50,7 @@
expect(imported).to.exist()

// Verify multihash has been encoded using hashAlg
expect(mh.decode(imported.cid.multihash).name).to.equal(hashAlg)
expect(multihash.decode(imported.cid.multihash).name).to.equal(hashAlg)

// Fetch using hashAlg encoded multihash
const node = await ipld.get(imported.cid)
@@ -77,7 +79,7 @@
const imported = await (await first(builder([inputFile], ipld, options)))()

expect(imported).to.exist()
expect(mh.decode(imported.cid.multihash).name).to.equal(hashAlg)
expect(multihash.decode(imported.cid.multihash).name).to.equal(hashAlg)
}
})

@@ -96,7 +98,7 @@

const imported = await (await first(builder([Object.assign({}, inputFile)], ipld, options)))()

expect(mh.decode(imported.cid.multihash).name).to.equal(hashAlg)
expect(multihash.decode(imported.cid.multihash).name).to.equal(hashAlg)

// Fetch using hashAlg encoded multihash
const node = await ipld.get(imported.cid)
25 changes: 25 additions & 0 deletions test/importer.spec.js
@@ -19,6 +19,13 @@ const bigFile = loadFixture((isNode ? __dirname : 'test') + '/fixtures/1.2MiB.tx
const smallFile = loadFixture((isNode ? __dirname : 'test') + '/fixtures/200Bytes.txt')
const all = require('it-all')
const first = require('it-first')
const {
DAGNode
} = require('ipld-dag-pb')
const mc = require('multicodec')
const {
multihash
} = require('multihashing-async')

function stringifyMh (files) {
return files.map((file) => {
@@ -922,5 +929,23 @@ strategies.forEach((strategy) => {
const node2 = await exporter(entries[1].cid, ipld)
expect(node2).to.have.nested.property('unixfs.mode', 0o0755)
})

it('supports DAGNodes as content', async () => {
const entry = new UnixFs()
const node = new DAGNode(entry.marshal())
const cid = await ipld.put(node, mc.DAG_PB, {
hashAlg: multihash.names['sha2-256'],
cidVersion: 0
})

const entries = await all(importer([{
path: '/foo',
content: node
}], ipld, {
shardSplitThreshold: 0
}))

expect(entries).to.have.nested.deep.property('[0].cid', cid)
})
})
})