diff --git a/README.md b/README.md index fbf69031..23147a02 100644 --- a/README.md +++ b/README.md @@ -150,6 +150,20 @@ been written into the [DAG Service][]'s storage mechanism. The input's file paths and directory structure will be preserved in the DAG Nodes. +### Importer options + +In the second argument of the importer constructor you can specify the following options: + +* `chunker` (string, defaults to `"fixed"`): the chunking strategy. Currently only `"fixed"` is supported. +* `chunkerOptions` (object, optional): the options for the chunker. Defaults to an object with the following properties: + * `maxChunkSize` (positive integer, defaults to `262144`): the maximum chunk size for the `fixed` chunker. +* `strategy` (string, defaults to `"balanced"`): the DAG builder strategy name. Supports: + * `flat`: flat list of chunks + * `balanced`: builds a balanced tree + * `trickle`: builds [a trickle tree](https://github.com/ipfs/specs/pull/57#issuecomment-265205384) +* `maxChildrenPerNode` (positive integer, defaults to `174`): the maximum number of children per node for the `balanced` and `trickle` DAG builder strategies +* `layerRepeat` (positive integer, defaults to `4`): (only applicable to the `trickle` DAG builder strategy) the maximum repetition of parent nodes for each layer of the tree. +* `reduceSingleLeafToSelf` (boolean, defaults to `false`): optimization that, when a set of nodes reduces to a single node, uses that node itself as the result instead of wrapping it in a parent node. ### Example Exporter diff --git a/package.json b/package.json index 0b30d687..85657d3f 100644 --- a/package.json +++ b/package.json @@ -48,19 +48,24 @@ "ipfs-repo": "^0.11.2", "ncp": "^2.0.0", "pre-commit": "^1.2.2", + "pull-generate": "^2.2.0", "pull-zip": "^2.0.1", "rimraf": "^2.5.4" }, "dependencies": { "async": "^2.1.4", "cids": "^0.3.5", + "deep-extend": "^0.4.1", "ipfs-unixfs": "^0.1.9", "ipld-dag-pb": "^0.9.3", "ipld-resolver": "^0.4.1", "is-ipfs": "^0.2.1", "multihashes": "^0.3.1", + "pull-batch": "^1.0.0", "pull-block": "^1.0.2", + "pull-pair": "^1.1.0", "pull-paramap": "^1.2.1", + "pull-pause": "0.0.0", "pull-pushable": "^2.0.1", "pull-stream": "^3.5.0", "pull-traverse": "^1.0.3", @@ -76,4 +81,4 @@ "jbenet ", "nginnever " ] -} \ No newline at end of file +} diff --git a/src/builder/balanced/balanced-reducer.js b/src/builder/balanced/balanced-reducer.js new file mode 100644 index 00000000..12291a43 --- /dev/null +++ b/src/builder/balanced/balanced-reducer.js @@ -0,0 +1,53 @@ +'use strict' + +const assert = require('assert') +const pull = require('pull-stream') +const pushable = require('pull-pushable') +const pullPair = require('pull-pair') +const batch = require('pull-batch') + +module.exports = function balancedReduceToRoot (reduce, options) { + const pair = pullPair() + const source = pair.source + + const result = pushable() + + reduceToParents(source, (err, roots) => { + if (err) { + result.end(err) + return // early + } + assert.equal(roots.length, 1, 'need one root') + result.push(roots[0]) + result.end() + }) + + function reduceToParents (_chunks, callback) { + let chunks = _chunks + if (Array.isArray(chunks)) { + chunks = pull.values(chunks) + } + + pull( + chunks, + batch(options.maxChildrenPerNode), + pull.asyncMap(reduce), + pull.collect(reduced) + ) + + function reduced (err, roots) { + if (err) { + callback(err) + } else if (roots.length > 1) { + reduceToParents(roots, callback) + } else { + callback(null, roots) + } + } + } + + return { + sink: pair.sink, + source: result + } +} diff --git a/src/builder/balanced/index.js
b/src/builder/balanced/index.js new file mode 100644 index 00000000..cc43d8ba --- /dev/null +++ b/src/builder/balanced/index.js @@ -0,0 +1,12 @@ +'use strict' + +const balancedReducer = require('./balanced-reducer') + +const defaultOptions = { + maxChildrenPerNode: 174 +} + +module.exports = function (reduce, _options) { + const options = Object.assign({}, defaultOptions, _options) + return balancedReducer(reduce, options) +} diff --git a/src/builder/builder.js b/src/builder/builder.js new file mode 100644 index 00000000..d85025ab --- /dev/null +++ b/src/builder/builder.js @@ -0,0 +1,150 @@ +'use strict' + +const extend = require('deep-extend') +const assert = require('assert') +const UnixFS = require('ipfs-unixfs') +const pull = require('pull-stream') +const through = require('pull-through') +const parallel = require('async/parallel') +const waterfall = require('async/waterfall') +const dagPB = require('ipld-dag-pb') +const CID = require('cids') + +const reduce = require('./reduce') + +const DAGNode = dagPB.DAGNode + +const defaultOptions = { + chunkerOptions: { + maxChunkSize: 262144 + } +} + +module.exports = function (createChunker, ipldResolver, createReducer, _options) { + const options = extend({}, defaultOptions, _options) + + return function (source, files) { + return function (items, cb) { + parallel(items.map((item) => (cb) => { + if (!item.content) { + // item is a directory + return createAndStoreDir(item, (err, node) => { + if (err) { + return cb(err) + } + source.push(node) + files.push(node) + cb() + }) + } + + // item is a file + createAndStoreFile(item, (err, node) => { + if (err) { + return cb(err) + } + source.push(node) + files.push(node) + cb() + }) + }), cb) + } + } + + function createAndStoreDir (item, callback) { + // 1. create the empty dir dag node + // 2. 
write it to the dag store + + const d = new UnixFS('directory') + waterfall([ + (cb) => DAGNode.create(d.marshal(), cb), + (node, cb) => { + ipldResolver.put({ + node: node, + cid: new CID(node.multihash) + }, (err) => cb(err, node)) + } + ], (err, node) => { + if (err) { + return callback(err) + } + callback(null, { + path: item.path, + multihash: node.multihash, + size: node.size + }) + }) + } + + function createAndStoreFile (file, callback) { + if (Buffer.isBuffer(file.content)) { + file.content = pull.values([file.content]) + } + + if (typeof file.content !== 'function') { + return callback(new Error('invalid content')) + } + + const reducer = createReducer(reduce(file, ipldResolver, options), options) + + let previous + let count = 0 + + pull( + file.content, + createChunker(options.chunkerOptions), + pull.map(chunk => new Buffer(chunk)), + pull.map(buffer => new UnixFS('file', buffer)), + pull.asyncMap((fileNode, callback) => { + DAGNode.create(fileNode.marshal(), (err, node) => { + callback(err, { DAGNode: node, fileNode: fileNode }) + }) + }), + pull.asyncMap((leaf, callback) => { + ipldResolver.put( + { + node: leaf.DAGNode, + cid: new CID(leaf.DAGNode.multihash) + }, + err => callback(err, leaf) + ) + }), + pull.map((leaf) => { + return { + path: file.path, + multihash: leaf.DAGNode.multihash, + size: leaf.DAGNode.size, + leafSize: leaf.fileNode.fileSize(), + name: '' + } + }), + through( // mark as single node if only one single node + function onData (data) { + count++ + if (previous) { + this.queue(previous) + } + previous = data + }, + function ended () { + if (previous) { + if (count === 1) { + previous.single = true + } + this.queue(previous) + } + this.queue(null) + } + ), + reducer, + pull.collect((err, roots) => { + if (err) { + callback(err) + } else { + assert.equal(roots.length, 1, 'should result in exactly one root') + callback(null, roots[0]) + } + }) + ) + } +} diff --git a/src/builder/create-build-stream.js b/src/builder/create-build-stream.js new file mode 100644 index 00000000..b9e696db --- /dev/null +++ b/src/builder/create-build-stream.js @@ -0,0 +1,29 @@ +'use strict' + +const pullPushable = require('pull-pushable') +const pullWrite = require('pull-write') + +module.exports = function createBuildStream (createStrategy, ipldResolver, flushTree, options) { + const files = [] + + const source = pullPushable() + + const sink = pullWrite( + createStrategy(source, files), + null, + options.highWaterMark, + (err) => { + if (err) { + source.end(err) + return // early + } + + flushTree(files, ipldResolver, source, source.end) + } + ) + + return { + source: source, + sink: sink + } +} diff --git a/src/builder/flat/index.js b/src/builder/flat/index.js new file mode 100644 index 00000000..7a4e9cbb --- /dev/null +++ b/src/builder/flat/index.js @@ -0,0 +1,33 @@ +'use strict' + +const assert = require('assert') +const pull = require('pull-stream') +const pushable = require('pull-pushable') +const pullPair = require('pull-pair') +const batch = require('pull-batch') + +module.exports = function (reduce, options) { + const pair = pullPair() + const source = pair.source + const result = pushable() + + pull( + source, + batch(Infinity), + pull.asyncMap(reduce), + pull.collect((err, roots) => { + if (err) { + result.end(err) + return // early + } + assert.equal(roots.length, 1, 'need one root') + result.push(roots[0]) + result.end() + }) + ) + + return { + sink: pair.sink, + source: result + } +} diff --git a/src/builder/index.js b/src/builder/index.js new file mode 100644 index 
00000000..0a769979 --- /dev/null +++ b/src/builder/index.js @@ -0,0 +1,33 @@ +'use strict' + +const assert = require('assert') +const createBuildStream = require('./create-build-stream') +const Builder = require('./builder') + +const reducers = { + flat: require('./flat'), + balanced: require('./balanced'), + trickle: require('./trickle') +} + +const defaultOptions = { + strategy: 'balanced', + highWaterMark: 100, + reduceSingleLeafToSelf: false +} + +module.exports = function (Chunker, ipldResolver, flushTree, _options) { + assert(Chunker, 'Missing chunker creator function') + assert(ipldResolver, 'Missing IPLD Resolver') + assert(flushTree, 'Missing flushTree argument') + + const options = Object.assign({}, defaultOptions, _options) + + const strategyName = options.strategy + const reducer = reducers[strategyName] + assert(reducer, 'Unknown importer build strategy name: ' + strategyName) + + const createStrategy = Builder(Chunker, ipldResolver, reducer, options) + + return createBuildStream(createStrategy, ipldResolver, flushTree, options) +} diff --git a/src/builder/reduce.js b/src/builder/reduce.js new file mode 100644 index 00000000..868fa15f --- /dev/null +++ b/src/builder/reduce.js @@ -0,0 +1,59 @@ +'use strict' + +const waterfall = require('async/waterfall') +const dagPB = require('ipld-dag-pb') +const UnixFS = require('ipfs-unixfs') +const CID = require('cids') + +const DAGLink = dagPB.DAGLink +const DAGNode = dagPB.DAGNode + +module.exports = function (file, ipldResolver, options) { + return function (leaves, callback) { + if (leaves.length === 1 && (leaves[0].single || options.reduceSingleLeafToSelf)) { + const leave = leaves[0] + callback(null, { + path: file.path, + multihash: leave.multihash, + size: leave.size, + leafSize: leave.leafSize, + name: leave.name + }) + return // early + } + + // create a parent node and add all the leafs + const f = new UnixFS('file') + + const links = leaves.map((leaf) => { + f.addBlockSize(leaf.leafSize) + + return new DAGLink(leaf.name, leaf.size, leaf.multihash) + }) + + waterfall([ + (cb) => DAGNode.create(f.marshal(), links, cb), + (node, cb) => { + ipldResolver.put({ + node: node, + cid: new CID(node.multihash) + }, (err) => cb(err, node)) + } + ], (err, node) => { + if (err) { + callback(err) + return // early + } + + const root = { + name: '', + path: file.path, + multihash: node.multihash, + size: node.size, + leafSize: f.fileSize() + } + + callback(null, root) + }) + } +} diff --git a/src/builder/trickle/index.js b/src/builder/trickle/index.js new file mode 100644 index 00000000..12931481 --- /dev/null +++ b/src/builder/trickle/index.js @@ -0,0 +1,13 @@ +'use strict' + +const trickleReducer = require('./trickle-reducer') + +const defaultOptions = { + maxChildrenPerNode: 174, + layerRepeat: 4 +} + +module.exports = function (reduce, _options) { + const options = Object.assign({}, defaultOptions, _options) + return trickleReducer(reduce, options) +} diff --git a/src/builder/trickle/trickle-reducer.js b/src/builder/trickle/trickle-reducer.js new file mode 100644 index 00000000..92f9456f --- /dev/null +++ b/src/builder/trickle/trickle-reducer.js @@ -0,0 +1,141 @@ +'use strict' + +const assert = require('assert') +const pull = require('pull-stream') +const pushable = require('pull-pushable') +const batch = require('pull-batch') +const pullPair = require('pull-pair') +const through = require('pull-through') +const pullWrite = require('pull-write') +const pause = require('pull-pause') + +module.exports = function trickleReduceToRoot (reduce, 
options) { + const pair = pullPair() + const result = pushable() + const pausable = pause(() => {}) + let pendingResumes = 0 + + pull( + pair.source, + pausable, + trickle(0, -1), + batch(Infinity), + pull.asyncMap(reduce), + pull.collect((err, nodes) => { + if (err) { + result.end(err) + } else { + assert.equal(nodes.length, 1, 'need one root') + result.push(nodes[0]) + result.end() + } + }) + ) + + return { + sink: pair.sink, + source: result + } + + function trickle (indent, maxDepth) { + let iteration = 0 + let depth = 0 + let deeper + let aborting = false + + const result = pushable() + + return { + source: result, + sink: pullWrite(write, null, 1, end) + } + + function write (nodes, callback) { + let ended = false + const node = nodes[0] + + if (depth && !deeper) { + deeper = pushable() + + pull( + deeper, + trickle(indent + 1, depth - 1), + through( + function (d) { + this.queue(d) + }, + function (err) { + if (err) { + this.emit('error', err) + return // early + } + if (!ended) { + ended = true + pendingResumes++ + pausable.pause() + } + this.queue(null) + } + ), + batch(Infinity), + pull.asyncMap(reduce), + pull.collect((err, nodes) => { + pendingResumes-- + if (err) { + result.end(err) + return + } + nodes.forEach(node => { + result.push(node) + }) + iterate() + }) + ) + } + + if (deeper) { + deeper.push(node) + } else { + result.push(node) + iterate() + } + + callback() + } + + function iterate () { + deeper = null + iteration++ + if (depth === 0 && iteration === options.maxChildrenPerNode || + depth > 0 && iteration === options.layerRepeat) { + iteration = 0 + depth++ + } + + if (!aborting && maxDepth >= 0 && depth > maxDepth || + aborting && !pendingResumes) { + aborting = true + result.end() + } + + if (!pendingResumes) { + pausable.resume() + } + } + + function end (err) { + if (err) { + result.end(err) + return + } + if (deeper) { + if (!aborting) { + aborting = true + deeper.end() + } + } else { + result.end() + } + } + } +} diff --git a/src/chunker/fixed-size.js b/src/chunker/fixed-size.js index 0533a389..d737a8fb 100644 --- a/src/chunker/fixed-size.js +++ b/src/chunker/fixed-size.js @@ -2,6 +2,7 @@ const pullBlock = require('pull-block') -module.exports = (size) => { - return pullBlock(size, { zeroPadding: false }) +module.exports = (options) => { + let maxSize = (typeof options === 'number') ? 
options : options.maxChunkSize + return pullBlock(maxSize, { zeroPadding: false }) } diff --git a/src/importer/flush-tree.js b/src/importer/flush-tree.js index 4d3e89a7..f465bc4a 100644 --- a/src/importer/flush-tree.js +++ b/src/importer/flush-tree.js @@ -13,6 +13,11 @@ module.exports = (files, ipldResolver, source, callback) => { // 1) convert files to a tree const fileTree = createTree(files) + if (Object.keys(fileTree).length > 1) { + callback(new Error('detected more than one root')) + return + } + if (Object.keys(fileTree).length === 0) { return callback()// no dirs to be created } diff --git a/src/importer/index.js b/src/importer/index.js index 7a3c5631..03c07e63 100644 --- a/src/importer/index.js +++ b/src/importer/index.js @@ -1,188 +1,20 @@ 'use strict' -const UnixFS = require('ipfs-unixfs') const assert = require('assert') -const pull = require('pull-stream') -const pullPushable = require('pull-pushable') -const pullWrite = require('pull-write') -const parallel = require('async/parallel') -const dagPB = require('ipld-dag-pb') -const CID = require('cids') -const waterfall = require('async/waterfall') - -const fsc = require('./../chunker/fixed-size') const createAndStoreTree = require('./flush-tree') +const Builder = require('../builder') -const DAGNode = dagPB.DAGNode -const DAGLink = dagPB.DAGLink - -const CHUNK_SIZE = 262144 - -module.exports = (ipldResolver, options) => { - assert(ipldResolver, 'Missing IPLD Resolver') - - const files = [] - - const source = pullPushable() - - const sink = pullWrite( - makeWriter(source, files, ipldResolver), - null, - 100, - (err) => { - if (err) { - return source.end(err) - } - - createAndStoreTree(files, ipldResolver, source, () => { - source.end() - }) - } - ) - - return { - source: source, - sink: sink - } -} - -function makeWriter (source, files, ipldResolver) { - return (items, cb) => { - parallel(items.map((item) => (cb) => { - if (!item.content) { - return createAndStoreDir(item, ipldResolver, (err, node) => { - if (err) { - return cb(err) - } - source.push(node) - files.push(node) - cb() - }) - } - - createAndStoreFile(item, ipldResolver, (err, node) => { - if (err) { - return cb(err) - } - source.push(node) - files.push(node) - cb() - }) - }), cb) - } +const chunkers = { + fixed: require('../chunker/fixed-size') } -function createAndStoreDir (item, ipldResolver, callback) { - // 1. create the empty dir dag node - // 2. write it to the dag store - - const d = new UnixFS('directory') - waterfall([ - (cb) => DAGNode.create(d.marshal(), cb), - (node, cb) => { - ipldResolver.put({ - node: node, - cid: new CID(node.multihash) - }, (err) => cb(err, node)) - } - ], (err, node) => { - if (err) { - return callback(err) - } - callback(null, { - path: item.path, - multihash: node.multihash, - size: node.size - }) - }) +const defaultOptions = { + chunker: 'fixed' } -function createAndStoreFile (file, ipldResolver, callback) { - if (Buffer.isBuffer(file.content)) { - file.content = pull.values([file.content]) - } - - if (typeof file.content !== 'function') { - return callback(new Error('invalid content')) - } - - // 1. create the unixfs merkledag node - // 2. 
add its hash and size to the leafs array - - // TODO - Support really large files - // a) check if we already reach max chunks if yes - // a.1) create a parent node for all of the current leaves - // b.2) clean up the leaves array and add just the parent node - - pull( - file.content, - fsc(CHUNK_SIZE), - pull.asyncMap((chunk, cb) => { - const l = new UnixFS('file', new Buffer(chunk)) - - DAGNode.create(l.marshal(), (err, node) => { - if (err) { - return cb(err) - } - - ipldResolver.put({ - node: node, - cid: new CID(node.multihash) - }, (err) => { - if (err) { - return cb(err) - } - - cb(null, { - Hash: node.multihash, - Size: node.size, - leafSize: l.fileSize(), - Name: '' - }) - }) - }) - }), - pull.collect((err, leaves) => { - if (err) { - return callback(err) - } - - if (leaves.length === 1) { - return callback(null, { - path: file.path, - multihash: leaves[0].Hash, - size: leaves[0].Size - }) - } - - // create a parent node and add all the leafs - const f = new UnixFS('file') - - const links = leaves.map((leaf) => { - f.addBlockSize(leaf.leafSize) - - return new DAGLink(leaf.Name, leaf.Size, leaf.Hash) - }) - - waterfall([ - (cb) => DAGNode.create(f.marshal(), links, cb), - (node, cb) => { - ipldResolver.put({ - node: node, - cid: new CID(node.multihash) - }, (err) => cb(err, node)) - } - ], (err, node) => { - if (err) { - return callback(err) - } - - callback(null, { - path: file.path, - multihash: node.multihash, - size: node.size - }) - }) - }) - ) +module.exports = function (ipldResolver, _options) { + const options = Object.assign({}, defaultOptions, _options) + const Chunker = chunkers[options.chunker] + assert(Chunker, 'Unknkown chunker named ' + options.chunker) + return Builder(Chunker, ipldResolver, createAndStoreTree, options) } diff --git a/test/browser.js b/test/browser.js index 60503760..2e1b6b5f 100644 --- a/test/browser.js +++ b/test/browser.js @@ -51,7 +51,12 @@ describe('IPFS data importing tests on the Browser', function () { // create the repo constant to be used in the import a small buffer test const repo = new IPFSRepo('ipfs', {stores: Store}) + require('./test-flat-builder') + require('./test-balanced-builder') + require('./test-trickle-builder') + require('./test-fixed-size-chunker') require('./test-exporter')(repo) require('./test-importer')(repo) - require('./test-fixed-size-chunker') + require('./test-import-export')(repo) + require('./test-hash-parity-with-go-ipfs')(repo) }) diff --git a/test/helpers/finite-pseudorandom-byte-stream.js b/test/helpers/finite-pseudorandom-byte-stream.js new file mode 100644 index 00000000..42824403 --- /dev/null +++ b/test/helpers/finite-pseudorandom-byte-stream.js @@ -0,0 +1,33 @@ +'use strict' + +const pull = require('pull-stream') +const generate = require('pull-generate') + +const randomByteStream = require('./random-byte-stream') +const chunker = require('../../src/chunker/fixed-size') + +const REPEATABLE_CHUNK_SIZE = 300000 + +module.exports = function (maxSize, seed) { + const chunks = Math.ceil(maxSize / REPEATABLE_CHUNK_SIZE) + return pull( + generate(0, generator), + pull.take(chunks) + ) + + function generator (iteration, cb) { + if (iteration === 0) { + pull( + randomByteStream(seed), + chunker(REPEATABLE_CHUNK_SIZE), + pull.take(1), + pull.collect((err, results) => { + const result = results[0] + cb(err, result, result) + }) + ) + } else { + cb(null, iteration, iteration) + } + } +} diff --git a/test/helpers/random-byte-stream.js b/test/helpers/random-byte-stream.js new file mode 100644 index 00000000..8dd4bce7 
--- /dev/null +++ b/test/helpers/random-byte-stream.js @@ -0,0 +1,19 @@ +'use strict' + +module.exports = function randomByteStream (_seed) { + let seed = _seed + return (end, cb) => { + if (end) { + cb(end) + } else { + const r = Math.floor(random(seed) * 256) + seed = r + cb(null, Buffer.from([r])) + } + } +} + +function random (seed) { + const x = Math.sin(seed) * 10000 + return x - Math.floor(x) +} diff --git a/test/node.js b/test/node.js index ae88ab82..e60daebc 100644 --- a/test/node.js +++ b/test/node.js @@ -9,7 +9,7 @@ const Store = require('fs-pull-blob-store') const mkdirp = require('mkdirp') const series = require('async/series') -describe('core', () => { +describe('IPFS UnixFS Engine', () => { const repoExample = path.join(process.cwd(), '/test/repo-example') const repoTests = path.join(process.cwd(), '/test/repo-tests' + Date.now()) @@ -36,7 +36,12 @@ describe('core', () => { }) const repo = new IPFSRepo(repoTests, {stores: Store}) + require('./test-flat-builder') + require('./test-balanced-builder') + require('./test-trickle-builder') + require('./test-fixed-size-chunker') require('./test-exporter')(repo) require('./test-importer')(repo) - require('./test-fixed-size-chunker') + require('./test-import-export')(repo) + require('./test-hash-parity-with-go-ipfs')(repo) }) diff --git a/test/test-balanced-builder.js b/test/test-balanced-builder.js new file mode 100644 index 00000000..a0488e58 --- /dev/null +++ b/test/test-balanced-builder.js @@ -0,0 +1,92 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('chai').expect +const pull = require('pull-stream') + +const builder = require('../src/builder/balanced') + +function reduce (leaves, callback) { + if (leaves.length > 1) { + callback(null, { children: leaves }) + } else { + callback(null, leaves[0]) + } +} + +const options = { + maxChildrenPerNode: 3 +} + +describe('balanced builder', () => { + it('reduces one value into itself', (callback) => { + pull( + pull.values([1]), + builder(reduce, options), + pull.collect((err, result) => { + expect(err).to.not.exist + expect(result).to.be.eql([1]) + callback() + }) + ) + }) + + it('reduces 3 values into parent', (callback) => { + pull( + pull.values([1, 2, 3]), + builder(reduce, options), + pull.collect((err, result) => { + expect(err).to.not.exist + expect(result).to.be.eql([{ + children: [1, 2, 3] + }]) + callback() + }) + ) + }) + + it('obeys max children per node', (callback) => { + pull( + pull.values([1, 2, 3, 4]), + builder(reduce, options), + pull.collect((err, result) => { + expect(err).to.not.exist + expect(result).to.be.eql([ + { + children: [ + { + children: [1, 2, 3] + }, + 4 + ] + } + ]) + callback() + }) + ) + }) + + it('refolds 2 parent nodes', (callback) => { + pull( + pull.values([1, 2, 3, 4, 5, 6, 7]), + builder(reduce, options), + pull.collect((err, result) => { + expect(err).to.not.exist + expect(result).to.be.eql([ + { + children: [ + { + children: [1, 2, 3] + }, + { + children: [4, 5, 6] + }, + 7 + ] + } + ]) + callback() + }) + ) + }) +}) diff --git a/test/test-exporter.js b/test/test-exporter.js index d1005281..731be425 100644 --- a/test/test-exporter.js +++ b/test/test-exporter.js @@ -25,30 +25,6 @@ module.exports = (repo) => { ipldResolver = new IPLDResolver(bs) }) - it('import and export', (done) => { - pull( - pull.values([{ - path: '1.2MiB.txt', - content: pull.values([ - bigFile, - Buffer('hello world') - ]) - }]), - unixFSEngine.importer(ipldResolver), - pull.map((file) => { - expect(file.path).to.be.eql('1.2MiB.txt') - - return 
exporter(file.multihash, ipldResolver) - }), - pull.flatten(), - pull.collect((err, files) => { - expect(err).to.not.exist - expect(files[0].size).to.be.eql(bigFile.length + 11) - fileEql(files[0], Buffer.concat([bigFile, Buffer('hello world')]), done) - }) - ) - }) - it('ensure hash inputs are sanitized', (done) => { const hash = 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8' const mhBuf = new Buffer(bs58.decode(hash)) diff --git a/test/test-flat-builder.js b/test/test-flat-builder.js new file mode 100644 index 00000000..358f4850 --- /dev/null +++ b/test/test-flat-builder.js @@ -0,0 +1,43 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('chai').expect +const pull = require('pull-stream') + +const builder = require('../src/builder/flat') + +function reduce (leaves, callback) { + if (leaves.length > 1) { + callback(null, { children: leaves }) + } else { + callback(null, leaves[0]) + } +} + +describe('flat builder', () => { + it('reduces one value into itself', (callback) => { + pull( + pull.values([1]), + builder(reduce), + pull.collect((err, result) => { + expect(err).to.not.exist + expect(result).to.be.eql([1]) + callback() + }) + ) + }) + + it('reduces 2 values into parent', (callback) => { + pull( + pull.values([1, 2]), + builder(reduce), + pull.collect((err, result) => { + expect(err).to.not.exist + expect(result).to.be.eql([{ + children: [1, 2] + }]) + callback() + }) + ) + }) +}) diff --git a/test/test-hash-parity-with-go-ipfs.js b/test/test-hash-parity-with-go-ipfs.js new file mode 100644 index 00000000..989b6225 --- /dev/null +++ b/test/test-hash-parity-with-go-ipfs.js @@ -0,0 +1,60 @@ +/* eslint-env mocha */ +'use strict' + +const importer = require('./../src').importer + +const expect = require('chai').expect +const BlockService = require('ipfs-block-service') +const pull = require('pull-stream') +const mh = require('multihashes') +const IPLDResolver = require('ipld-resolver') +const randomByteStream = require('./helpers/finite-pseudorandom-byte-stream') + +const strategies = [ + 'flat', + 'trickle', + 'balanced' +] + +const expectedHashes = { + flat: 'QmRgXEDv6DL8uchf7h9j8hAGG8Fq5r1UZ6Jy3TQAPxEb76', + balanced: 'QmVY1TFpjYKSo8LRG9oYgH4iy9AduwDvBGNhqap1Gkxme3', + trickle: 'QmYPsm9oVGjWECkT7KikZmrf8imggqKe8uS8Jco3qfWUCH' +} + +module.exports = (repo) => { + strategies.forEach(strategy => { + const options = { + strategy: strategy + } + + describe(strategy + ' importer', () => { + let ipldResolver + + before(() => { + const bs = new BlockService(repo) + ipldResolver = new IPLDResolver(bs) + }) + + it('yields the same tree as go-ipfs', (done) => { + pull( + pull.values([ + { + path: 'big.dat', + content: randomByteStream(45900000, 7382) + } + ]), + importer(ipldResolver, options), + pull.collect((err, files) => { + expect(err).to.not.exist + expect(files.length).to.be.equal(1) + + const file = files[0] + expect(mh.toB58String(file.multihash)).to.be.equal(expectedHashes[strategy]) + done() + }) + ) + }) + }) + }) +} diff --git a/test/test-import-export.js b/test/test-import-export.js new file mode 100644 index 00000000..103699bd --- /dev/null +++ b/test/test-import-export.js @@ -0,0 +1,101 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('chai').expect +const BlockService = require('ipfs-block-service') +const IPLDResolver = require('ipld-resolver') +const pull = require('pull-stream') + +const randomByteStream = require('./helpers/finite-pseudorandom-byte-stream') +const unixFSEngine = require('./../') +const exporter = unixFSEngine.exporter + 
+const strategies = [ + 'flat', + 'balanced', + 'trickle' +] + +module.exports = (repo) => { + let bigFile + strategies.forEach((strategy) => { + const importerOptions = { + strategy: strategy + } + + describe('import export using ' + strategy + ' builder strategy', () => { + let ipldResolver + + before(() => { + const bs = new BlockService(repo) + ipldResolver = new IPLDResolver(bs) + }) + + before((done) => { + if (bigFile) { + return done() + } + + pull( + randomByteStream(50000000, 8372), + pull.collect((err, buffers) => { + if (err) { + done(err) + } else { + bigFile = buffers + done() + } + }) + ) + }) + + it('import and export', (done) => { + const path = strategy + '-big.dat' + pull( + pull.values([{ + path: path, + content: pull.values(bigFile) + }]), + unixFSEngine.importer(ipldResolver, importerOptions), + pull.map((file) => { + expect(file.path).to.be.eql(path) + + return exporter(file.multihash, ipldResolver) + }), + pull.flatten(), + pull.collect((err, files) => { + expect(err).to.not.exist + expect(files[0].size).to.be.eql(bigFile.reduce(reduceLength, 0)) + fileEql(files[0], Buffer.concat(bigFile), done) + }) + ) + }) + }) + }) +} + +function fileEql (f1, f2, done) { + pull( + f1.content, + pull.collect((err, data) => { + if (err) { + return done(err) + } + + try { + if (f2) { + expect(Buffer.concat(data)).to.be.eql(f2) + } else { + expect(data).to.exist + } + } catch (err) { + return done(err) + } + done() + }) + ) +} + +function reduceLength (acc, chunk) { + return acc + chunk.length +} diff --git a/test/test-importer.js b/test/test-importer.js index 34f5fe1d..351208cf 100644 --- a/test/test-importer.js +++ b/test/test-importer.js @@ -2,6 +2,8 @@ 'use strict' const importer = require('./../src').importer + +const extend = require('deep-extend') const expect = require('chai').expect const BlockService = require('ipfs-block-service') const pull = require('pull-stream') @@ -19,249 +21,372 @@ function stringifyMh (files) { const bigFile = loadFixture(__dirname, 'fixtures/1.2MiB.txt') const smallFile = loadFixture(__dirname, 'fixtures/200Bytes.txt') +const baseFiles = { + '200Bytes.txt': { + path: '200Bytes.txt', + multihash: 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8', + size: 211, + name: '', + leafSize: 200 + }, + '1.2MiB.txt': { + path: '1.2MiB.txt', + multihash: 'QmbPN6CXXWpejfQgnRYnMQcVYkFHEntHWqLNQjbkatYCh1', + size: 1328062, + name: '', + leafSize: 1258000 + } +} + +const strategyBaseFiles = { + flat: baseFiles, + balanced: extend({}, baseFiles, { + '1.2MiB.txt': { + multihash: 'QmeEGqUisUD2T6zU96PrZnCkHfXCGuQeGWKu4UoSuaZL3d', + size: 1335420 + } + }), + trickle: extend({}, baseFiles, { + '1.2MiB.txt': { + multihash: 'QmaiSohNUt1rBf2Lqz6ou54NHVPTbXbBoPuq9td4ekcBx4', + size: 1334599 + } + }) +} + +const strategies = [ + 'flat', + 'balanced', + 'trickle' +] + +const strategyOverrides = { + balanced: { + 'foo-big': { + path: 'foo-big', + multihash: 'QmQ1S6eEamaf4t948etp8QiYQ9avrKCogiJnPRgNkVreLv', + size: 1335478 + }, + pim: { + multihash: 'QmUpzaN4Jio2GB3HoPSRCMQD5EagdMWjSEGD4SGZXaCw7W', + size: 1335744 + }, + 'pam/pum': { + multihash: 'QmUpzaN4Jio2GB3HoPSRCMQD5EagdMWjSEGD4SGZXaCw7W', + size: 1335744 + }, + pam: { + multihash: 'QmVoVD4fEWFLJLjvRCg4bGrziFhgECiaezp79AUfhuLgno', + size: 2671269 + } + }, + trickle: { + 'foo-big': { + path: 'foo-big', + multihash: 'QmPh6KSS7ghTqzgWhaoCiLoHFPF7HGqUxx7q9vcM5HUN4U', + size: 1334657 + }, + pim: { + multihash: 'QmPAn3G2x2nrq4A1fu2XUpwWtpqG4D1YXFDrU615NHvJbr', + size: 1334923 + }, + 'pam/pum': { + multihash: 
'QmPAn3G2x2nrq4A1fu2XUpwWtpqG4D1YXFDrU615NHvJbr', + size: 1334923 + }, + pam: { + multihash: 'QmZTJah1xpG9X33ZsPtDEi1tYSHGDqQMRHsGV5xKzAR2j4', + size: 2669627 + } + } + +} + module.exports = (repo) => { - describe('importer', () => { - let ipldResolver + strategies.forEach(strategy => { + const baseFiles = strategyBaseFiles[strategy] + const defaultResults = extend({}, baseFiles, { + 'foo/bar/200Bytes.txt': extend({}, baseFiles['200Bytes.txt'], { + path: 'foo/bar/200Bytes.txt' + }), + foo: { + path: 'foo', + multihash: 'QmQrb6KKWGo8w7zKfx2JksptY6wN7B2ysSBdKZr4xMU36d', + size: 320 + }, + 'foo/bar': { + path: 'foo/bar', + multihash: 'Qmf5BQbTUyUAvd6Ewct83GYGnE1F6btiC3acLhR8MDxgkD', + size: 270 + }, + 'foo-big/1.2MiB.txt': extend({}, baseFiles['1.2MiB.txt'], { + path: 'foo-big/1.2MiB.txt' + }), + 'foo-big': { + path: 'foo-big', + multihash: 'Qma6JU3FoXU9eAzgomtmYPjzFBwVc2rRbECQpmHFiA98CJ', + size: 1328120 + }, + 'pim/200Bytes.txt': extend({}, baseFiles['200Bytes.txt'], { + path: 'pim/200Bytes.txt' + }), + 'pim/1.2MiB.txt': extend({}, baseFiles['1.2MiB.txt'], { + path: 'pim/1.2MiB.txt' + }), + pim: { + path: 'pim', + multihash: 'QmNk8VPGb3fkAQgoxctXo4Wmnr4PayFTASy4MiVXTtXqiA', + size: 1328386 + }, + 'empty-dir': { + path: 'empty-dir', + multihash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', + size: 4 + }, + 'pam/pum': { + multihash: 'QmNk8VPGb3fkAQgoxctXo4Wmnr4PayFTASy4MiVXTtXqiA', + size: 1328386 + }, + pam: { + multihash: 'QmPAixYTaYnPe795fcWcuRpo6tfwHgRKNiBHpMzoomDVN6', + size: 2656553 + } + }, strategyOverrides[strategy]) - before(() => { - const bs = new BlockService(repo) - ipldResolver = new IPLDResolver(bs) - }) + const expected = extend({}, defaultResults, strategies[strategy]) + describe(strategy + ' importer', () => { + let ipldResolver - it('bad input', (done) => { - pull( - pull.values([{ - path: '200Bytes.txt', - content: 'banana' - }]), - importer(ipldResolver), - pull.onEnd((err) => { - expect(err).to.exist - done() - }) - ) - }) + const options = { + strategy: strategy, + maxChildrenPerNode: 10, + chunkerOptions: { + maxChunkSize: 1024 + } + } - it('small file (smaller than a chunk)', (done) => { - pull( - pull.values([{ - path: '200Bytes.txt', - content: pull.values([smallFile]) - }]), - importer(ipldResolver), - pull.collect((err, files) => { - expect(err).to.not.exist - expect(stringifyMh(files)).to.be.eql([{ + before(() => { + const bs = new BlockService(repo) + ipldResolver = new IPLDResolver(bs) + }) + + it('fails on bad input', (done) => { + pull( + pull.values([{ path: '200Bytes.txt', - multihash: 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8', - size: 211 - }]) - done() - }) - ) - }) + content: 'banana' + }]), + importer(ipldResolver, options), + pull.onEnd((err) => { + expect(err).to.exist + done() + }) + ) + }) - it('small file as buffer (smaller than a chunk)', (done) => { - pull( - pull.values([{ - path: '200Bytes.txt', - content: smallFile - }]), - importer(ipldResolver), - pull.collect((err, files) => { - expect(err).to.not.exist - expect(stringifyMh(files)).to.be.eql([{ + it('fails on more than one root', (done) => { + pull( + pull.values([ + { + path: '/beep/200Bytes.txt', + content: pull.values([smallFile]) + }, + { + path: '/boop/200Bytes.txt', + content: pull.values([smallFile]) + } + ]), + importer(ipldResolver, options), + pull.onEnd((err) => { + expect(err).to.exist + expect(err.message).to.be.eql('detected more than one root') + done() + }) + ) + }) + + it('small file (smaller than a chunk)', (done) => { + pull( + pull.values([{ path: 
'200Bytes.txt', - multihash: 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8', - size: 211 - }]) - done() - }) - ) - }) + content: pull.values([smallFile]) + }]), + importer(ipldResolver, options), + pull.collect((err, files) => { + expect(err).to.not.exist + expect(stringifyMh(files)).to.be.eql([expected['200Bytes.txt']]) + done() + }) + ) + }) + + it('small file as buffer (smaller than a chunk)', (done) => { + pull( + pull.values([{ + path: '200Bytes.txt', + content: smallFile + }]), + importer(ipldResolver, options), + pull.collect((err, files) => { + expect(err).to.not.exist + expect(stringifyMh(files)).to.be.eql([expected['200Bytes.txt']]) + done() + }) + ) + }) - it('small file (smaller than a chunk) inside a dir', (done) => { - pull( - pull.values([{ - path: 'foo/bar/200Bytes.txt', - content: pull.values([smallFile]) - }]), - importer(ipldResolver), - pull.collect((err, files) => { + it('small file (smaller than a chunk) inside a dir', (done) => { + pull( + pull.values([{ + path: 'foo/bar/200Bytes.txt', + content: pull.values([smallFile]) + }]), + importer(ipldResolver, options), + pull.collect(collected) + ) + + function collected (err, files) { expect(err).to.not.exist expect(files.length).to.equal(3) stringifyMh(files).forEach((file) => { if (file.path === 'foo/bar/200Bytes.txt') { - expect(file).to.be.eql({ - path: 'foo/bar/200Bytes.txt', - multihash: 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8', - size: 211 - }) + expect(file).to.be.eql(expected['foo/bar/200Bytes.txt']) } if (file.path === 'foo') { - expect(file).to.be.eql({ - path: 'foo', - multihash: 'QmQrb6KKWGo8w7zKfx2JksptY6wN7B2ysSBdKZr4xMU36d', - size: 320 - }) + expect(file).to.be.eql(expected.foo) } if (file.path === 'foo/bar') { - expect(file).to.be.eql({ - path: 'foo/bar', - multihash: 'Qmf5BQbTUyUAvd6Ewct83GYGnE1F6btiC3acLhR8MDxgkD', - size: 270 - }) + expect(file).to.be.eql(expected['foo/bar']) } }) done() - }) - ) - }) + } + }) - it('file bigger than a single chunk', (done) => { - pull( - pull.values([{ - path: '1.2MiB.txt', - content: pull.values([bigFile]) - }]), - importer(ipldResolver), - pull.collect((err, files) => { - expect(err).to.not.exist - expect(stringifyMh(files)).to.be.eql([{ + it('file bigger than a single chunk', (done) => { + pull( + pull.values([{ path: '1.2MiB.txt', - multihash: 'QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q', - size: 1258318 - }]) - done() - }) - ) - }) - - it('file bigger than a single chunk inside a dir', (done) => { - pull( - pull.values([{ - path: 'foo-big/1.2MiB.txt', - content: pull.values([bigFile]) - }]), - importer(ipldResolver), - pull.collect((err, files) => { - expect(err).to.not.exist + content: pull.values([bigFile]) + }]), + importer(ipldResolver, options), + pull.collect((err, files) => { + expect(err).to.not.exist + expect(stringifyMh(files)).to.be.eql([expected['1.2MiB.txt']]) + done() + }) + ) + }) - expect(stringifyMh(files)).to.be.eql([{ + it('file bigger than a single chunk inside a dir', (done) => { + pull( + pull.values([{ path: 'foo-big/1.2MiB.txt', - multihash: 'QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q', - size: 1258318 - }, { - path: 'foo-big', - multihash: 'QmaFgyFJUP4fxFySJCddg2Pj6rpwSywopWk87VEVv52RSj', - size: 1258376 - }]) - - done() - }) - ) - }) + content: pull.values([bigFile]) + }]), + importer(ipldResolver, options), + pull.collect((err, files) => { + expect(err).to.not.exist - it.skip('file (that chunk number exceeds max links)', (done) => { - // TODO - }) + expect(stringifyMh(files)).to.be.eql([ + 
expected['foo-big/1.2MiB.txt'], + expected['foo-big'] + ]) - it('empty directory', (done) => { - pull( - pull.values([{ - path: 'empty-dir' - }]), - importer(ipldResolver), - pull.collect((err, files) => { - expect(err).to.not.exist + done() + }) + ) + }) - expect(stringifyMh(files)).to.be.eql([{ - path: 'empty-dir', - multihash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', - size: 4 - }]) + it('empty directory', (done) => { + pull( + pull.values([{ + path: 'empty-dir' + }]), + importer(ipldResolver, options), + pull.collect((err, files) => { + expect(err).to.not.exist - done() - }) - ) - }) + expect(stringifyMh(files)).to.be.eql([expected['empty-dir']]) - it('directory with files', (done) => { - pull( - pull.values([{ - path: 'pim/200Bytes.txt', - content: pull.values([smallFile]) - }, { - path: 'pim/1.2MiB.txt', - content: pull.values([bigFile]) - }]), - importer(ipldResolver), - pull.collect((err, files) => { - expect(err).to.not.exist + done() + }) + ) + }) - expect(stringifyMh(files)).be.eql([{ + it('directory with files', (done) => { + pull( + pull.values([{ path: 'pim/200Bytes.txt', - multihash: 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8', - size: 211 + content: pull.values([smallFile]) }, { path: 'pim/1.2MiB.txt', - multihash: 'QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q', - size: 1258318 - }, { - path: 'pim', - multihash: 'QmY8a78tx6Tk6naDgWCgTsd9EqGrUJRrH7dDyQhjyrmH2i', - size: 1258642 - }]) + content: pull.values([bigFile]) + }]), + importer(ipldResolver, options), + pull.collect((err, files) => { + expect(err).to.not.exist - done() - }) - ) - }) + expect(stringifyMh(files)).be.eql([ + expected['pim/200Bytes.txt'], + expected['pim/1.2MiB.txt'], + expected.pim] + ) - it('nested directory (2 levels deep)', (done) => { - pull( - pull.values([{ - path: 'pam/pum/200Bytes.txt', - content: pull.values([smallFile]) - }, { - path: 'pam/pum/1.2MiB.txt', - content: pull.values([bigFile]) - }, { - path: 'pam/1.2MiB.txt', - content: pull.values([bigFile]) - }]), - importer(ipldResolver), - pull.collect((err, files) => { - expect(err).to.not.exist + done() + }) + ) + }) - // need to sort as due to parallel storage the order - // can vary - stringifyMh(files).forEach((file) => { - if (file.path === 'pam/pum/200Bytes.txt') { - expect(file.multihash).to.be.eql('QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8') - expect(file.size).to.be.eql(211) - } - if (file.path === 'pam/pum/1.2MiB.txt') { - expect(file.multihash).to.be.eql('QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q') - expect(file.size).to.be.eql(1258318) - } - if (file.path === 'pam/pum') { - expect(file.multihash).to.be.eql('QmY8a78tx6Tk6naDgWCgTsd9EqGrUJRrH7dDyQhjyrmH2i') - expect(file.size).to.be.eql(1258642) - } - if (file.path === 'pam/1.2MiB.txt') { - expect(file.multihash).to.be.eql('QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q') - expect(file.size).to.be.eql(1258318) - } - if (file.path === 'pam') { - expect(file.multihash).to.be.eql('QmRgdtzNx1H1BPJqShdhvWZ2D4DA2HUgZJ3XLtoXei27Av') - expect(file.size).to.be.eql(2517065) - } + it('nested directory (2 levels deep)', (done) => { + pull( + pull.values([{ + path: 'pam/pum/200Bytes.txt', + content: pull.values([smallFile]) + }, { + path: 'pam/pum/1.2MiB.txt', + content: pull.values([bigFile]) + }, { + path: 'pam/1.2MiB.txt', + content: pull.values([bigFile]) + }]), + importer(ipldResolver, options), + pull.collect((err, files) => { + expect(err).to.not.exist + + // need to sort as due to parallel storage the order + // can vary + stringifyMh(files).forEach(eachFile) + + 
done() }) + ) - done() - }) - ) + function eachFile (file) { + if (file.path === 'pam/pum/200Bytes.txt') { + expect(file.multihash).to.be.eql(expected['200Bytes.txt'].multihash) + expect(file.size).to.be.eql(expected['200Bytes.txt'].size) + } + if (file.path === 'pam/pum/1.2MiB.txt') { + expect(file.multihash).to.be.eql(expected['1.2MiB.txt'].multihash) + expect(file.size).to.be.eql(expected['1.2MiB.txt'].size) + } + if (file.path === 'pam/pum') { + const dir = expected['pam/pum'] + expect(file.multihash).to.be.eql(dir.multihash) + expect(file.size).to.be.eql(dir.size) + } + if (file.path === 'pam/1.2MiB.txt') { + expect(file.multihash).to.be.eql(expected['1.2MiB.txt'].multihash) + expect(file.size).to.be.eql(expected['1.2MiB.txt'].size) + } + if (file.path === 'pam') { + const dir = expected.pam + expect(file.multihash).to.be.eql(dir.multihash) + expect(file.size).to.be.eql(dir.size) + } + } + }) }) }) } diff --git a/test/test-trickle-builder.js b/test/test-trickle-builder.js new file mode 100644 index 00000000..9ec52827 --- /dev/null +++ b/test/test-trickle-builder.js @@ -0,0 +1,465 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('chai').expect +const pull = require('pull-stream') + +const builder = require('../src/builder/trickle') + +function reduce (leaves, callback) { + if (leaves.length > 1) { + setTimeout(() => callback(null, { children: leaves }), 10) + } else { + setTimeout(() => callback(null, leaves[0]), 10) + } +} + +const options = { + maxChildrenPerNode: 3, + layerRepeat: 2 +} + +describe('trickle builder', () => { + it('reduces one value into itself', callback => { + pull( + pull.values([1]), + builder(reduce, options), + pull.collect((err, result) => { + expect(err).to.not.exist + expect(result).to.be.eql([1]) + callback() + }) + ) + }) + + it('reduces 3 values into parent', callback => { + pull( + pull.count(2), + builder(reduce, options), + pull.collect((err, result) => { + expect(err).to.not.exist + expect(result).to.be.eql([ + { + children: [ + 0, + 1, + 2 + ] + } + ]) + callback() + }) + ) + }) + + it('reduces 6 values correclty', callback => { + pull( + pull.count(5), + builder(reduce, options), + pull.collect((err, result) => { + expect(err).to.not.exist + expect(result).to.be.eql([ + { + children: [ + 0, + 1, + 2, + { + children: [ + 3, + 4, + 5 + ] + } + ] + } + ]) + callback() + }) + ) + }) + + it('reduces 9 values correclty', callback => { + pull( + pull.count(8), + builder(reduce, options), + pull.collect((err, result) => { + expect(err).to.not.exist + expect(result).to.be.eql([ + { + children: [ + 0, + 1, + 2, + { + children: [ + 3, + 4, + 5 + ] + }, + { + children: [ + 6, + 7, + 8 + ] + } + ] + } + ]) + callback() + }) + ) + }) + + it('reduces 12 values correclty', callback => { + pull( + pull.count(11), + builder(reduce, options), + pull.collect((err, result) => { + expect(err).to.not.exist + setTimeout(() => { + expect(result).to.be.eql([ + { + children: [ + 0, + 1, + 2, + { + children: [ + 3, + 4, + 5 + ] + }, + { + children: [ + 6, + 7, + 8 + ] + }, + { + children: [ + 9, + 10, + 11 + ] + } + ] + } + ]) + callback() + }, 100) + }) + ) + }) + + it('reduces 21 values correclty', callback => { + pull( + pull.count(20), + builder(reduce, options), + pull.collect((err, result) => { + expect(err).to.not.exist + expect(result).to.be.eql([ + { + children: [ + 0, + 1, + 2, + { + children: [ + 3, + 4, + 5 + ] + }, + { + children: [ + 6, + 7, + 8 + ] + }, + { + children: [ + 9, + 10, + 11, + { + children: [ + 12, + 13, + 14 + ] + }, + { + 
children: [ + 15, + 16, + 17 + ] + } + ] + }, + { + children: [ + 18, + 19, + 20 + ] + } + ] + } + ]) + callback() + }) + ) + }) + + it('forms correct trickle tree', callback => { + pull( + pull.count(99), + builder(reduce, options), + pull.collect((err, result) => { + expect(err).to.not.exist + expect(result).to.be.eql([ + { + children: [ + 0, + 1, + 2, + { + children: [ + 3, + 4, + 5 + ] + }, + { + children: [ + 6, + 7, + 8 + ] + }, + { + children: [ + 9, + 10, + 11, + { + children: [ + 12, + 13, + 14 + ] + }, + { + children: [ + 15, + 16, + 17 + ] + } + ] + }, + { + children: [ + 18, + 19, + 20, + { + children: [ + 21, + 22, + 23 + ] + }, + { + children: [ + 24, + 25, + 26 + ] + } + ] + }, + { + children: [ + 27, + 28, + 29, + { + children: [ + 30, + 31, + 32 + ] + }, + { + children: [ + 33, + 34, + 35 + ] + }, + { + children: [ + 36, + 37, + 38, + { + children: [ + 39, + 40, + 41 + ] + }, + { + children: [ + 42, + 43, + 44 + ] + } + ] + }, + { + children: [ + 45, + 46, + 47, + { + children: [ + 48, + 49, + 50 + ] + }, + { + children: [ + 51, + 52, + 53 + ] + } + ] + } + ] + }, + { + children: [ + 54, + 55, + 56, + { + children: [ + 57, + 58, + 59 + ] + }, + { + children: [ + 60, + 61, + 62 + ] + }, + { + children: [ + 63, + 64, + 65, + { + children: [ + 66, + 67, + 68 + ] + }, + { + children: [ + 69, + 70, + 71 + ] + } + ] + }, + { + children: [ + 72, + 73, + 74, + { + children: [ + 75, + 76, + 77 + ] + }, + { + children: [ + 78, + 79, + 80 + ] + } + ] + } + ] + }, + { + children: [ + 81, + 82, + 83, + { + children: [ + 84, + 85, + 86 + ] + }, + { + children: [ + 87, + 88, + 89 + ] + }, + { + children: [ + 90, + 91, + 92, + { + children: [ + 93, + 94, + 95 + ] + }, + { + children: [ + 96, + 97, + 98 + ] + } + ] + }, + 99 + ] + } + ] + } + ]) + callback() + }) + ) + }) +})
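For reference, here is a minimal sketch of how the importer options documented in the README hunk above might be wired together, modeled on the new test setup in this diff. The package require path, the repo construction, and the option values are illustrative assumptions, not part of this change.

```js
'use strict'

const pull = require('pull-stream')
const BlockService = require('ipfs-block-service')
const IPLDResolver = require('ipld-resolver')
const IPFSRepo = require('ipfs-repo')
const Store = require('fs-pull-blob-store')

// Hypothetical package name; the tests in this diff use require('./../src')
const unixFSEngine = require('ipfs-unixfs-engine')

// Assumption: an already-initialized ipfs-repo, as in test/node.js
const repo = new IPFSRepo('/tmp/unixfs-example-repo', {stores: Store})
const ipldResolver = new IPLDResolver(new BlockService(repo))

const options = {
  chunker: 'fixed',            // only 'fixed' is currently supported
  chunkerOptions: {
    maxChunkSize: 262144       // bytes per leaf chunk
  },
  strategy: 'trickle',         // or 'flat' / 'balanced' (the default)
  maxChildrenPerNode: 174,     // balanced and trickle strategies
  layerRepeat: 4               // trickle strategy only
}

pull(
  pull.values([{
    path: 'hello.txt',
    content: pull.values([Buffer.from('hello world')])
  }]),
  unixFSEngine.importer(ipldResolver, options),
  pull.collect((err, files) => {
    if (err) throw err
    // each emitted entry carries { path, multihash, size }
    console.log(files)
  })
)
```

As in the new `test-import-export.js`, the resulting `multihash` can then be handed to `exporter(file.multihash, ipldResolver)` to read the imported file back out.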