From eb54feae58721e9009968a0c059e584500ce97d0 Mon Sep 17 00:00:00 2001 From: Friedel Ziegelmayer Date: Wed, 28 Sep 2016 16:39:23 +0200 Subject: [PATCH 1/6] more fixes for merkledag changes --- package.json | 7 +- src/exporter/index.js | 7 +- src/importer.js | 168 ++++++++++++++++++++++++++++++++++++++++++ src/importer/index.js | 2 +- src/util.js | 8 ++ test/node.js | 2 +- 6 files changed, 187 insertions(+), 7 deletions(-) create mode 100644 src/importer.js diff --git a/package.json b/package.json index 716cf27a..8f959555 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "ipfs-unixfs-engine", "version": "0.12.0", "description": "JavaScript implementation of the unixfs Engine used by IPFS", - "main": "lib/index.js", + "main": "src/index.js", "jsnext:main": "src/index.js", "scripts": { "lint": "aegir-lint", @@ -45,10 +45,10 @@ "pre-commit": "^1.1.3", "pull-zip": "^2.0.1", "raw-loader": "^0.5.1", - "rimraf": "^2.5.4", - "run-series": "^1.1.4" + "rimraf": "^2.5.4" }, "dependencies": { + "async": "^2.0.1", "cids": "^0.2.0", "ipfs-unixfs": "^0.1.4", "ipld-dag-pb": "^0.1.3", @@ -61,7 +61,6 @@ "pull-stream": "^3.4.5", "pull-traverse": "^1.0.3", "pull-write": "^1.1.1", - "run-parallel": "^1.1.6" }, "contributors": [ "David Dias ", diff --git a/src/exporter/index.js b/src/exporter/index.js index e87970fb..aa14bca7 100644 --- a/src/exporter/index.js +++ b/src/exporter/index.js @@ -12,7 +12,12 @@ const dirExporter = require('./dir') const fileExporter = require('./file') module.exports = (hash, ipldResolver, options) => { - hash = cleanMultihash(hash) + try { + hash = cleanMultihash(hash) + } catch (err) { + return pull.error(err) + } + options = options || {} function visitor (item) { diff --git a/src/importer.js b/src/importer.js new file mode 100644 index 00000000..900f2386 --- /dev/null +++ b/src/importer.js @@ -0,0 +1,168 @@ +'use strict' + +const merkleDAG = require('ipfs-merkle-dag') +const UnixFS = require('ipfs-unixfs') +const assert = require('assert') +const pull = require('pull-stream') +const pushable = require('pull-pushable') +const write = require('pull-write') +const parallel = require('async/parallel') +const waterfall = require('async/waterfall') + +const fsc = require('./chunker-fixed-size') +const createAndStoreTree = require('./tree') +const getSizeAndHash = require('./util').getSizeAndHash + +const DAGNode = merkleDAG.DAGNode + +const CHUNK_SIZE = 262144 + +module.exports = (dagService, options) => { + assert(dagService, 'Missing dagService') + + const files = [] + + const source = pushable() + const sink = write( + makeWriter(source, files, dagService), + null, + 100, + (err) => { + if (err) return source.end(err) + + createAndStoreTree(files, dagService, source, () => { + source.end() + }) + } + ) + + return {source, sink} +} + +function makeWriter (source, files, dagService) { + return (items, cb) => { + parallel(items.map((item) => (cb) => { + if (!item.content) { + return createAndStoreDir(item, dagService, (err, node) => { + if (err) return cb(err) + source.push(node) + files.push(node) + cb() + }) + } + + createAndStoreFile(item, dagService, (err, node) => { + if (err) return cb(err) + source.push(node) + files.push(node) + cb() + }) + }), cb) + } +} + +function createAndStoreDir (item, ds, cb) { + // 1. create the empty dir dag node + // 2. 
write it to the dag store + + const d = new UnixFS('directory') + const n = new DAGNode() + n.data = d.marshal() + + waterfall([ + (cb) => ds.put(n, cb), + (cb) => getSizeAndHash(n, cb), + (res, cb) => { + cb(null, { + path: item.path, + multihash: res.multihash, + size: res.size + // dataSize: d.fileSize() + }) + } + ], cb) +} + +function createAndStoreFile (file, ds, cb) { + if (Buffer.isBuffer(file.content)) { + file.content = pull.values([file.content]) + } + + if (typeof file.content !== 'function') { + return cb(new Error('invalid content')) + } + + // 1. create the unixfs merkledag node + // 2. add its hash and size to the leafs array + + // TODO - Support really large files + // a) check if we already reach max chunks if yes + // a.1) create a parent node for all of the current leaves + // b.2) clean up the leaves array and add just the parent node + + pull( + file.content, + fsc(CHUNK_SIZE), + pull.asyncMap((chunk, cb) => { + const l = new UnixFS('file', Buffer(chunk)) + const n = new DAGNode(l.marshal()) + + waterfall([ + (cb) => ds.put(n, cb), + (cb) => getSizeAndHash(n, cb) + ], (err, stats) => { + if (err) { + return cb(err) + } + + cb(null, { + Hash: stats.multihash, + Size: stats.size, + leafSize: l.fileSize(), + Name: '' + }) + }) + }), + pull.collect((err, leaves) => { + if (err) return cb(err) + + if (leaves.length === 1) { + return cb(null, { + path: file.path, + multihash: leaves[0].Hash, + size: leaves[0].Size + // dataSize: leaves[0].leafSize + }) + } + + // create a parent node and add all the leafs + + const f = new UnixFS('file') + const n = new merkleDAG.DAGNode() + + for (let leaf of leaves) { + f.addBlockSize(leaf.leafSize) + n.addRawLink( + new merkleDAG.DAGLink(leaf.Name, leaf.Size, leaf.Hash) + ) + } + + n.data = f.marshal() + parallel([ + (cb) => ds.put(n, cb), + (cb) => getSizeAndHash(n, cb) + ], (err, res) => { + if (err) { + return cb(err) + } + + cb(null, { + path: file.path, + multihash: res[1].multihash, + size: res[1].size + // dataSize: f.fileSize() + }) + }) + }) + ) +} diff --git a/src/importer/index.js b/src/importer/index.js index bcf770d7..a26ea662 100644 --- a/src/importer/index.js +++ b/src/importer/index.js @@ -5,7 +5,7 @@ const assert = require('assert') const pull = require('pull-stream') const pullPushable = require('pull-pushable') const pullWrite = require('pull-write') -const parallel = require('run-parallel') +const parallel = require('async/parallel') const dagPB = require('ipld-dag-pb') const CID = require('cids') diff --git a/src/util.js b/src/util.js index 8f44ae01..79ebe181 100644 --- a/src/util.js +++ b/src/util.js @@ -4,6 +4,7 @@ const UnixFS = require('ipfs-unixfs') const pull = require('pull-stream') const mh = require('multihashes') const isIPFS = require('is-ipfs') +const parallel = require('async/parallel') exports.switchType = (node, dirHandler, fileHandler) => { const data = UnixFS.unmarshal(node.data) @@ -26,3 +27,10 @@ exports.cleanMultihash = (multihash) => { return multihash } + +exports.getSizeAndHash = (n, cb) => { + parallel({ + multihash: (cb) => n.multihash(cb), + size: (cb) => n.size(cb) + }, cb) +} diff --git a/test/node.js b/test/node.js index 33c81a2a..ae88ab82 100644 --- a/test/node.js +++ b/test/node.js @@ -7,7 +7,7 @@ const path = require('path') const IPFSRepo = require('ipfs-repo') const Store = require('fs-pull-blob-store') const mkdirp = require('mkdirp') -const series = require('run-series') +const series = require('async/series') describe('core', () => { const repoExample = path.join(process.cwd(), 
'/test/repo-example') From 244611d895403717067d880e02bd8724d1d5c941 Mon Sep 17 00:00:00 2001 From: Friedel Ziegelmayer Date: Thu, 29 Sep 2016 10:55:01 +0200 Subject: [PATCH 2/6] fix some more tests --- src/exporter/index.js | 11 +++++++---- src/importer.js | 9 +++++++-- src/util.js | 5 ----- 3 files changed, 14 insertions(+), 11 deletions(-) diff --git a/src/exporter/index.js b/src/exporter/index.js index aa14bca7..d212cc67 100644 --- a/src/exporter/index.js +++ b/src/exporter/index.js @@ -2,7 +2,11 @@ const traverse = require('pull-traverse') const pull = require('pull-stream') +<<<<<<< HEAD:src/exporter/index.js const CID = require('cids') +======= +const isIPFS = require('is-ipfs') +>>>>>>> fix some more tests:src/exporter.js const util = require('./../util') const switchType = util.switchType @@ -12,12 +16,11 @@ const dirExporter = require('./dir') const fileExporter = require('./file') module.exports = (hash, ipldResolver, options) => { - try { - hash = cleanMultihash(hash) - } catch (err) { - return pull.error(err) + if (!isIPFS.multihash(hash)) { + return pull.error(new Error('not valid multihash')) } + hash = cleanMultihash(hash) options = options || {} function visitor (item) { diff --git a/src/importer.js b/src/importer.js index 900f2386..6dc92c60 100644 --- a/src/importer.js +++ b/src/importer.js @@ -28,9 +28,14 @@ module.exports = (dagService, options) => { null, 100, (err) => { - if (err) return source.end(err) + if (err) { + return source.end(err) + } - createAndStoreTree(files, dagService, source, () => { + createAndStoreTree(files, dagService, source, (err) => { + if (err) { + return source.end(err) + } source.end() }) } diff --git a/src/util.js b/src/util.js index 79ebe181..78d114e2 100644 --- a/src/util.js +++ b/src/util.js @@ -3,7 +3,6 @@ const UnixFS = require('ipfs-unixfs') const pull = require('pull-stream') const mh = require('multihashes') -const isIPFS = require('is-ipfs') const parallel = require('async/parallel') exports.switchType = (node, dirHandler, fileHandler) => { @@ -17,10 +16,6 @@ exports.switchType = (node, dirHandler, fileHandler) => { } exports.cleanMultihash = (multihash) => { - if (!isIPFS.multihash(multihash)) { - throw new Error('not valid multihash') - } - if (Buffer.isBuffer(multihash)) { return mh.toB58String(multihash) } From d8b68ef324da8d4f913e72f2666bfa33f469e1dd Mon Sep 17 00:00:00 2001 From: Friedel Ziegelmayer Date: Thu, 27 Oct 2016 13:22:02 +0200 Subject: [PATCH 3/6] next aegir --- .gitignore | 1 - .travis.yml | 2 +- package.json | 16 +++++++++------- 3 files changed, 10 insertions(+), 9 deletions(-) diff --git a/.gitignore b/.gitignore index 103a91cf..22bf811a 100644 --- a/.gitignore +++ b/.gitignore @@ -30,4 +30,3 @@ build/Release node_modules dist -lib diff --git a/.travis.yml b/.travis.yml index dd9e44ef..fc1482fc 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,7 +2,7 @@ sudo: false language: node_js node_js: - 4 - - 5 + - 6 - stable # Make sure we have new NPM. 
diff --git a/package.json b/package.json index 8f959555..32bf462e 100644 --- a/package.json +++ b/package.json @@ -3,16 +3,15 @@ "version": "0.12.0", "description": "JavaScript implementation of the unixfs Engine used by IPFS", "main": "src/index.js", - "jsnext:main": "src/index.js", "scripts": { "lint": "aegir-lint", "build": "aegir-build", - "test": "PHANTOM=off aegir-test", + "test": "aegir-test", "test:node": "aegir-test --env node", - "test:browser": "PHANTOM=off aegir-test --env browser", - "release": "PHANTOM=off aegir-release", - "release-minor": "PHANTOM=off aegir-release --type minor", - "release-major": "PHANTOM=off aegir-release --type major", + "test:browser": "aegir-test --env browser", + "release": "aegir-release", + "release-minor": "aegir-release --type minor", + "release-major": "aegir-release --type major", "coverage": "aegir-coverage", "coverage-publish": "aegir-coverage publish" }, @@ -32,7 +31,10 @@ "bugs": { "url": "https://github.com/ipfs/js-ipfs-unixfs-engine/issues" }, - "homepage": "https://github.com/ipfs/js-ipfs-unixfs-engineg#readme", + "engines": { + "node": ">=4.0.0" + }, + "homepage": "https://github.com/ipfs/js-ipfs-unixfs-engine#readme", "devDependencies": { "aegir": "^8.1.2", "buffer-loader": "0.0.1", From c24e027a1a652a4af0632e92ff278351a6b7d81d Mon Sep 17 00:00:00 2001 From: Friedel Ziegelmayer Date: Wed, 2 Nov 2016 11:09:28 +0100 Subject: [PATCH 4/6] ready for the next level --- .travis.yml | 26 ++- README.md | 8 +- package.json | 14 +- src/exporter/index.js | 3 - src/importer.js | 173 ------------------ test/{test-data => fixtures}/1.2MiB.txt | Bin test/{test-data => fixtures}/1.2MiB.txt.block | Bin .../1.2MiB.txt.link-block0 | Bin .../1.2MiB.txt.link-block1 | Bin .../1.2MiB.txt.link-block2 | Bin .../1.2MiB.txt.link-block3 | Bin .../1.2MiB.txt.link-block4 | Bin .../1.2MiB.txt.unixfs-file | 0 .../1.2MiB.txt.unixfs-raw0 | Bin .../1.2MiB.txt.unixfs-raw1 | Bin .../1.2MiB.txt.unixfs-raw2 | Bin .../1.2MiB.txt.unixfs-raw3 | Bin .../1.2MiB.txt.unixfs-raw4 | Bin test/{test-data => fixtures}/1MiB.txt | Bin test/{test-data => fixtures}/200Bytes.txt | 0 .../200Bytes.txt.block | 0 .../200Bytes.txt.unixfs-file | 0 test/{test-data => fixtures}/dir-big.block | 0 .../dir-big.unixfs-dir | 0 .../dir-big/1.2MiB.txt | Bin test/{test-data => fixtures}/dir-nested.block | 0 .../dir-nested/200Bytes.txt | 0 .../dir-nested/level-1/200Bytes.txt | 0 test/{test-data => fixtures}/dir-small.block | 0 .../dir-small.unixfs-dir | 0 .../dir-small/200Bytes.txt | 0 test/{test-data => fixtures}/empty.txt | 0 .../foo-big/1.2MiB.txt | Bin .../foo/bar/200Bytes.txt | 0 test/{test-data => fixtures}/ipfsmarket-1.ogv | Bin test/{test-data => fixtures}/pam/1.2MiB.txt | Bin .../pam/pum/1.2MiB.txt | Bin .../pam/pum/200Bytes.txt | 0 test/{test-data => fixtures}/pim/1.2MiB.txt | Bin test/{test-data => fixtures}/pim/200Bytes.txt | 0 test/{test-data => fixtures}/test-file.txt | 0 .../test-file.txt.link-block0 | 0 test/{test-data => fixtures}/test-video.ogv | Bin test/test-exporter.js | 7 +- test/test-fixed-size-chunker.js | 7 +- test/test-importer.js | 13 +- 46 files changed, 42 insertions(+), 209 deletions(-) delete mode 100644 src/importer.js rename test/{test-data => fixtures}/1.2MiB.txt (100%) rename test/{test-data => fixtures}/1.2MiB.txt.block (100%) rename test/{test-data => fixtures}/1.2MiB.txt.link-block0 (100%) rename test/{test-data => fixtures}/1.2MiB.txt.link-block1 (100%) rename test/{test-data => fixtures}/1.2MiB.txt.link-block2 (100%) rename test/{test-data => 
fixtures}/1.2MiB.txt.link-block3 (100%) rename test/{test-data => fixtures}/1.2MiB.txt.link-block4 (100%) rename test/{test-data => fixtures}/1.2MiB.txt.unixfs-file (100%) rename test/{test-data => fixtures}/1.2MiB.txt.unixfs-raw0 (100%) rename test/{test-data => fixtures}/1.2MiB.txt.unixfs-raw1 (100%) rename test/{test-data => fixtures}/1.2MiB.txt.unixfs-raw2 (100%) rename test/{test-data => fixtures}/1.2MiB.txt.unixfs-raw3 (100%) rename test/{test-data => fixtures}/1.2MiB.txt.unixfs-raw4 (100%) rename test/{test-data => fixtures}/1MiB.txt (100%) rename test/{test-data => fixtures}/200Bytes.txt (100%) rename test/{test-data => fixtures}/200Bytes.txt.block (100%) rename test/{test-data => fixtures}/200Bytes.txt.unixfs-file (100%) rename test/{test-data => fixtures}/dir-big.block (100%) rename test/{test-data => fixtures}/dir-big.unixfs-dir (100%) rename test/{test-data => fixtures}/dir-big/1.2MiB.txt (100%) rename test/{test-data => fixtures}/dir-nested.block (100%) rename test/{test-data => fixtures}/dir-nested/200Bytes.txt (100%) rename test/{test-data => fixtures}/dir-nested/level-1/200Bytes.txt (100%) rename test/{test-data => fixtures}/dir-small.block (100%) rename test/{test-data => fixtures}/dir-small.unixfs-dir (100%) rename test/{test-data => fixtures}/dir-small/200Bytes.txt (100%) rename test/{test-data => fixtures}/empty.txt (100%) rename test/{test-data => fixtures}/foo-big/1.2MiB.txt (100%) rename test/{test-data => fixtures}/foo/bar/200Bytes.txt (100%) rename test/{test-data => fixtures}/ipfsmarket-1.ogv (100%) rename test/{test-data => fixtures}/pam/1.2MiB.txt (100%) rename test/{test-data => fixtures}/pam/pum/1.2MiB.txt (100%) rename test/{test-data => fixtures}/pam/pum/200Bytes.txt (100%) rename test/{test-data => fixtures}/pim/1.2MiB.txt (100%) rename test/{test-data => fixtures}/pim/200Bytes.txt (100%) rename test/{test-data => fixtures}/test-file.txt (100%) rename test/{test-data => fixtures}/test-file.txt.link-block0 (100%) rename test/{test-data => fixtures}/test-video.ogv (100%) diff --git a/.travis.yml b/.travis.yml index fc1482fc..b5409ef4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,9 +1,16 @@ sudo: false language: node_js -node_js: - - 4 - - 6 - - stable + +matrix: + include: + - node_js: 4 + env: CXX=g++-4.8 + - node_js: 6 + env: + - SAUCE=true + - CXX=g++-4.8 + - node_js: stable + env: CXX=g++-4.8 # Make sure we have new NPM. 
before_install: @@ -14,12 +21,17 @@ script: - npm test - npm run coverage -addons: - firefox: 'latest' - before_script: - export DISPLAY=:99.0 - sh -e /etc/init.d/xvfb start after_success: - npm run coverage-publish + +addons: + firefox: 'latest' + apt: + sources: + - ubuntu-toolchain-r-test + packages: + - g++-4.8 \ No newline at end of file diff --git a/README.md b/README.md index 19592aed..fbf69031 100644 --- a/README.md +++ b/README.md @@ -9,6 +9,10 @@ IPFS unixFS Engine [![Coverage Status](https://coveralls.io/repos/github/ipfs/js-ipfs-unixfs-engine/badge.svg?branch=master)](https://coveralls.io/github/ipfs/js-ipfs-unixfs-engine?branch=master) [![Dependency Status](https://david-dm.org/ipfs/js-ipfs-unixfs-engine.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-unixfs-engine) [![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat-square)](https://github.com/feross/standard) +![](https://img.shields.io/badge/npm-%3E%3D3.0.0-orange.svg?style=flat-square) +![](https://img.shields.io/badge/Node.js-%3E%3D4.0.0-orange.svg?style=flat-square) + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/ipfs-unixfs-engine.svg)](https://saucelabs.com/u/ipfs-unixfs-engine) > JavaScript implementation of the layout and chunking mechanisms used by IPFS @@ -114,11 +118,11 @@ When run, the stat of DAG Node is outputted for each file on data event until th { multihash: , size: 93242, - path: '/tmp/foo' } + path: '/tmp/foo' } { multihash: , size: 94234, - path: '/tmp' } + path: '/tmp' } ``` diff --git a/package.json b/package.json index 32bf462e..01cd21de 100644 --- a/package.json +++ b/package.json @@ -3,6 +3,9 @@ "version": "0.12.0", "description": "JavaScript implementation of the unixfs Engine used by IPFS", "main": "src/index.js", + "browser": { + "fs": false + }, "scripts": { "lint": "aegir-lint", "build": "aegir-build", @@ -36,7 +39,7 @@ }, "homepage": "https://github.com/ipfs/js-ipfs-unixfs-engine#readme", "devDependencies": { - "aegir": "^8.1.2", + "aegir": "^9.0.1", "buffer-loader": "0.0.1", "chai": "^3.5.0", "fs-pull-blob-store": "^0.4.1", @@ -46,15 +49,14 @@ "ncp": "^2.0.0", "pre-commit": "^1.1.3", "pull-zip": "^2.0.1", - "raw-loader": "^0.5.1", "rimraf": "^2.5.4" }, "dependencies": { - "async": "^2.0.1", + "async": "^2.1.2", "cids": "^0.2.0", "ipfs-unixfs": "^0.1.4", "ipld-dag-pb": "^0.1.3", - "ipld-resolver": "^0.1.1", + "ipld-resolver": "^0.1.2", "is-ipfs": "^0.2.1", "multihashes": "^0.2.2", "pull-block": "^1.0.2", @@ -62,7 +64,7 @@ "pull-pushable": "^2.0.1", "pull-stream": "^3.4.5", "pull-traverse": "^1.0.3", - "pull-write": "^1.1.1", + "pull-write": "^1.1.1" }, "contributors": [ "David Dias ", @@ -74,4 +76,4 @@ "jbenet ", "nginnever " ] -} \ No newline at end of file +} diff --git a/src/exporter/index.js b/src/exporter/index.js index d212cc67..63fba476 100644 --- a/src/exporter/index.js +++ b/src/exporter/index.js @@ -2,11 +2,8 @@ const traverse = require('pull-traverse') const pull = require('pull-stream') -<<<<<<< HEAD:src/exporter/index.js const CID = require('cids') -======= const isIPFS = require('is-ipfs') ->>>>>>> fix some more tests:src/exporter.js const util = require('./../util') const switchType = util.switchType diff --git a/src/importer.js b/src/importer.js deleted file mode 100644 index 6dc92c60..00000000 --- a/src/importer.js +++ /dev/null @@ -1,173 +0,0 @@ -'use strict' - -const merkleDAG = require('ipfs-merkle-dag') -const UnixFS = require('ipfs-unixfs') -const assert = require('assert') -const pull = 
require('pull-stream') -const pushable = require('pull-pushable') -const write = require('pull-write') -const parallel = require('async/parallel') -const waterfall = require('async/waterfall') - -const fsc = require('./chunker-fixed-size') -const createAndStoreTree = require('./tree') -const getSizeAndHash = require('./util').getSizeAndHash - -const DAGNode = merkleDAG.DAGNode - -const CHUNK_SIZE = 262144 - -module.exports = (dagService, options) => { - assert(dagService, 'Missing dagService') - - const files = [] - - const source = pushable() - const sink = write( - makeWriter(source, files, dagService), - null, - 100, - (err) => { - if (err) { - return source.end(err) - } - - createAndStoreTree(files, dagService, source, (err) => { - if (err) { - return source.end(err) - } - source.end() - }) - } - ) - - return {source, sink} -} - -function makeWriter (source, files, dagService) { - return (items, cb) => { - parallel(items.map((item) => (cb) => { - if (!item.content) { - return createAndStoreDir(item, dagService, (err, node) => { - if (err) return cb(err) - source.push(node) - files.push(node) - cb() - }) - } - - createAndStoreFile(item, dagService, (err, node) => { - if (err) return cb(err) - source.push(node) - files.push(node) - cb() - }) - }), cb) - } -} - -function createAndStoreDir (item, ds, cb) { - // 1. create the empty dir dag node - // 2. write it to the dag store - - const d = new UnixFS('directory') - const n = new DAGNode() - n.data = d.marshal() - - waterfall([ - (cb) => ds.put(n, cb), - (cb) => getSizeAndHash(n, cb), - (res, cb) => { - cb(null, { - path: item.path, - multihash: res.multihash, - size: res.size - // dataSize: d.fileSize() - }) - } - ], cb) -} - -function createAndStoreFile (file, ds, cb) { - if (Buffer.isBuffer(file.content)) { - file.content = pull.values([file.content]) - } - - if (typeof file.content !== 'function') { - return cb(new Error('invalid content')) - } - - // 1. create the unixfs merkledag node - // 2. 
add its hash and size to the leafs array - - // TODO - Support really large files - // a) check if we already reach max chunks if yes - // a.1) create a parent node for all of the current leaves - // b.2) clean up the leaves array and add just the parent node - - pull( - file.content, - fsc(CHUNK_SIZE), - pull.asyncMap((chunk, cb) => { - const l = new UnixFS('file', Buffer(chunk)) - const n = new DAGNode(l.marshal()) - - waterfall([ - (cb) => ds.put(n, cb), - (cb) => getSizeAndHash(n, cb) - ], (err, stats) => { - if (err) { - return cb(err) - } - - cb(null, { - Hash: stats.multihash, - Size: stats.size, - leafSize: l.fileSize(), - Name: '' - }) - }) - }), - pull.collect((err, leaves) => { - if (err) return cb(err) - - if (leaves.length === 1) { - return cb(null, { - path: file.path, - multihash: leaves[0].Hash, - size: leaves[0].Size - // dataSize: leaves[0].leafSize - }) - } - - // create a parent node and add all the leafs - - const f = new UnixFS('file') - const n = new merkleDAG.DAGNode() - - for (let leaf of leaves) { - f.addBlockSize(leaf.leafSize) - n.addRawLink( - new merkleDAG.DAGLink(leaf.Name, leaf.Size, leaf.Hash) - ) - } - - n.data = f.marshal() - parallel([ - (cb) => ds.put(n, cb), - (cb) => getSizeAndHash(n, cb) - ], (err, res) => { - if (err) { - return cb(err) - } - - cb(null, { - path: file.path, - multihash: res[1].multihash, - size: res[1].size - // dataSize: f.fileSize() - }) - }) - }) - ) -} diff --git a/test/test-data/1.2MiB.txt b/test/fixtures/1.2MiB.txt similarity index 100% rename from test/test-data/1.2MiB.txt rename to test/fixtures/1.2MiB.txt diff --git a/test/test-data/1.2MiB.txt.block b/test/fixtures/1.2MiB.txt.block similarity index 100% rename from test/test-data/1.2MiB.txt.block rename to test/fixtures/1.2MiB.txt.block diff --git a/test/test-data/1.2MiB.txt.link-block0 b/test/fixtures/1.2MiB.txt.link-block0 similarity index 100% rename from test/test-data/1.2MiB.txt.link-block0 rename to test/fixtures/1.2MiB.txt.link-block0 diff --git a/test/test-data/1.2MiB.txt.link-block1 b/test/fixtures/1.2MiB.txt.link-block1 similarity index 100% rename from test/test-data/1.2MiB.txt.link-block1 rename to test/fixtures/1.2MiB.txt.link-block1 diff --git a/test/test-data/1.2MiB.txt.link-block2 b/test/fixtures/1.2MiB.txt.link-block2 similarity index 100% rename from test/test-data/1.2MiB.txt.link-block2 rename to test/fixtures/1.2MiB.txt.link-block2 diff --git a/test/test-data/1.2MiB.txt.link-block3 b/test/fixtures/1.2MiB.txt.link-block3 similarity index 100% rename from test/test-data/1.2MiB.txt.link-block3 rename to test/fixtures/1.2MiB.txt.link-block3 diff --git a/test/test-data/1.2MiB.txt.link-block4 b/test/fixtures/1.2MiB.txt.link-block4 similarity index 100% rename from test/test-data/1.2MiB.txt.link-block4 rename to test/fixtures/1.2MiB.txt.link-block4 diff --git a/test/test-data/1.2MiB.txt.unixfs-file b/test/fixtures/1.2MiB.txt.unixfs-file similarity index 100% rename from test/test-data/1.2MiB.txt.unixfs-file rename to test/fixtures/1.2MiB.txt.unixfs-file diff --git a/test/test-data/1.2MiB.txt.unixfs-raw0 b/test/fixtures/1.2MiB.txt.unixfs-raw0 similarity index 100% rename from test/test-data/1.2MiB.txt.unixfs-raw0 rename to test/fixtures/1.2MiB.txt.unixfs-raw0 diff --git a/test/test-data/1.2MiB.txt.unixfs-raw1 b/test/fixtures/1.2MiB.txt.unixfs-raw1 similarity index 100% rename from test/test-data/1.2MiB.txt.unixfs-raw1 rename to test/fixtures/1.2MiB.txt.unixfs-raw1 diff --git a/test/test-data/1.2MiB.txt.unixfs-raw2 b/test/fixtures/1.2MiB.txt.unixfs-raw2 
similarity index 100% rename from test/test-data/1.2MiB.txt.unixfs-raw2 rename to test/fixtures/1.2MiB.txt.unixfs-raw2 diff --git a/test/test-data/1.2MiB.txt.unixfs-raw3 b/test/fixtures/1.2MiB.txt.unixfs-raw3 similarity index 100% rename from test/test-data/1.2MiB.txt.unixfs-raw3 rename to test/fixtures/1.2MiB.txt.unixfs-raw3 diff --git a/test/test-data/1.2MiB.txt.unixfs-raw4 b/test/fixtures/1.2MiB.txt.unixfs-raw4 similarity index 100% rename from test/test-data/1.2MiB.txt.unixfs-raw4 rename to test/fixtures/1.2MiB.txt.unixfs-raw4 diff --git a/test/test-data/1MiB.txt b/test/fixtures/1MiB.txt similarity index 100% rename from test/test-data/1MiB.txt rename to test/fixtures/1MiB.txt diff --git a/test/test-data/200Bytes.txt b/test/fixtures/200Bytes.txt similarity index 100% rename from test/test-data/200Bytes.txt rename to test/fixtures/200Bytes.txt diff --git a/test/test-data/200Bytes.txt.block b/test/fixtures/200Bytes.txt.block similarity index 100% rename from test/test-data/200Bytes.txt.block rename to test/fixtures/200Bytes.txt.block diff --git a/test/test-data/200Bytes.txt.unixfs-file b/test/fixtures/200Bytes.txt.unixfs-file similarity index 100% rename from test/test-data/200Bytes.txt.unixfs-file rename to test/fixtures/200Bytes.txt.unixfs-file diff --git a/test/test-data/dir-big.block b/test/fixtures/dir-big.block similarity index 100% rename from test/test-data/dir-big.block rename to test/fixtures/dir-big.block diff --git a/test/test-data/dir-big.unixfs-dir b/test/fixtures/dir-big.unixfs-dir similarity index 100% rename from test/test-data/dir-big.unixfs-dir rename to test/fixtures/dir-big.unixfs-dir diff --git a/test/test-data/dir-big/1.2MiB.txt b/test/fixtures/dir-big/1.2MiB.txt similarity index 100% rename from test/test-data/dir-big/1.2MiB.txt rename to test/fixtures/dir-big/1.2MiB.txt diff --git a/test/test-data/dir-nested.block b/test/fixtures/dir-nested.block similarity index 100% rename from test/test-data/dir-nested.block rename to test/fixtures/dir-nested.block diff --git a/test/test-data/dir-nested/200Bytes.txt b/test/fixtures/dir-nested/200Bytes.txt similarity index 100% rename from test/test-data/dir-nested/200Bytes.txt rename to test/fixtures/dir-nested/200Bytes.txt diff --git a/test/test-data/dir-nested/level-1/200Bytes.txt b/test/fixtures/dir-nested/level-1/200Bytes.txt similarity index 100% rename from test/test-data/dir-nested/level-1/200Bytes.txt rename to test/fixtures/dir-nested/level-1/200Bytes.txt diff --git a/test/test-data/dir-small.block b/test/fixtures/dir-small.block similarity index 100% rename from test/test-data/dir-small.block rename to test/fixtures/dir-small.block diff --git a/test/test-data/dir-small.unixfs-dir b/test/fixtures/dir-small.unixfs-dir similarity index 100% rename from test/test-data/dir-small.unixfs-dir rename to test/fixtures/dir-small.unixfs-dir diff --git a/test/test-data/dir-small/200Bytes.txt b/test/fixtures/dir-small/200Bytes.txt similarity index 100% rename from test/test-data/dir-small/200Bytes.txt rename to test/fixtures/dir-small/200Bytes.txt diff --git a/test/test-data/empty.txt b/test/fixtures/empty.txt similarity index 100% rename from test/test-data/empty.txt rename to test/fixtures/empty.txt diff --git a/test/test-data/foo-big/1.2MiB.txt b/test/fixtures/foo-big/1.2MiB.txt similarity index 100% rename from test/test-data/foo-big/1.2MiB.txt rename to test/fixtures/foo-big/1.2MiB.txt diff --git a/test/test-data/foo/bar/200Bytes.txt b/test/fixtures/foo/bar/200Bytes.txt similarity index 100% rename from 
test/test-data/foo/bar/200Bytes.txt rename to test/fixtures/foo/bar/200Bytes.txt diff --git a/test/test-data/ipfsmarket-1.ogv b/test/fixtures/ipfsmarket-1.ogv similarity index 100% rename from test/test-data/ipfsmarket-1.ogv rename to test/fixtures/ipfsmarket-1.ogv diff --git a/test/test-data/pam/1.2MiB.txt b/test/fixtures/pam/1.2MiB.txt similarity index 100% rename from test/test-data/pam/1.2MiB.txt rename to test/fixtures/pam/1.2MiB.txt diff --git a/test/test-data/pam/pum/1.2MiB.txt b/test/fixtures/pam/pum/1.2MiB.txt similarity index 100% rename from test/test-data/pam/pum/1.2MiB.txt rename to test/fixtures/pam/pum/1.2MiB.txt diff --git a/test/test-data/pam/pum/200Bytes.txt b/test/fixtures/pam/pum/200Bytes.txt similarity index 100% rename from test/test-data/pam/pum/200Bytes.txt rename to test/fixtures/pam/pum/200Bytes.txt diff --git a/test/test-data/pim/1.2MiB.txt b/test/fixtures/pim/1.2MiB.txt similarity index 100% rename from test/test-data/pim/1.2MiB.txt rename to test/fixtures/pim/1.2MiB.txt diff --git a/test/test-data/pim/200Bytes.txt b/test/fixtures/pim/200Bytes.txt similarity index 100% rename from test/test-data/pim/200Bytes.txt rename to test/fixtures/pim/200Bytes.txt diff --git a/test/test-data/test-file.txt b/test/fixtures/test-file.txt similarity index 100% rename from test/test-data/test-file.txt rename to test/fixtures/test-file.txt diff --git a/test/test-data/test-file.txt.link-block0 b/test/fixtures/test-file.txt.link-block0 similarity index 100% rename from test/test-data/test-file.txt.link-block0 rename to test/fixtures/test-file.txt.link-block0 diff --git a/test/test-data/test-video.ogv b/test/fixtures/test-video.ogv similarity index 100% rename from test/test-data/test-video.ogv rename to test/fixtures/test-video.ogv diff --git a/test/test-exporter.js b/test/test-exporter.js index e93db266..d1005281 100644 --- a/test/test-exporter.js +++ b/test/test-exporter.js @@ -5,22 +5,21 @@ const expect = require('chai').expect const BlockService = require('ipfs-block-service') const IPLDResolver = require('ipld-resolver') const UnixFS = require('ipfs-unixfs') -const fs = require('fs') -const path = require('path') const bs58 = require('bs58') const pull = require('pull-stream') const zip = require('pull-zip') const CID = require('cids') +const loadFixture = require('aegir/fixtures') const unixFSEngine = require('./../src') const exporter = unixFSEngine.exporter +const bigFile = loadFixture(__dirname, 'fixtures/1.2MiB.txt') + module.exports = (repo) => { describe('exporter', () => { let ipldResolver - const bigFile = fs.readFileSync(path.join(__dirname, '/test-data/1.2MiB.txt')) - before(() => { const bs = new BlockService(repo) ipldResolver = new IPLDResolver(bs) diff --git a/test/test-fixed-size-chunker.js b/test/test-fixed-size-chunker.js index e3e8a076..dd7408b3 100644 --- a/test/test-fixed-size-chunker.js +++ b/test/test-fixed-size-chunker.js @@ -2,14 +2,11 @@ 'use strict' const chunker = require('./../src/chunker/fixed-size') -const fs = require('fs') const expect = require('chai').expect -const path = require('path') const pull = require('pull-stream') +const loadFixture = require('aegir/fixtures') -const rawFile = fs.readFileSync( - path.join(__dirname, '/test-data/1MiB.txt') -) +const rawFile = loadFixture(__dirname, 'fixtures/1MiB.txt') describe('chunker: fixed size', () => { it('chunks non flat buffers', (done) => { diff --git a/test/test-importer.js b/test/test-importer.js index 5b4fb9cb..98a1f6cf 100644 --- a/test/test-importer.js +++ b/test/test-importer.js @@ 
-4,11 +4,10 @@ const importer = require('./../src').importer const expect = require('chai').expect const BlockService = require('ipfs-block-service') -const fs = require('fs') -const path = require('path') const pull = require('pull-stream') const mh = require('multihashes') const IPLDResolver = require('ipld-resolver') +const loadFixture = require('aegir/fixtures') function stringifyMh (files) { return files.map((file) => { @@ -17,17 +16,13 @@ function stringifyMh (files) { }) } +const bigFile = loadFixture(__dirname, 'fixtures/1.2MiB.txt') +const smallFile = loadFixture(__dirname, 'fixtures/200Bytes.txt') + module.exports = function (repo) { describe('importer', function () { let ipldResolver - const bigFile = fs.readFileSync(path.join(__dirname, '/test-data/1.2MiB.txt')) - const smallFile = fs.readFileSync(path.join(__dirname, '/test-data/200Bytes.txt')) - - // const dirSmall = path.join(__dirname, '/test-data/dir-small') - // const dirBig = path.join(__dirname, '/test-data/dir-big') - // const dirNested = path.join(__dirname, '/test-data/dir-nested') - before(() => { const bs = new BlockService(repo) ipldResolver = new IPLDResolver(bs) From 54d5977758b0461a13005b85568fc6814fb823c7 Mon Sep 17 00:00:00 2001 From: Friedel Ziegelmayer Date: Wed, 2 Nov 2016 15:00:20 +0100 Subject: [PATCH 5/6] ready --- src/util.js | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/src/util.js b/src/util.js index 78d114e2..2742316d 100644 --- a/src/util.js +++ b/src/util.js @@ -3,14 +3,18 @@ const UnixFS = require('ipfs-unixfs') const pull = require('pull-stream') const mh = require('multihashes') -const parallel = require('async/parallel') exports.switchType = (node, dirHandler, fileHandler) => { const data = UnixFS.unmarshal(node.data) const type = data.type - if (type === 'directory') return dirHandler() - if (type === 'file') return fileHandler() + if (type === 'directory') { + return dirHandler() + } + + if (type === 'file') { + return fileHandler() + } return pull.error(new Error('Unkown node type')) } @@ -22,10 +26,3 @@ exports.cleanMultihash = (multihash) => { return multihash } - -exports.getSizeAndHash = (n, cb) => { - parallel({ - multihash: (cb) => n.multihash(cb), - size: (cb) => n.size(cb) - }, cb) -} From b0183a6c447a34735d7c61d273c3215187f01faa Mon Sep 17 00:00:00 2001 From: David Dias Date: Thu, 3 Nov 2016 15:27:09 +0000 Subject: [PATCH 6/6] chore: update deps --- package.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/package.json b/package.json index 01cd21de..14288b0a 100644 --- a/package.json +++ b/package.json @@ -39,13 +39,13 @@ }, "homepage": "https://github.com/ipfs/js-ipfs-unixfs-engine#readme", "devDependencies": { - "aegir": "^9.0.1", + "aegir": "^9.1.0", "buffer-loader": "0.0.1", "chai": "^3.5.0", "fs-pull-blob-store": "^0.4.1", "idb-pull-blob-store": "^0.5.1", "ipfs-block-service": "^0.6.0", - "ipfs-repo": "^0.10.0", + "ipfs-repo": "^0.11.0", "ncp": "^2.0.0", "pre-commit": "^1.1.3", "pull-zip": "^2.0.1", @@ -54,15 +54,15 @@ "dependencies": { "async": "^2.1.2", "cids": "^0.2.0", - "ipfs-unixfs": "^0.1.4", - "ipld-dag-pb": "^0.1.3", - "ipld-resolver": "^0.1.2", + "ipfs-unixfs": "^0.1.5", + "ipld-dag-pb": "^0.8.0", + "ipld-resolver": "^0.2.0", "is-ipfs": "^0.2.1", "multihashes": "^0.2.2", "pull-block": "^1.0.2", "pull-paramap": "^1.2.0", "pull-pushable": "^2.0.1", - "pull-stream": "^3.4.5", + "pull-stream": "^3.5.0", "pull-traverse": "^1.0.3", "pull-write": "^1.1.1" },
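
The change that runs through all six patches is the swap from run-parallel / run-series to the async package (async/parallel, async/series, async/waterfall). The getSizeAndHash helper added to src/util.js in patch 1 (and dropped again in patch 5, once its caller in the stray src/importer.js was gone) is the object-task form of async/parallel: results come back under the same keys the tasks were given. A minimal sketch — the helper is verbatim from the patch, while the stand-in node is hypothetical:

```js
'use strict'

const parallel = require('async/parallel')

// async/parallel with an object of tasks collects the results under the
// same keys, so callers get { multihash, size } in a single callback.
function getSizeAndHash (n, cb) {
  parallel({
    multihash: (cb) => n.multihash(cb),
    size: (cb) => n.size(cb)
  }, cb)
}

// Stand-in node; real callers passed an ipfs-merkle-dag DAGNode, whose
// multihash() and size() were callback-based in this version.
const fakeNode = {
  multihash: (cb) => cb(null, 'QmHash'),
  size: (cb) => cb(null, 42)
}

getSizeAndHash(fakeNode, (err, res) => {
  console.log(err, res) // => null { multihash: 'QmHash', size: 42 }
})
```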
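
Patches 1, 2, and 4 converge on one error-handling idea at the exporter's entry point: an invalid hash should not throw, it should return pull.error(...), a source that fails immediately, so the caller sees the failure at the sink like any other stream error. A reduced sketch of that behavior — exporterGuard and the pull.values stand-in are illustrative, not the real exporter:

```js
'use strict'

const pull = require('pull-stream')
const isIPFS = require('is-ipfs')

// Mirrors the guard in src/exporter/index.js: bad input becomes an
// immediately-erroring source instead of a synchronous exception.
function exporterGuard (hash) {
  if (!isIPFS.multihash(hash)) {
    return pull.error(new Error('not valid multihash'))
  }
  // the real exporter resolves the DAG here; this sketch just echoes the hash
  return pull.values([hash])
}

pull(
  exporterGuard('not-a-multihash'),
  pull.collect((err, items) => {
    console.log(err.message) // => 'not valid multihash'
  })
)
```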
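
The {source, sink} object returned by the importer is a pull-stream duplex assembled from pull-pushable and pull-write: the sink receives items in batches of up to 100, each processed result is pushed onto the source, and pull-write's done callback ends the source once everything (including the directory tree) has been flushed. A minimal sketch of that wiring — makeImporter and the per-item process function are stand-ins for the real DAG-storing work:

```js
'use strict'

const pull = require('pull-stream')
const pushable = require('pull-pushable')
const write = require('pull-write')

function makeImporter (process) {
  const source = pushable()

  const sink = write(
    (batch, cb) => {
      // pull-write hands the writer an array of queued items
      batch.forEach((item) => source.push(process(item)))
      cb()
    },
    null, // no custom reducer
    100,  // flush in batches of at most 100, as in the patch
    (err) => source.end(err)
  )

  return { source, sink }
}

// Usage with a trivial processor:
const importer = makeImporter((s) => s.toUpperCase())

pull(pull.values(['a', 'b']), importer.sink)
pull(importer.source, pull.collect((err, out) => {
  console.log(err, out) // => null [ 'A', 'B' ]
}))
```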
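
src/importer.js pins CHUNK_SIZE at 262144 bytes (256 KiB): file content is cut by the fixed-size chunker and every chunk becomes one UnixFS leaf node before the parent node links them together. A sketch of the chunker in isolation, run from the repo root; the require path follows test/test-fixed-size-chunker.js, and the short final block assumes pull-block's default behavior:

```js
'use strict'

const pull = require('pull-stream')
const chunker = require('./src/chunker/fixed-size')

const CHUNK_SIZE = 262144 // 256 KiB, as pinned in src/importer.js

pull(
  pull.values([Buffer.alloc(300000, 'a')]),
  chunker(CHUNK_SIZE),
  pull.collect((err, chunks) => {
    if (err) throw err
    // one full chunk plus the 37856-byte remainder
    console.log(chunks.map((c) => c.length)) // => [ 262144, 37856 ]
  })
)
```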