 'use strict'
 
-const Importer = require('ipfs-unixfs-engine').importer
-const Exporter = require('ipfs-unixfs-engine').exporter
+const unixfsEngine = require('ipfs-unixfs-engine')
+const Importer = unixfsEngine.Importer
+const Exporter = unixfsEngine.Exporter
 const UnixFS = require('ipfs-unixfs')
+const through = require('through2')
+const isStream = require('isstream')
+const promisify = require('promisify-es6')
+const Duplex = require('stream').Duplex
+const multihashes = require('multihashes')
 
 module.exports = function files (self) {
   return {
-    add: (arr, callback) => {
-      if (typeof arr === 'function') {
-        callback = arr
-        arr = undefined
+    createAddStream: (callback) => {
+      const i = new Importer(self._dagS)
+      const ds = new Duplex({ objectMode: true })
+
+      ds._read = (n) => {}
+      ds._write = (file, enc, next) => {
+        i.write(file)
+        next()
       }
-      if (callback === undefined) {
+
+      ds.end = () => {
+        i.end()
+      }
+
+      let counter = 0
+
+      i.on('data', (file) => {
+        counter++
+        self.object.get(file.multihash, (err, node) => {
+          if (err) {
+            return ds.emit('error', err)
+          }
+          ds.push({path: file.path, node: node})
+          counter--
+        })
+      })
+
+      i.on('end', () => {
+        function canFinish () {
+          if (counter === 0) {
+            ds.push(null)
+          } else {
+            setTimeout(canFinish, 100)
+          }
+        }
+        canFinish()
+      })
+
+      callback(null, ds)
+    },
+    add: promisify((data, callback) => {
+      // Buffer input
+      if (Buffer.isBuffer(data)) {
+        data = [{
+          path: '',
+          content: data
+        }]
+      }
+      // Readable stream input
+      if (isStream.isReadable(data)) {
+        data = [{
+          path: '',
+          content: data
+        }]
+      }
+      if (!callback || typeof callback !== 'function') {
         callback = function noop () {}
       }
-      if (arr === undefined) {
-        return new Importer(self._dagS)
+      if (!Array.isArray(data)) {
+        return callback(new Error('"data" must be an array of { path: string, content: Buffer|Readable } or Buffer or Readable'))
       }
 
       const i = new Importer(self._dagS)
       const res = []
 
-      i.on('data', (info) => {
-        res.push(info)
-      })
-
-      i.once('end', () => {
+      // Transform file info tuples to DAGNodes
+      i.pipe(through.obj((info, enc, next) => {
+        const mh = multihashes.toB58String(info.multihash)
+        self._dagS.get(mh, (err, node) => {
+          if (err) return callback(err)
+          var obj = {
+            path: info.path || mh,
+            node: node
+          }
+          res.push(obj)
+          next()
+        })
+      }, (done) => {
         callback(null, res)
-      })
+      }))
 
-      arr.forEach((tuple) => {
+      data.forEach((tuple) => {
         i.write(tuple)
       })
 
       i.end()
-    },
+    }),
+
     cat: (hash, callback) => {
       self._dagS.get(hash, (err, fetchedNode) => {
         if (err) {
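
For context, the new createAddStream surface added above can be exercised as below. This is a usage sketch, not part of the commit: `node` stands for an assumed, already-initialized IPFS instance exposing this files module, and the file tuple is illustrative.

    node.files.createAddStream((err, addStream) => {
      if (err) throw err

      // each object pushed out of the duplex stream carries the
      // file's path plus the DAGNode resolved via self.object.get
      addStream.on('data', (file) => {
        console.log(file.path)
      })
      addStream.on('end', () => console.log('import finished'))

      // write { path, content } tuples in, then end the stream
      addStream.write({ path: 'hello.txt', content: new Buffer('hello world') })
      addStream.end()
    })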
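Similarly, a sketch of the reworked add: because it is wrapped with promisify-es6, it can be called in either callback or promise style (same assumed `node` instance as above).

    // callback style; a bare Buffer or Readable is normalized
    // internally to [{ path: '', content: data }]
    node.files.add(new Buffer('hello world'), (err, res) => {
      if (err) throw err
      res.forEach((file) => console.log(file.path))
    })

    // promise style, passing an explicit array of { path, content } tuples
    node.files.add([{ path: 'a/b.txt', content: new Buffer('b') }])
      .then((res) => console.log(res[0].path))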