This repository was archived by the owner on Aug 12, 2020. It is now read-only.

Commit adaa0a6

updates
1 parent 7c137d8 commit adaa0a6

3 files changed: +77 −51 lines changed


package.json

Lines changed: 2 additions & 1 deletion
@@ -57,6 +57,7 @@
     "ipfs-merkle-dag": "^0.5.0",
     "ipfs-unixfs": "^0.1.0",
     "readable-stream": "^1.1.13",
+    "run-series": "^1.1.4",
     "through2": "^2.0.0"
   },
   "contributors": [
@@ -67,4 +68,4 @@
     "greenkeeperio-bot <[email protected]>",
     "nginnever <[email protected]>"
   ]
-}
+}

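The only new dependency is run-series, which src/exporter.js below now uses in place of async.forEachSeries when stitching a file's linked blocks together. As a rough sketch of the pattern (the task bodies here are placeholders, not the exporter's real block-fetching code): each task takes a callback, the tasks run strictly one after another, and the final callback receives either the first error or the collected results.

const series = require('run-series')

series([
  (cb) => cb(null, 'first chunk'),  // stand-in for fetching one DAG link
  (cb) => cb(null, 'second chunk')
], (err, results) => {
  if (err) throw err
  console.log(results) // ['first chunk', 'second chunk']
})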

src/exporter.js

Lines changed: 51 additions & 41 deletions
@@ -4,6 +4,7 @@ const debug = require('debug')
 const log = debug('exporter')
 log.err = debug('exporter:error')
 const UnixFS = require('ipfs-unixfs')
+const series = require('run-series')
 const async = require('async')
 const Readable = require('readable-stream').Readable
 const pathj = require('path')
@@ -14,24 +15,21 @@ exports = module.exports = Exporter
 util.inherits(Exporter, Readable)

 function Exporter (hash, dagService, options) {
-  Readable.call(this, { objectMode: true })
-
   if (!(this instanceof Exporter)) {
-    return new Exporter(hash, dagService)
+    return new Exporter(hash, dagService, options)
   }

-  if (options) {
-    this.options = options
-  } else {
-    this.options = {}
-  }
+  Readable.call(this, { objectMode: true })
+
+  this.options = options || {}

   this._read = (n) => {}

-  this.fileExporter = (node, name, dir, callback) => {
+  let fileExporter = (node, name, callback) => {
     let init

-    if (typeof dir === 'function') { callback = dir; dir = {} }
+    if (!callback) { callback = function noop () {} }
+
     var rs = new Readable()
     if (node.links.length === 0) {
       const unmarshaledData = UnixFS.unmarshal(node.data)
@@ -44,10 +42,8 @@ function Exporter (hash, dagService, options) {
         rs.push(unmarshaledData.data)
         rs.push(null)
       }
-      this.push({ stream: rs, path: name, dir: dir })
-      if (callback) {
-        callback()
-      }
+      this.push({ stream: rs, path: name })
+      callback()
       return
     } else {
       init = false
@@ -56,7 +52,29 @@ function Exporter (hash, dagService, options) {
           return
         }
         init = true
-        async.forEachSeries(node.links, (link, callback) => {
+
+        const array = node.links.map((link) => {
+          return (cb) => {
+            dagService.get(link.hash, (err, res) => {
+              if (err) {
+                cb(err)
+              }
+              var unmarshaledData = UnixFS.unmarshal(res.data)
+              rs.push(unmarshaledData.data)
+              cb()
+            })
+          }
+        })
+        series(array, (err, res) => {
+          if (err) {
+            callback()
+            return
+          }
+          rs.push(null)
+          callback()
+          return
+        })
+        /* async.forEachSeries(node.links, (link, callback) => {
          dagService.get(link.hash, (err, res) => {
            if (err) {
              callback(err)
@@ -67,28 +85,25 @@ function Exporter (hash, dagService, options) {
          })
        }, (err) => {
          if (err) {
-            if (callback) {
-              return callback(err)
-            }
+            callback()
            return
          }
          rs.push(null)
-          if (callback) {
-            callback()
-          }
+          callback()
          return
-        })
-      }
-      this.push({ stream: rs, path: name, dir: dir })
-      if (callback) {
-        callback()
+        })*/
      }
+      this.push({ stream: rs, path: name })
+      callback()
      return
    }
  }

-  this.dirExporter = (node, name, callback) => {
+  let dirExporter = (node, name, callback) => {
    let init
+
+    if (!callback) { callback = function noop () {} }
+
    var rs = new Readable()
    if (node.links.length === 0) {
      init = false
@@ -100,10 +115,8 @@ function Exporter (hash, dagService, options) {
        rs.push(node.data)
        rs.push(null)
      }
-      this.push({stream: rs, path: name})
-      if (callback) {
-        callback()
-      }
+      this.push({stream: null, path: name})
+      callback()
      return
    } else {
      async.forEachSeries(node.links, (link, callback) => {
@@ -113,40 +126,37 @@ function Exporter (hash, dagService, options) {
          }
          var unmarshaledData = UnixFS.unmarshal(res.data)
          if (unmarshaledData.type === 'file') {
-            return (this.fileExporter(res, pathj.join(name, link.name), callback))
+            return (fileExporter(res, pathj.join(name, link.name), callback))
          }
          if (unmarshaledData.type === 'directory') {
-            return (this.dirExporter(res, pathj.join(name, link.name), callback))
+            return (dirExporter(res, pathj.join(name, link.name), callback))
          }
          callback()
        })
      }, (err) => {
        if (err) {
-          if (callback) {
-            callback(err)
-          }
-          return
-        }
-        if (callback) {
          callback()
+          return
        }
+        callback()
        return
      })
    }
  }

  dagService.get(hash, (err, fetchedNode) => {
    if (err) {
+      this.emit('error', err)
      return
    }
    const data = UnixFS.unmarshal(fetchedNode.data)
    const type = data.type

    if (type === 'directory') {
-      this.dirExporter(fetchedNode, hash)
+      dirExporter(fetchedNode, hash)
    }
    if (type === 'file') {
-      this.fileExporter(fetchedNode, hash, false)
+      fileExporter(fetchedNode, hash)
    }
  })

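With fileExporter and dirExporter demoted to local functions, the exporter's public surface is just the Readable it inherits: it emits { stream, path } objects on 'data' and, with the change above, an 'error' event when dagService.get fails for the root hash. Below is a minimal consumer sketch modelled on the tests that follow; exportAll is a hypothetical helper name, and repo is assumed to be an already-initialised ipfs-repo instance.

const BlockService = require('ipfs-block-service')
const DAGService = require('ipfs-merkle-dag').DAGService
const exporter = require('./src/exporter')

// `repo` is assumed to be an already-initialised ipfs-repo instance and
// `hash` the multihash of a unixfs file or directory stored in it.
function exportAll (repo, hash) {
  const ds = new DAGService(new BlockService(repo))
  const files = exporter(hash, ds)

  files.on('data', (file) => {
    // file.path is the root hash plus any nested link names;
    // file.stream is a Readable with the content (directory entries may carry a null stream)
    if (file.stream) {
      file.stream.pipe(process.stdout)
    }
  })

  // new in this commit: a missing root hash surfaces as an 'error' event
  files.on('error', (err) => {
    console.error('dag service error', err)
  })
}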

test/test-exporter.js

Lines changed: 24 additions & 9 deletions
@@ -8,11 +8,14 @@ const BlockService = require('ipfs-block-service')
 const DAGService = require('ipfs-merkle-dag').DAGService
 const UnixFS = require('ipfs-unixfs')
 const bl = require('bl')
+const fs = require('fs')
+const path = require('path')

 let ds

 module.exports = function (repo) {
   describe('exporter', function () {
+    const bigFile = fs.readFileSync(path.join(__dirname, '/test-data/1.2MiB.txt'))
     before((done) => {
       const bs = new BlockService(repo)
       expect(bs).to.exist
@@ -29,8 +32,8 @@ module.exports = function (repo) {
       const unmarsh = UnixFS.unmarshal(fetchedNode.data)
       expect(err).to.not.exist
       const testExport = exporter(hash, ds)
-      testExport.on('data', (object) => {
-        object.stream.pipe(bl((err, bldata) => {
+      testExport.on('data', (file) => {
+        file.stream.pipe(bl((err, bldata) => {
          expect(err).to.not.exist
          expect(bldata).to.deep.equal(unmarsh.data)
          done()
@@ -44,8 +47,9 @@ module.exports = function (repo) {
      const bs = new BlockService(repo)
      const ds = new DAGService(bs)
      const testExport = exporter(hash, ds)
-      testExport.on('data', (object) => {
-        object.stream.pipe(bl((err, bldata) => {
+      testExport.on('data', (file) => {
+        file.stream.pipe(bl((err, bldata) => {
+          expect(bldata).to.deep.equal(bigFile)
          expect(err).to.not.exist
          done()
        }))
@@ -57,9 +61,9 @@ module.exports = function (repo) {
      const bs = new BlockService(repo)
      const ds = new DAGService(bs)
      const testExport = exporter(hash, ds)
-      testExport.on('data', (object) => {
-        expect(object.path).to.equal('QmRQgufjp9vLE8XK2LGKZSsPCFCF6e4iynCQtNB5X2HBKE')
-        object.stream.pipe(bl((err, bldata) => {
+      testExport.on('data', (file) => {
+        expect(file.path).to.equal('QmRQgufjp9vLE8XK2LGKZSsPCFCF6e4iynCQtNB5X2HBKE')
+        file.stream.pipe(bl((err, bldata) => {
          expect(err).to.not.exist
          done()
        }))
@@ -72,8 +76,8 @@ module.exports = function (repo) {
      const ds = new DAGService(bs)
      const testExport = exporter(hash, ds)
      var fsa = []
-      testExport.on('data', (object) => {
-        fsa.push(object)
+      testExport.on('data', (files) => {
+        fsa.push(files)
      })
      setTimeout(() => {
        expect(fsa[0].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/200Bytes.txt')
@@ -83,5 +87,16 @@ module.exports = function (repo) {
        done()
      }, 1000)
    })
+
+    it('expect a dag service error over stream', (done) => {
+      const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKj3' // This hash doesn't exist in the repo
+      const bs = new BlockService(repo)
+      const ds = new DAGService(bs)
+      const testExport = exporter(hash, ds)
+      testExport.on('error', (err) => {
+        expect(err).to.exist
+        done()
+      })
+    })
   })
 }
