This repository was archived by the owner on Apr 29, 2020. It is now read-only.

Commit 6a86d55

perf: write files in parallel chunks, use a through instead of a map

1 parent 4ef5dbc
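The performance win comes from pull-paramap: pull-stream's asyncMap waits for each item's callback before pulling the next item, so chunks were built and written strictly one at a time, whereas paraMap reads ahead and keeps several async calls in flight (an optional second argument caps the concurrency). A minimal sketch of the difference, with a toy async worker standing in for the real DAG-building code in this commit:

```js
const pull = require('pull-stream/pull')
const values = require('pull-stream/sources/values')
const collect = require('pull-stream/sinks/collect')
const paraMap = require('pull-paramap')

// Toy worker standing in for "build a DAG node and persist it"
const work = (chunk, cb) => setTimeout(() => cb(null, chunk * 2), 100)

pull(
  values([1, 2, 3, 4, 5]),
  // pull-stream's asyncMap would wait for each callback before pulling the
  // next value (~500ms in total here); paraMap keeps reading ahead, so all
  // five workers run concurrently (~100ms in total here)
  paraMap(work),
  collect((err, results) => {
    if (err) throw err
    console.log(results) // [2, 4, 6, 8, 10]
  })
)
```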

File tree

2 files changed: +43 -40 lines changed

package.json (+1)

@@ -61,6 +61,7 @@
     "pull-batch": "^1.0.0",
     "pull-block": "^1.4.0",
     "pull-pair": "^1.1.0",
+    "pull-paramap": "^1.2.2",
     "pull-pause": "0.0.2",
     "pull-pushable": "^2.2.0",
     "pull-stream": "^3.6.9",

src/builder/builder.js (+42 -40)

@@ -4,12 +4,11 @@ const extend = require('deep-extend')
 const UnixFS = require('ipfs-unixfs')
 const pull = require('pull-stream/pull')
 const values = require('pull-stream/sources/values')
-const asyncMap = require('pull-stream/throughs/async-map')
-const map = require('pull-stream/throughs/map')
 const collect = require('pull-stream/sinks/collect')
 const through = require('pull-through')
 const parallel = require('async/parallel')
 const waterfall = require('async/waterfall')
+const paraMap = require('pull-paramap')
 const persist = require('../utils/persist')
 const reduce = require('./reduce')
 const {
@@ -106,50 +105,53 @@ module.exports = function builder (createChunker, ipld, createReducer, _options)
   pull(
     file.content,
     chunker,
-    map(chunk => {
+    through(chunk => {
       if (options.progress && typeof options.progress === 'function') {
         options.progress(chunk.byteLength)
       }
-      return chunk
     }),
-    asyncMap((buffer, callback) => {
-      if (options.rawLeaves) {
-        return callback(null, {
-          size: buffer.length,
-          leafSize: buffer.length,
-          data: buffer
-        })
-      }
-
-      const file = new UnixFS(options.leafType, buffer)
+    paraMap((buffer, callback) => {
+      waterfall([
+        (cb) => {
+          if (options.rawLeaves) {
+            return cb(null, {
+              size: buffer.length,
+              leafSize: buffer.length,
+              data: buffer
+            })
+          }

-      DAGNode.create(file.marshal(), [], (err, node) => {
-        if (err) {
-          return callback(err)
+          const file = new UnixFS(options.leafType, buffer)
+
+          DAGNode.create(file.marshal(), [], (err, node) => {
+            if (err) {
+              return cb(err)
+            }
+
+            cb(null, {
+              size: node.size,
+              leafSize: file.fileSize(),
+              data: node
+            })
+          })
+        },
+        (leaf, cb) => {
+          persist(leaf.data, ipld, options, (error, results) => {
+            if (error) {
+              return cb(error)
+            }
+
+            cb(null, {
+              size: leaf.size,
+              leafSize: leaf.leafSize,
+              data: results.node,
+              multihash: results.cid.buffer,
+              path: leaf.path,
+              name: ''
+            })
+          })
         }
-
-        callback(null, {
-          size: node.size,
-          leafSize: file.fileSize(),
-          data: node
-        })
-      })
-    }),
-    asyncMap((leaf, callback) => {
-      persist(leaf.data, ipld, options, (error, results) => {
-        if (error) {
-          return callback(error)
-        }
-
-        callback(null, {
-          size: leaf.size,
-          leafSize: leaf.leafSize,
-          data: results.node,
-          multihash: results.cid.buffer,
-          path: leaf.path,
-          name: ''
-        })
-      })
+      ], callback)
     }),
     through( // mark as single node if only one single node
       function onData (data) {
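Beyond swapping map for through on the progress hook, the second hunk also reshapes the pipeline: the two serial asyncMap stages (build the leaf node, then persist it) are fused into a single paraMap stage whose body chains the same two steps with async's waterfall, so each chunk flows through create-then-persist as one async unit and several chunks can be in flight at once. A reduced sketch of that shape, where makeLeaf and persistLeaf are hypothetical stand-ins for the DAGNode.create and persist calls above:

```js
const pull = require('pull-stream/pull')
const values = require('pull-stream/sources/values')
const collect = require('pull-stream/sinks/collect')
const paraMap = require('pull-paramap')
const waterfall = require('async/waterfall')

// Hypothetical stand-ins for DAGNode.create(...) and persist(...) above
const makeLeaf = (buffer, cb) =>
  setImmediate(() => cb(null, { size: buffer.length, data: buffer }))
const persistLeaf = (leaf, cb) => setImmediate(() => cb(null, leaf))

pull(
  values([Buffer.from('a'), Buffer.from('b')]),
  paraMap((buffer, callback) => {
    // waterfall passes each step's result to the next and stops on the first
    // error, so one callback covers the whole create-then-persist sequence
    waterfall([
      (cb) => makeLeaf(buffer, cb),
      (leaf, cb) => persistLeaf(leaf, cb)
    ], callback)
  }),
  collect((err, leaves) => {
    if (err) throw err
    console.log(leaves.length) // 2
  })
)
```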
