
Commit 092b5b4

perf: deep require pull stream modules
To create a minimal bundle, require pull-stream modules directly: https://www.npmjs.com/package/pull-stream#minimal-bundle
1 parent 6029b6a · commit 092b5b4

22 files changed: +287 -227 lines
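The change is mechanical across every file touched: instead of requiring the whole pull-stream package, each source, through, and sink is required from its own module path, so a bundler only includes the operators that are actually used. A minimal sketch of the pattern; the doubling pipeline is illustrative only and does not appear in this commit:

// Before: one require drags in every pull-stream source, through and sink.
// const pull = require('pull-stream')

// After: deep requires keep the browser bundle minimal.
const pull = require('pull-stream/pull')
const values = require('pull-stream/sources/values')
const map = require('pull-stream/throughs/map')
const collect = require('pull-stream/sinks/collect')

// Illustrative pipeline (not from this commit): double a few numbers.
pull(
  values([1, 2, 3]),
  map((n) => n * 2),
  collect((err, results) => {
    if (err) throw err
    console.log(results) // [2, 4, 6]
  })
)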

src/builder/balanced/balanced-reducer.js

Lines changed: 7 additions & 4 deletions
@@ -1,6 +1,9 @@
 'use strict'
 
-const pull = require('pull-stream')
+const pull = require('pull-stream/pull')
+const values = require('pull-stream/sources/values')
+const asyncMap = require('pull-stream/throughs/async-map')
+const collect = require('pull-stream/sinks/collect')
 const pushable = require('pull-pushable')
 const pullPair = require('pull-pair')
 const batch = require('pull-batch')
@@ -29,14 +32,14 @@ module.exports = function balancedReduceToRoot (reduce, options) {
   function reduceToParents (_chunks, callback) {
     let chunks = _chunks
     if (Array.isArray(chunks)) {
-      chunks = pull.values(chunks)
+      chunks = values(chunks)
     }
 
     pull(
       chunks,
       batch(options.maxChildrenPerNode),
-      pull.asyncMap(reduce),
-      pull.collect(reduced)
+      asyncMap(reduce),
+      collect(reduced)
     )
 
     function reduced (err, roots) {

src/builder/builder.js

Lines changed: 10 additions & 6 deletions
@@ -2,7 +2,11 @@
 
 const extend = require('deep-extend')
 const UnixFS = require('ipfs-unixfs')
-const pull = require('pull-stream')
+const pull = require('pull-stream/pull')
+const values = require('pull-stream/sources/values')
+const asyncMap = require('pull-stream/throughs/async-map')
+const map = require('pull-stream/throughs/map')
+const collect = require('pull-stream/sinks/collect')
 const through = require('pull-through')
 const parallel = require('async/parallel')
 const waterfall = require('async/waterfall')
@@ -80,7 +84,7 @@ module.exports = function builder (createChunker, ipld, createReducer, _options)
 
   function createAndStoreFile (file, callback) {
     if (Buffer.isBuffer(file.content)) {
-      file.content = pull.values([file.content])
+      file.content = values([file.content])
     }
 
     if (typeof file.content !== 'function') {
@@ -102,13 +106,13 @@ module.exports = function builder (createChunker, ipld, createReducer, _options)
     pull(
       file.content,
       chunker,
-      pull.map(chunk => {
+      map(chunk => {
         if (options.progress && typeof options.progress === 'function') {
           options.progress(chunk.byteLength)
         }
         return Buffer.from(chunk)
       }),
-      pull.asyncMap((buffer, callback) => {
+      asyncMap((buffer, callback) => {
        if (options.rawLeaves) {
          return callback(null, {
            size: buffer.length,
@@ -131,7 +135,7 @@ module.exports = function builder (createChunker, ipld, createReducer, _options)
          })
        })
      }),
-      pull.asyncMap((leaf, callback) => {
+      asyncMap((leaf, callback) => {
        persist(leaf.data, ipld, options, (error, results) => {
          if (error) {
            return callback(error)
@@ -166,7 +170,7 @@ module.exports = function builder (createChunker, ipld, createReducer, _options)
        }
      ),
      reducer,
-      pull.collect((err, roots) => {
+      collect((err, roots) => {
        if (err) {
          callback(err)
        } else {

src/builder/flat/index.js

Lines changed: 5 additions & 3 deletions
@@ -1,6 +1,8 @@
 'use strict'
 
-const pull = require('pull-stream')
+const pull = require('pull-stream/pull')
+const asyncMap = require('pull-stream/throughs/async-map')
+const collect = require('pull-stream/sinks/collect')
 const pushable = require('pull-pushable')
 const pullPair = require('pull-pair')
 const batch = require('pull-batch')
@@ -13,8 +15,8 @@ module.exports = function (reduce, options) {
   pull(
     source,
     batch(Infinity),
-    pull.asyncMap(reduce),
-    pull.collect((err, roots) => {
+    asyncMap(reduce),
+    collect((err, roots) => {
      if (err) {
        result.end(err)
        return // early

src/builder/trickle/trickle-reducer.js

Lines changed: 7 additions & 5 deletions
@@ -1,6 +1,8 @@
 'use strict'
 
-const pull = require('pull-stream')
+const pull = require('pull-stream/pull')
+const asyncMap = require('pull-stream/throughs/async-map')
+const collect = require('pull-stream/sinks/collect')
 const pushable = require('pull-pushable')
 const batch = require('pull-batch')
 const pullPair = require('pull-pair')
@@ -19,8 +21,8 @@ module.exports = function trickleReduceToRoot (reduce, options) {
     pausable,
     trickle(0, -1),
     batch(Infinity),
-    pull.asyncMap(reduce),
-    pull.collect((err, roots) => {
+    asyncMap(reduce),
+    collect((err, roots) => {
      if (err) {
        result.end(err)
      } else {
@@ -82,8 +84,8 @@ module.exports = function trickleReduceToRoot (reduce, options) {
        }
      ),
      batch(Infinity),
-      pull.asyncMap(reduce),
-      pull.collect((err, nodes) => {
+      asyncMap(reduce),
+      collect((err, nodes) => {
        pendingResumes--
        if (err) {
          result.end(err)

src/importer/index.js

Lines changed: 3 additions & 2 deletions
@@ -1,7 +1,8 @@
 'use strict'
 
 const pause = require('pull-pause')
-const pull = require('pull-stream')
+const pull = require('pull-stream/pull')
+const map = require('pull-stream/throughs/map')
 const writable = require('pull-write')
 const pushable = require('pull-pushable')
 const assert = require('assert')
@@ -69,7 +70,7 @@ module.exports = function (ipld, _options) {
     entry,
     pausable,
     dagStream,
-    pull.map((node) => {
+    map((node) => {
      pending--
      if (!pending) {
        process.nextTick(() => {

test/builder-balanced.js

Lines changed: 11 additions & 9 deletions
@@ -4,7 +4,9 @@
 const chai = require('chai')
 chai.use(require('dirty-chai'))
 const expect = chai.expect
-const pull = require('pull-stream')
+const pull = require('pull-stream/pull')
+const values = require('pull-stream/sources/values')
+const collect = require('pull-stream/sinks/collect')
 
 const builder = require('../src/builder/balanced')
 
@@ -23,9 +25,9 @@ const options = {
 describe('builder: balanced', () => {
   it('reduces one value into itself', (callback) => {
     pull(
-      pull.values([1]),
+      values([1]),
       builder(reduce, options),
-      pull.collect((err, result) => {
+      collect((err, result) => {
        expect(err).to.not.exist()
        expect(result).to.be.eql([1])
        callback()
@@ -35,9 +37,9 @@ describe('builder: balanced', () => {
 
   it('reduces 3 values into parent', (callback) => {
     pull(
-      pull.values([1, 2, 3]),
+      values([1, 2, 3]),
       builder(reduce, options),
-      pull.collect((err, result) => {
+      collect((err, result) => {
        expect(err).to.not.exist()
        expect(result).to.be.eql([{
          children: [1, 2, 3]
@@ -49,9 +51,9 @@ describe('builder: balanced', () => {
 
   it('obeys max children per node', (callback) => {
     pull(
-      pull.values([1, 2, 3, 4]),
+      values([1, 2, 3, 4]),
       builder(reduce, options),
-      pull.collect((err, result) => {
+      collect((err, result) => {
        expect(err).to.not.exist()
        expect(result).to.be.eql([
          {
@@ -70,9 +72,9 @@ describe('builder: balanced', () => {
 
   it('refolds 2 parent nodes', (callback) => {
     pull(
-      pull.values([1, 2, 3, 4, 5, 6, 7]),
+      values([1, 2, 3, 4, 5, 6, 7]),
       builder(reduce, options),
-      pull.collect((err, result) => {
+      collect((err, result) => {
        expect(err).to.not.exist()
        expect(result).to.be.eql([
          {

test/builder-dir-sharding.js

Lines changed: 25 additions & 22 deletions
@@ -9,7 +9,10 @@ chai.use(require('dirty-chai'))
 const expect = chai.expect
 const BlockService = require('ipfs-block-service')
 const Ipld = require('ipld')
-const pull = require('pull-stream')
+const pull = require('pull-stream/pull')
+const values = require('pull-stream/sources/values')
+const asyncMap = require('pull-stream/throughs/async-map')
+const collect = require('pull-stream/sinks/collect')
 const pushable = require('pull-pushable')
 const whilst = require('async/whilst')
 const setImmediate = require('async/setImmediate')
@@ -36,14 +39,14 @@ module.exports = (repo) => {
      }
 
      pull(
-        pull.values([
+        values([
          {
            path: 'a/b',
            content: pull.values([Buffer.from('i have the best bytes')])
          }
        ]),
        importer(ipld, options),
-        pull.collect((err, nodes) => {
+        collect((err, nodes) => {
          try {
            expect(err).to.not.exist()
            expect(nodes.length).to.be.eql(2)
@@ -65,14 +68,14 @@ module.exports = (repo) => {
      }
 
      pull(
-        pull.values([
+        values([
          {
            path: 'a/b',
-            content: pull.values([Buffer.from('i have the best bytes')])
+            content: values([Buffer.from('i have the best bytes')])
          }
        ]),
        importer(ipld, options),
-        pull.collect((err, nodes) => {
+        collect((err, nodes) => {
          try {
            expect(err).to.not.exist()
            expect(nodes.length).to.be.eql(2)
@@ -92,7 +95,7 @@ module.exports = (repo) => {
    it('exporting unsharded hash results in the correct files', (done) => {
      pull(
        exporter(nonShardedHash, ipld),
-        pull.collect((err, nodes) => {
+        collect((err, nodes) => {
          try {
            expect(err).to.not.exist()
            expect(nodes.length).to.be.eql(2)
@@ -107,7 +110,7 @@ module.exports = (repo) => {
 
          pull(
            nodes[1].content,
-            pull.collect(collected)
+            collect(collected)
          )
        })
      )
@@ -127,7 +130,7 @@ module.exports = (repo) => {
    it('exporting sharded hash results in the correct files', (done) => {
      pull(
        exporter(shardedHash, ipld),
-        pull.collect((err, nodes) => {
+        collect((err, nodes) => {
          try {
            expect(err).to.not.exist()
            expect(nodes.length).to.be.eql(2)
@@ -142,7 +145,7 @@ module.exports = (repo) => {
 
          pull(
            nodes[1].content,
-            pull.collect(collected)
+            collect(collected)
          )
        })
      )
@@ -169,7 +172,7 @@ module.exports = (repo) => {
      pull(
        push,
        importer(ipld),
-        pull.collect((err, nodes) => {
+        collect((err, nodes) => {
          try {
            expect(err).to.not.exist()
            expect(nodes.length).to.be.eql(maxDirs + 1)
@@ -193,7 +196,7 @@ module.exports = (repo) => {
          i++
          const pushable = {
            path: 'big/' + leftPad(i.toString(), 4, '0'),
-            content: pull.values([Buffer.from(i.toString())])
+            content: values([Buffer.from(i.toString())])
          }
          push.push(pushable)
          setImmediate(callback)
@@ -210,11 +213,11 @@ module.exports = (repo) => {
      const entries = {}
      pull(
        exporter(rootHash, ipld),
-        pull.asyncMap((node, callback) => {
+        asyncMap((node, callback) => {
          if (node.content) {
            pull(
              node.content,
-              pull.collect(collected)
+              collect(collected)
            )
          } else {
            entries[node.path] = node
@@ -227,7 +230,7 @@ module.exports = (repo) => {
            callback(null, node)
          }
        }),
-        pull.collect((err, nodes) => {
+        collect((err, nodes) => {
          expect(err).to.not.exist()
          const paths = Object.keys(entries).sort()
          expect(paths.length).to.be.eql(2001)
@@ -265,7 +268,7 @@ module.exports = (repo) => {
      pull(
        push,
        importer(ipld),
-        pull.collect((err, nodes) => {
+        collect((err, nodes) => {
          expect(err).to.not.exist()
          const last = nodes[nodes.length - 1]
          expect(last.path).to.be.eql('big')
@@ -289,7 +292,7 @@ module.exports = (repo) => {
          }
          const pushed = {
            path: dir.concat(leftPad(i.toString(), 4, '0')).join('/'),
-            content: pull.values([Buffer.from(i.toString())])
+            content: values([Buffer.from(i.toString())])
          }
          push.push(pushed)
          pending--
@@ -312,11 +315,11 @@ module.exports = (repo) => {
      const entries = {}
      pull(
        exporter(rootHash, ipld),
-        pull.asyncMap((node, callback) => {
+        asyncMap((node, callback) => {
          if (node.content) {
            pull(
              node.content,
-              pull.collect(collected)
+              collect(collected)
            )
          } else {
            entries[node.path] = node
@@ -329,7 +332,7 @@ module.exports = (repo) => {
            callback(null, node)
          }
        }),
-        pull.collect(collected)
+        collect(collected)
      )
 
      function collected (err, nodes) {
@@ -371,7 +374,7 @@ module.exports = (repo) => {
      const exportHash = new CID(rootHash).toBaseEncodedString() + '/big/big/2000'
      pull(
        exporter(exportHash, ipld),
-        pull.collect(collected)
+        collect(collected)
      )
 
      function collected (err, nodes) {
@@ -382,7 +385,7 @@ module.exports = (repo) => {
        ])
        pull(
          nodes[0].content,
-          pull.collect((err, content) => {
+          collect((err, content) => {
            expect(err).to.not.exist()
            expect(content.toString()).to.equal('2000')
            done()
