This repository was archived by the owner on Aug 12, 2020. It is now read-only.

Commit 3bb1039

Sanitize multihash input.

1 parent e40c42d

4 files changed: +41 −1 lines changed

package.json
src/clean-multihash.js
src/exporter.js
test/test-exporter.js

package.json (+1, -1)

@@ -37,7 +37,6 @@
   "aegir": "^5.0.1",
   "async": "^1.5.2",
   "block-stream2": "^1.1.0",
-  "bs58": "^3.0.0",
   "buffer-loader": "0.0.1",
   "chai": "^3.5.0",
   "concat-stream": "^1.5.1",
@@ -61,6 +60,7 @@
   "ipfs-unixfs": "^0.1.0",
   "is-ipfs": "^0.2.0",
   "isstream": "^0.1.2",
+  "multihashes": "^0.2.2",
   "readable-stream": "^1.1.13",
   "run-series": "^1.1.4",
   "streamifier": "^0.1.1",

src/clean-multihash.js (+14, new file)

@@ -0,0 +1,14 @@
+'use strict'
+
+const mh = require('multihashes')
+const isIPFS = require('is-ipfs')
+
+module.exports = function (multihash) {
+  if (!isIPFS.multihash(multihash)) {
+    throw new Error('not valid multihash')
+  }
+  if (Buffer.isBuffer(multihash)) {
+    return mh.toB58String(multihash)
+  }
+  return multihash
+}
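
For context, a minimal usage sketch of the new helper (not part of the commit; it assumes the helper is required from src/clean-multihash and reuses the example hash and the bs58 dependency that appear in the test below):

'use strict'

const bs58 = require('bs58')
const cleanMultihash = require('./src/clean-multihash')

// A base58-encoded multihash string passes through unchanged.
const hash = 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8'
console.log(cleanMultihash(hash) === hash) // true

// A raw multihash Buffer is normalized to its base58 string form.
const buf = new Buffer(bs58.decode(hash))
console.log(cleanMultihash(buf) === hash) // true

// Any input that fails the is-ipfs multihash check throws 'not valid multihash'.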

src/exporter.js (+2)

@@ -10,6 +10,7 @@ const Readable = require('readable-stream').Readable
 const pathj = require('path')
 const util = require('util')
 const fieldtrip = require('field-trip')
+const cleanMultihash = require('./clean-multihash')
 
 exports = module.exports = Exporter
 
@@ -24,6 +25,7 @@ function Exporter (hash, dagService, options) {
   if (!isIPFS.multihash(hash)) {
     throw new Error('not valid multihash')
   }
+  hash = cleanMultihash(hash)
 
   Readable.call(this, { objectMode: true })
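
With the hash = cleanMultihash(hash) call in place, the Exporter accepts either a base58 string or a raw multihash Buffer and reports the same base58 path either way. A rough sketch of the intent (not part of the commit; dagService is assumed to be a DAGService set up as in the test below):

const bs58 = require('bs58')
const exporter = require('./src/exporter')

const hash = 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8'
const asBuffer = new Buffer(bs58.decode(hash))

// Both forms resolve to the same node; each emitted file's `path`
// is reported as the base58 string, never the raw Buffer.
exporter(hash, dagService).on('data', (file) => console.log(file.path))
exporter(asBuffer, dagService).on('data', (file) => console.log(file.path))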

test/test-exporter.js (+24)

@@ -10,6 +10,7 @@ const UnixFS = require('ipfs-unixfs')
 const concat = require('concat-stream')
 const fs = require('fs')
 const path = require('path')
+const bs58 = require('bs58')
 
 let ds
 
@@ -24,6 +25,29 @@ module.exports = function (repo) {
       done()
     })
 
+    it('ensure hash inputs are sanitized', (done) => {
+      const hash = 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8'
+      const bs = new BlockService(repo)
+      const ds = new DAGService(bs)
+      const mhBuf = new Buffer(bs58.decode(hash))
+      ds.get(hash, (err, fetchedNode) => {
+        const unmarsh = UnixFS.unmarshal(fetchedNode.data)
+        expect(err).to.not.exist
+        const testExport = exporter(mhBuf, ds)
+        testExport.on('error', (err) => {
+          expect(err).to.not.exist
+        })
+        testExport.pipe(concat((files) => {
+          expect(files).to.be.length(1)
+          expect(files[0].path).to.equal(hash)
+          files[0].content.pipe(concat((bldata) => {
+            expect(bldata).to.deep.equal(unmarsh.data)
+            done()
+          }))
+        }))
+      })
+    })
+
     it('export a file with no links', (done) => {
       const hash = 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8'
       const bs = new BlockService(repo)
