
Commit 94f720c

Make ipfs.files.add return DAGNodes.
1 parent 126f5f3

3 files changed: 62 additions, 75 deletions

package.json

Lines changed: 4 additions & 2 deletions
@@ -71,6 +71,7 @@
     "ipfs-multipart": "^0.1.0",
     "ipfs-repo": "^0.8.0",
     "ipfs-unixfs-engine": "^0.8.0",
+    "isstream": "^0.1.2",
     "joi": "^8.0.5",
     "libp2p-ipfs": "^0.6.0",
     "libp2p-ipfs-browser": "^0.5.0",
@@ -89,7 +90,8 @@
     "run-parallel-limit": "^1.0.3",
     "run-series": "^1.1.4",
     "run-waterfall": "^1.1.3",
-    "temp": "^0.8.3"
+    "temp": "^0.8.3",
+    "through2": "^2.0.1"
   },
   "aegir": {
     "webpack": {
@@ -122,4 +124,4 @@
     "kumavis <[email protected]>",
     "nginnever <[email protected]>"
   ]
-}
+}
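
The two new dependencies support the src/core/ipfs/files.js changes below: isstream is used to detect Readable-stream input, and through2 builds the object-mode transform that maps importer output to DAGNodes. A rough standalone illustration of how those helpers behave (not code from this commit):

const Readable = require('stream').Readable
const isStream = require('isstream')
const through = require('through2')

// isstream distinguishes readable, writable and duplex streams.
console.log(isStream.isReadable(new Readable())) // true
console.log(isStream.isReadable(new Buffer('nope'))) // false

// through2.obj builds an object-mode Transform from a single function.
const upperPaths = through.obj(function (file, enc, next) {
  // Re-emit each object with its path upper-cased.
  this.push({ path: file.path.toUpperCase(), content: file.content })
  next()
})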

src/core/ipfs/files.js

Lines changed: 46 additions & 16 deletions
@@ -3,38 +3,68 @@
 const Importer = require('ipfs-unixfs-engine').importer
 const Exporter = require('ipfs-unixfs-engine').exporter
 const UnixFS = require('ipfs-unixfs')
+const bs58 = require('bs58')
+const through = require('through2')
+const isStream = require('isstream')
+const promisify = require('promisify-es6')
 
 module.exports = function files (self) {
   return {
-    add: (arr, callback) => {
-      if (typeof arr === 'function') {
-        callback = arr
-        arr = undefined
+    createAddStream: promisify((callback) => {
+      // TODO: wip
+      if (data === undefined) {
+        return new Importer(self._dagS)
       }
-      if (callback === undefined) {
-        callback = function noop () {}
+    }),
+
+    add: promisify((data, callback) => {
+      // Buffer input
+      if (Buffer.isBuffer(data)) {
+        data = [{
+          path: '',
+          content: data
+        }]
       }
-      if (arr === undefined) {
-        return new Importer(self._dagS)
+      // Readable stream input
+      if (isStream.isReadable(data)) {
+        data = [{
+          path: '',
+          content: data
+        }]
+      }
+      if (!callback || typeof callback !== 'function') {
+        callback = function noop () {}
+      }
+      if (!Array.isArray(data)) {
+        return callback(new Error('"data" must be an array of { path: string, content: Buffer|Readable } or Buffer or Readable'))
       }
 
       const i = new Importer(self._dagS)
       const res = []
 
-      i.on('data', (info) => {
-        res.push(info)
-      })
-
-      i.once('end', () => {
+      // Transform file info tuples to DAGNodes
+      i.pipe(through.obj(function transform (info, enc, next) {
+        const mh = bs58.encode(info.multihash).toString()
+        self._dagS.get(mh, (err, node) => {
+          if (err) return callback(err)
+          var obj = {
+            path: info.path || mh,
+            node: node
+          }
+          res.push(obj)
+          next()
+        })
+      }, function end (done) {
         callback(null, res)
-      })
+      }))
 
-      arr.forEach((tuple) => {
+      data.forEach((tuple) => {
        i.write(tuple)
       })
 
       i.end()
-    },
+    }),
+
     cat: (hash, callback) => {
       self._dagS.get(hash, (err, fetchedNode) => {
         if (err) {
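
With this change, ipfs.files.add accepts a Buffer, a Readable stream, or an array of { path, content } entries, and each result carries the imported DAGNode (fetched back from the DAG service) instead of the importer's raw info. A minimal usage sketch, assuming a repo set up the same way as in the test file below (the input string and logging are illustrative only):

const IPFS = require('../../src/core')

const ipfs = new IPFS(require('./repo-path'))
ipfs.load(() => {
  ipfs.files.add(new Buffer('hello world\n'), (err, res) => {
    if (err) throw err
    // Each entry is { path, node }, where node is a DAGNode
    // retrieved from the DAG service, not the raw importer tuple.
    res.forEach((file) => {
      console.log(file.path, file.node)
    })
  })
})

Since add is wrapped in promisify-es6, the same call can also be made without a callback and consumed as a promise.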

test/core-tests/test-files.js

Lines changed: 12 additions & 57 deletions
@@ -1,64 +1,19 @@
 /* eslint-env mocha */
 'use strict'
 
-const bl = require('bl')
-const expect = require('chai').expect
-const Readable = require('stream').Readable
-const bs58 = require('bs58')
-
+const test = require('/home/noffle/forks/interface-ipfs-core')
 const IPFS = require('../../src/core')
 
-describe('files', () => {
-  let ipfs
-
-  before((done) => {
-    ipfs = new IPFS(require('./repo-path'))
-    ipfs.load(done)
-  })
-
-  it('add', (done) => {
-    const buffered = new Buffer('some data')
-    const rs = new Readable()
-    rs.push(buffered)
-    rs.push(null)
-    const arr = []
-    const filePair = {path: 'data.txt', stream: rs}
-    arr.push(filePair)
-    ipfs.files.add(arr, (err, res) => {
-      expect(err).to.not.exist
-      expect(res[0].path).to.equal('data.txt')
-      expect(res[0].size).to.equal(17)
-      expect(bs58.encode(res[0].multihash).toString()).to.equal('QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS')
-      done()
+const common = {
+  setup: function (cb) {
+    const ipfs = new IPFS(require('./repo-path'))
+    ipfs.load(() => {
+      cb(null, ipfs)
     })
-  })
+  },
+  teardown: function (cb) {
+    cb()
+  }
+}
 
-  it('cat', (done) => {
-    const hash = 'QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o'
-    ipfs.files.cat(hash, (err, res) => {
-      expect(err).to.not.exist
-      res.on('data', (data) => {
-        data.stream.pipe(bl((err, bldata) => {
-          expect(err).to.not.exist
-          expect(bldata.toString()).to.equal('hello world\n')
-          done()
-        }))
-      })
-    })
-  })
-
-  it('get', (done) => {
-    // TODO create non-trival get test
-    const hash = 'QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o'
-    ipfs.files.get(hash, (err, res) => {
-      expect(err).to.not.exist
-      res.on('data', (data) => {
-        data.stream.pipe(bl((err, bldata) => {
-          expect(err).to.not.exist
-          expect(bldata.toString()).to.equal('hello world\n')
-          done()
-        }))
-      })
-    })
-  })
-})
+test.files(common)
