-
{this.state.added_file_hash}
diff --git a/gulpfile.js b/gulpfile.js
deleted file mode 100644
index a9ff11eb5..000000000
--- a/gulpfile.js
+++ /dev/null
@@ -1,12 +0,0 @@
-'use strict'
-
-const gulp = require('gulp')
-
-require('./test/ipfs-factory/tasks')
-
-gulp.task('test:node:before', ['factory:start'])
-gulp.task('test:node:after', ['factory:stop'])
-gulp.task('test:browser:before', ['factory:start'])
-gulp.task('test:browser:after', ['factory:stop'])
-
-require('aegir/gulp')(gulp)
diff --git a/package.json b/package.json
index 5e25107de..c58c7fefc 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "ipfs-api",
- "version": "14.2.1",
+ "version": "14.3.4",
"description": "A client library for the IPFS HTTP API. Follows interface-ipfs-core spec",
"main": "src/index.js",
"browser": {
@@ -10,16 +10,16 @@
"http": "stream-http"
},
"scripts": {
- "test": "gulp test",
- "test:node": "gulp test:node",
- "test:browser": "gulp test:browser",
- "lint": "aegir-lint",
- "build": "gulp build",
- "release": "gulp release",
- "release-minor": "gulp release --type minor",
- "release-major": "gulp release --type major",
- "coverage": "gulp coverage",
- "coverage-publish": "aegir-coverage publish"
+ "test": "aegir test --no-parallel",
+ "test:node": "aegir test --target node --no-parallel",
+ "test:browser": "aegir test --target browser",
+ "lint": "aegir lint",
+ "build": "aegir build",
+ "release": "aegir release --no-parallel",
+ "release-minor": "aegir release --type minor --no-parallel",
+ "release-major": "aegir release --type major --no-parallel",
+ "coverage": "aegir coverage --no-parallel --timeout 100000",
+ "coverage-publish": "aegir coverage --provider coveralls --no-parallel --timeout 100000"
},
"dependencies": {
"async": "^2.5.0",
@@ -31,18 +31,18 @@
"glob": "^7.1.2",
"glob-escape": "0.0.2",
"ipfs-block": "~0.6.0",
- "ipfs-unixfs": "~0.1.12",
- "ipld-dag-pb": "~0.11.0",
+ "ipfs-unixfs": "~0.1.13",
+ "ipld-dag-pb": "~0.11.2",
"is-ipfs": "^0.3.0",
"is-stream": "^1.1.0",
"lru-cache": "^4.1.1",
- "multiaddr": "^2.3.0",
+ "multiaddr": "^3.0.1",
"multihashes": "~0.4.9",
"multipart-stream": "^2.0.1",
"ndjson": "^1.5.0",
"once": "^1.4.0",
- "peer-id": "~0.9.0",
- "peer-info": "~0.10.0",
+ "peer-id": "~0.10.1",
+ "peer-info": "~0.11.0",
"promisify-es6": "^1.0.3",
"pump": "^1.0.2",
"qs": "^6.5.0",
@@ -52,7 +52,7 @@
"tar-stream": "^1.5.4"
},
"engines": {
- "node": ">=4.0.0",
+ "node": ">=6.0.0",
"npm": ">=3.0.0"
},
"repository": {
@@ -60,14 +60,14 @@
"url": "https://github.com/ipfs/js-ipfs-api"
},
"devDependencies": {
- "aegir": "^11.0.2",
+ "aegir": "^12.0.6",
"chai": "^4.1.2",
"dirty-chai": "^2.0.1",
"eslint-plugin-react": "^7.3.0",
"gulp": "^3.9.1",
"hapi": "^16.5.2",
- "interface-ipfs-core": "~0.31.16",
- "ipfsd-ctl": "~0.22.0",
+ "interface-ipfs-core": "~0.31.19",
+ "ipfsd-ctl": "~0.23.0",
"pre-commit": "^1.2.2",
"socket.io": "^2.0.3",
"socket.io-client": "^2.0.3",
@@ -136,4 +136,4 @@
"url": "https://github.com/ipfs/js-ipfs-api/issues"
},
"homepage": "https://github.com/ipfs/js-ipfs-api"
-}
\ No newline at end of file
+}
diff --git a/src/config/replace.js b/src/config/replace.js
index 14f4294ca..f6a41df97 100644
--- a/src/config/replace.js
+++ b/src/config/replace.js
@@ -6,7 +6,7 @@ const promisify = require('promisify-es6')
module.exports = (send) => {
return promisify((config, callback) => {
if (typeof config === 'object') {
- config = streamifier.createReadStream(new Buffer(JSON.stringify(config)))
+ config = streamifier.createReadStream(Buffer.from(JSON.stringify(config)))
}
send({
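Note: throughout this patch, deprecated new Buffer(...) constructor calls are replaced with the Buffer.from()/Buffer.alloc() factory methods available since Node.js 4.5 (and enforced by the newer aegir lint rules). A minimal illustration, not part of the diff itself:

// new Buffer(string) and Buffer.from(string) yield the same bytes; the constructor
// form is deprecated because new Buffer(number) returned uninitialized memory.
const legacy = new Buffer(JSON.stringify({ a: 1 }))  // deprecated
const safe = Buffer.from(JSON.stringify({ a: 1 }))   // preferred
console.log(legacy.equals(safe))     // true
console.log(Buffer.alloc(0).length)  // 0 — the replacement for new Buffer(0) used in object/new.js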
diff --git a/src/files/add.js b/src/files/add.js
index d5aac24c5..81e8734f4 100644
--- a/src/files/add.js
+++ b/src/files/add.js
@@ -21,7 +21,27 @@ module.exports = (send) => {
return callback(new Error('"files" must be a buffer, readable stream, or array of objects'))
}
- const request = { path: 'add', files: files, qs: opts }
+ const qs = {}
+
+ if (opts['cid-version'] != null) {
+ qs['cid-version'] = opts['cid-version']
+ } else if (opts.cidVersion != null) {
+ qs['cid-version'] = opts.cidVersion
+ }
+
+ if (opts['raw-leaves'] != null) {
+ qs['raw-leaves'] = opts['raw-leaves']
+ } else if (opts.rawLeaves != null) {
+ qs['raw-leaves'] = opts.rawLeaves
+ }
+
+ if (opts.hash != null) {
+ qs.hash = opts.hash
+ } else if (opts.hashAlg != null) {
+ qs.hash = opts.hashAlg
+ }
+
+ const request = { path: 'add', files: files, qs: qs }
// Transform the response stream to DAGNode values
const transform = (res, callback) => DAGNodeStream.streamToValue(send, res, callback)
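Note: the normalization added above maps both option spellings onto the query-string names the HTTP API expects. An illustrative usage sketch (assuming `ipfs` is a connected ipfs-api instance; values are arbitrary):

// camelCase aliases...
ipfs.files.add(Buffer.from('hello'), { cidVersion: 1, rawLeaves: false, hashAlg: 'sha2-256' }, (err, res) => { /* ... */ })
// ...and the kebab-case names are both accepted and end up in qs['cid-version'], qs['raw-leaves'] and qs.hash
ipfs.files.add(Buffer.from('hello'), { 'cid-version': 1, 'raw-leaves': false, hash: 'sha2-256' }, (err, res) => { /* ... */ })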
diff --git a/src/object/data.js b/src/object/data.js
index 9ba2ef100..ace3cc992 100644
--- a/src/object/data.js
+++ b/src/object/data.js
@@ -2,7 +2,7 @@
const promisify = require('promisify-es6')
const streamToValue = require('../utils/stream-to-value')
-const cleanMultihash = require('../utils/clean-multihash')
+const CID = require('cids')
const LRU = require('lru-cache')
const lruOptions = {
max: 128
@@ -11,7 +11,7 @@ const lruOptions = {
const cache = LRU(lruOptions)
module.exports = (send) => {
- return promisify((multihash, options, callback) => {
+ return promisify((cid, options, callback) => {
if (typeof options === 'function') {
callback = options
options = {}
@@ -20,13 +20,16 @@ module.exports = (send) => {
options = {}
}
+ let cidB58Str
+
try {
- multihash = cleanMultihash(multihash, options)
+ cid = new CID(cid)
+ cidB58Str = cid.toBaseEncodedString()
} catch (err) {
return callback(err)
}
- const node = cache.get(multihash)
+ const node = cache.get(cidB58Str)
if (node) {
return callback(null, node.data)
@@ -34,7 +37,7 @@ module.exports = (send) => {
send({
path: 'object/data',
- args: multihash
+ args: cidB58Str
}, (err, result) => {
if (err) {
return callback(err)
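Note: here (and in object/get below) the old clean-multihash helper is replaced by the cids module, so the argument may be a CID instance, a base58 string or a buffer, and the base58-encoded form becomes the LRU cache key. A sketch using the fixture hash from the tests in this patch (assuming `ipfs` is an ipfs-api instance):

const CID = require('cids')
const hash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP'
// both calls normalize to the same cache key via new CID(x).toBaseEncodedString()
ipfs.object.data(hash, (err, data) => { /* ... */ })
ipfs.object.data(new CID(hash), (err, data) => { /* ... */ })
// malformed input is now reported through the callback instead of being thrown
ipfs.object.data('not-a-cid', (err) => { /* err is set */ })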
diff --git a/src/object/get.js b/src/object/get.js
index 319ab12ce..82b21bb58 100644
--- a/src/object/get.js
+++ b/src/object/get.js
@@ -5,7 +5,7 @@ const dagPB = require('ipld-dag-pb')
const DAGNode = dagPB.DAGNode
const DAGLink = dagPB.DAGLink
const bs58 = require('bs58')
-const cleanMultihash = require('../utils/clean-multihash')
+const CID = require('cids')
const LRU = require('lru-cache')
const lruOptions = {
max: 128
@@ -14,7 +14,7 @@ const lruOptions = {
const cache = LRU(lruOptions)
module.exports = (send) => {
- return promisify((multihash, options, callback) => {
+ return promisify((cid, options, callback) => {
if (typeof options === 'function') {
callback = options
options = {}
@@ -24,13 +24,16 @@ module.exports = (send) => {
options = {}
}
+ let cidB58Str
+
try {
- multihash = cleanMultihash(multihash, options)
+ cid = new CID(cid)
+ cidB58Str = cid.toBaseEncodedString()
} catch (err) {
return callback(err)
}
- const node = cache.get(multihash)
+ const node = cache.get(cidB58Str)
if (node) {
return callback(null, node)
@@ -38,21 +41,21 @@ module.exports = (send) => {
send({
path: 'object/get',
- args: multihash
+ args: cidB58Str
}, (err, result) => {
if (err) {
return callback(err)
}
const links = result.Links.map((l) => {
- return new DAGLink(l.Name, l.Size, new Buffer(bs58.decode(l.Hash)))
+ return new DAGLink(l.Name, l.Size, Buffer.from(bs58.decode(l.Hash)))
})
DAGNode.create(result.Data, links, (err, node) => {
if (err) {
return callback(err)
}
- cache.set(multihash, node)
+ cache.set(cidB58Str, node)
callback(null, node)
})
})
diff --git a/src/object/links.js b/src/object/links.js
index e3ab1c7dd..9b4f6284e 100644
--- a/src/object/links.js
+++ b/src/object/links.js
@@ -46,7 +46,7 @@ module.exports = (send) => {
if (result.Links) {
links = result.Links.map((l) => {
- return new DAGLink(l.Name, l.Size, new Buffer(bs58.decode(l.Hash)))
+ return new DAGLink(l.Name, l.Size, Buffer.from(bs58.decode(l.Hash)))
})
}
callback(null, links)
diff --git a/src/object/new.js b/src/object/new.js
index c9a08ad02..e508be129 100644
--- a/src/object/new.js
+++ b/src/object/new.js
@@ -27,7 +27,7 @@ module.exports = (send) => {
}
data = (new Unixfs('directory')).marshal()
} else {
- data = new Buffer(0)
+ data = Buffer.alloc(0)
}
DAGNode.create(data, (err, node) => {
diff --git a/src/object/put.js b/src/object/put.js
index e7ef76c39..537c9d34f 100644
--- a/src/object/put.js
+++ b/src/object/put.js
@@ -52,7 +52,7 @@ module.exports = (send) => {
if (Buffer.isBuffer(obj) && options.enc) {
buf = obj
} else {
- buf = new Buffer(JSON.stringify(tmpObj))
+ buf = Buffer.from(JSON.stringify(tmpObj))
}
const enc = options.enc || 'json'
@@ -87,7 +87,7 @@ module.exports = (send) => {
})
return
} else {
- DAGNode.create(new Buffer(obj.Data), obj.Links, (err, _node) => {
+ DAGNode.create(Buffer.from(obj.Data), obj.Links, (err, _node) => {
if (err) {
return callback(err)
}
diff --git a/src/utils/get-dagnode.js b/src/utils/get-dagnode.js
index 02cc8e139..1cfd1dcd7 100644
--- a/src/utils/get-dagnode.js
+++ b/src/utils/get-dagnode.js
@@ -3,51 +3,52 @@
const DAGNode = require('ipld-dag-pb').DAGNode
const parallel = require('async/parallel')
const CID = require('cids')
-const mh = require('multihashes')
const streamToValue = require('./stream-to-value')
module.exports = function (send, hash, callback) {
- // Until js-ipfs supports object/get and object/data by CID
- // we need to convert our CID or multihash hash into a multihash
- const multihash = mh.toB58String(new CID(hash).multihash)
+ let cid
+
+ try {
+ cid = new CID(hash)
+ } catch (err) {
+ return callback(err)
+ }
// Retrieve the object and its data in parallel, then produce a DAGNode
// instance using this information.
parallel([
- function get (done) {
+ (done) => {
send({
path: 'object/get',
- args: multihash
+ args: cid.toBaseEncodedString()
}, done)
},
-
- function data (done) {
+ (done) => {
// WORKAROUND: request the object's data separately, since raw bits in JSON
// are interpreted as UTF-8 and corrupt the data.
// See https://github.com/ipfs/go-ipfs/issues/1582 for more details.
send({
path: 'object/data',
- args: multihash
+ args: cid.toBaseEncodedString()
}, done)
- }],
-
- function done (err, res) {
- if (err) {
- return callback(err)
- }
+ }
+ ], (err, res) => {
+ if (err) {
+ return callback(err)
+ }
- var object = res[0]
- var stream = res[1]
+ var object = res[0]
+ var stream = res[1]
- if (Buffer.isBuffer(stream)) {
- DAGNode.create(stream, object.Links, callback)
- } else {
- streamToValue(stream, (err, data) => {
- if (err) {
- return callback(err)
- }
- DAGNode.create(data, object.Links, callback)
- })
- }
- })
+ if (Buffer.isBuffer(stream)) {
+ DAGNode.create(stream, object.Links, callback)
+ } else {
+ streamToValue(stream, (err, data) => {
+ if (err) {
+ return callback(err)
+ }
+ DAGNode.create(data, object.Links, callback)
+ })
+ }
+ })
}
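Note: the rewritten helper validates the hash with new CID(...) up front, so malformed input reaches the callback as an error instead of throwing, then requests object/get and object/data in parallel before assembling the DAGNode. A hypothetical call (the module is internal; `send` is the request function the client passes around):

const getDagNode = require('./src/utils/get-dagnode')
getDagNode(send, 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', (err, node) => {
  if (err) return console.error(err)  // includes CID parse errors
  console.log(node.links.length)
})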
diff --git a/src/utils/get-files-stream.js b/src/utils/get-files-stream.js
index f931fb5c0..b283cee7a 100644
--- a/src/utils/get-files-stream.js
+++ b/src/utils/get-files-stream.js
@@ -25,9 +25,9 @@ function headers (file) {
function strip (name, base) {
const smallBase = base
- .split('/')
- .slice(0, -1)
- .join('/') + '/'
+ .split('/')
+ .slice(0, -1)
+ .join('/') + '/'
return name.replace(smallBase, '')
}
diff --git a/src/utils/pubsub-message-utils.js b/src/utils/pubsub-message-utils.js
index ae7ec6bab..486632f53 100644
--- a/src/utils/pubsub-message-utils.js
+++ b/src/utils/pubsub-message-utils.js
@@ -27,9 +27,9 @@ function deserializeFromBase64 (obj) {
}
return {
- from: bs58.encode(new Buffer(obj.from, 'base64')).toString(),
- seqno: new Buffer(obj.seqno, 'base64'),
- data: new Buffer(obj.data, 'base64'),
+ from: bs58.encode(Buffer.from(obj.from, 'base64')).toString(),
+ seqno: Buffer.from(obj.seqno, 'base64'),
+ data: Buffer.from(obj.data, 'base64'),
topicCIDs: obj.topicIDs || obj.topicCIDs
}
}
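Note: the helper above decodes the HTTP API's base64-encoded pubsub message fields into buffers and a base58 peer id. The shape is taken from the diff; the payload below is made up for illustration only:

const msg = deserializeFromBase64({
  from: Buffer.from([0x12, 0x20, 0xaa]).toString('base64'), // hypothetical peer-id multihash bytes
  seqno: Buffer.from([0, 1]).toString('base64'),
  data: Buffer.from('hello').toString('base64'),
  topicIDs: ['pubsub-tests']
})
// msg.data.toString() === 'hello'; msg.from is the bs58-encoded peer id; msg.seqno is a 2-byte Buffer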
diff --git a/test/bitswap.spec.js b/test/bitswap.spec.js
index 6ed821da3..35cd7a855 100644
--- a/test/bitswap.spec.js
+++ b/test/bitswap.spec.js
@@ -7,11 +7,12 @@ const expect = chai.expect
chai.use(dirtyChai)
const FactoryClient = require('./ipfs-factory/client')
-describe('.bitswap', () => {
+describe('.bitswap', function () {
+ this.timeout(20 * 1000) // slow CI
let ipfs
let fc
- before(function (done) {
+ before((done) => {
this.timeout(20 * 1000) // slow CI
fc = new FactoryClient()
fc.spawnNode((err, node) => {
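Note: this and the following spec files hoist the suite timeout from the before hook up to the describe block. That requires a regular function callback, because Mocha exposes timeout() on the suite's `this`, which an arrow function does not bind:

describe('.example', () => {
  // `this` is not the Mocha suite here, so this.timeout(...) is unavailable
})
describe('.example', function () {
  this.timeout(20 * 1000) // applies to every hook and test in the suite (slow CI)
})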
diff --git a/test/bootstrap.spec.js b/test/bootstrap.spec.js
index 041b069d9..65b053485 100644
--- a/test/bootstrap.spec.js
+++ b/test/bootstrap.spec.js
@@ -11,12 +11,13 @@ const FactoryClient = require('./ipfs-factory/client')
const invalidArg = 'this/Is/So/Invalid/'
const validIp4 = '/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z'
-describe('.bootstrap', () => {
+describe('.bootstrap', function () {
+ this.timeout(100 * 1000)
+
let ipfs
let fc
- before(function (done) {
- this.timeout(20 * 1000) // slow CI
+ before((done) => {
fc = new FactoryClient()
fc.spawnNode((err, node) => {
expect(err).to.not.exist()
@@ -31,7 +32,9 @@ describe('.bootstrap', () => {
let peers
- describe('Callback API', () => {
+ describe('Callback API', function () {
+ this.timeout(100 * 1000)
+
describe('.add', () => {
it('returns an error when called with an invalid arg', (done) => {
ipfs.bootstrap.add(invalidArg, (err) => {
@@ -112,7 +115,9 @@ describe('.bootstrap', () => {
})
})
- describe('Promise API', () => {
+ describe('Promise API', function () {
+ this.timeout(100 * 1000)
+
describe('.add', () => {
it('returns an error when called without args or options', () => {
return ipfs.bootstrap.add(null)
diff --git a/test/commands.spec.js b/test/commands.spec.js
index c5553d83b..5962d47fb 100644
--- a/test/commands.spec.js
+++ b/test/commands.spec.js
@@ -8,12 +8,13 @@ chai.use(dirtyChai)
const FactoryClient = require('./ipfs-factory/client')
-describe('.commands', () => {
+describe('.commands', function () {
+ this.timeout(20 * 1000)
+
let ipfs
let fc
- before(function (done) {
- this.timeout(20 * 1000) // slow CI
+ before((done) => {
fc = new FactoryClient()
fc.spawnNode((err, node) => {
expect(err).to.not.exist()
diff --git a/test/diag.spec.js b/test/diag.spec.js
index 94ee50bb3..7a74e2be5 100644
--- a/test/diag.spec.js
+++ b/test/diag.spec.js
@@ -7,12 +7,13 @@ const dirtyChai = require('dirty-chai')
const expect = chai.expect
chai.use(dirtyChai)
-describe('.diag', () => {
+describe('.diag', function () {
+ this.timeout(50 * 1000)
+
let ipfs
let fc
- before(function (done) {
- this.timeout(20 * 1000) // slow CI
+ before((done) => {
fc = new FactoryClient()
fc.spawnNode((err, node) => {
expect(err).to.not.exist()
diff --git a/test/files.spec.js b/test/files.spec.js
index d5712fe62..629e5e9b0 100644
--- a/test/files.spec.js
+++ b/test/files.spec.js
@@ -8,6 +8,8 @@ const expect = chai.expect
chai.use(dirtyChai)
const isNode = require('detect-node')
const loadFixture = require('aegir/fixtures')
+const mh = require('multihashes')
+const CID = require('cids')
const FactoryClient = require('./ipfs-factory/client')
@@ -15,12 +17,27 @@ const testfile = isNode
? loadFixture(__dirname, '/fixtures/testfile.txt')
: loadFixture(__dirname, 'fixtures/testfile.txt')
-describe('.files (the MFS API part)', () => {
+// TODO: Test against all algorithms Object.keys(mh.names)
+// This subset is known to work with both go-ipfs and js-ipfs as of 2017-09-05
+const HASH_ALGS = [
+ 'sha1',
+ 'sha2-256',
+ 'sha2-512',
+ 'keccak-224',
+ 'keccak-256',
+ 'keccak-384',
+ 'keccak-512'
+]
+
+describe('.files (the MFS API part)', function () {
+ this.timeout(120 * 1000)
+
let ipfs
let fc
- before(function (done) {
- this.timeout(20 * 1000) // slow CI
+ const expectedMultihash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP'
+
+ before((done) => {
fc = new FactoryClient()
fc.spawnNode((err, node) => {
expect(err).to.not.exist()
@@ -31,10 +48,10 @@ describe('.files (the MFS API part)', () => {
after((done) => fc.dismantle(done))
- describe('Callback API', () => {
- it('add file for testing', (done) => {
- const expectedMultihash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP'
+ describe('Callback API', function () {
+ this.timeout(120 * 1000)
+ it('add file for testing', (done) => {
ipfs.files.add(testfile, (err, res) => {
expect(err).to.not.exist()
@@ -46,19 +63,49 @@ describe('.files (the MFS API part)', () => {
})
it('files.add with cid-version=1 and raw-leaves=false', (done) => {
- const expectedHash = 'zdj7Wh9x6gXdg4UAqhRYnjBTw9eJF7hvzUU4HjpnZXHYQz9jK'
+ const expectedCid = 'zdj7Wh9x6gXdg4UAqhRYnjBTw9eJF7hvzUU4HjpnZXHYQz9jK'
const options = { 'cid-version': 1, 'raw-leaves': false }
ipfs.files.add(testfile, options, (err, res) => {
expect(err).to.not.exist()
expect(res).to.have.length(1)
- expect(res[0].hash).to.equal(expectedHash)
- expect(res[0].path).to.equal(expectedHash)
+ expect(res[0].hash).to.equal(expectedCid)
+ expect(res[0].path).to.equal(expectedCid)
done()
})
})
+ it('files.add with options', (done) => {
+ ipfs.files.add(testfile, { pin: false }, (err, res) => {
+ expect(err).to.not.exist()
+
+ expect(res).to.have.length(1)
+ expect(res[0].hash).to.equal(expectedMultihash)
+ expect(res[0].path).to.equal(expectedMultihash)
+ done()
+ })
+ })
+
+ HASH_ALGS.forEach((name) => {
+ it(`files.add with hash=${name} and raw-leaves=false`, (done) => {
+ const content = String(Math.random() + Date.now())
+ const file = {
+ path: content + '.txt',
+ content: Buffer.from(content)
+ }
+ const options = { hash: name, 'raw-leaves': false }
+
+ ipfs.files.add([file], options, (err, res) => {
+ if (err) return done(err)
+ expect(res).to.have.length(1)
+ const cid = new CID(res[0].hash)
+ expect(mh.decode(cid.multihash).name).to.equal(name)
+ done()
+ })
+ })
+ })
+
it('files.mkdir', (done) => {
ipfs.files.mkdir('/test-folder', done)
})
@@ -83,7 +130,7 @@ describe('.files (the MFS API part)', () => {
it('files.write', (done) => {
ipfs.files
- .write('/test-folder/test-file-2.txt', new Buffer('hello world'), {create: true}, (err) => {
+ .write('/test-folder/test-file-2.txt', Buffer.from('hello world'), {create: true}, (err) => {
expect(err).to.not.exist()
ipfs.files.read('/test-folder/test-file-2.txt', (err, stream) => {
@@ -105,7 +152,7 @@ describe('.files (the MFS API part)', () => {
it('files.write without options', (done) => {
ipfs.files
- .write('/test-folder/test-file-2.txt', new Buffer('hello world'), (err) => {
+ .write('/test-folder/test-file-2.txt', Buffer.from('hello world'), (err) => {
expect(err).to.not.exist()
ipfs.files.read('/test-folder/test-file-2.txt', (err, stream) => {
@@ -168,7 +215,7 @@ describe('.files (the MFS API part)', () => {
buf += data
})
.on('end', () => {
- expect(new Buffer(buf)).to.deep.equal(testfile)
+ expect(Buffer.from(buf)).to.deep.equal(testfile)
done()
})
})
@@ -183,7 +230,18 @@ describe('.files (the MFS API part)', () => {
})
})
- describe('Promise API', () => {
+ describe('Promise API', function () {
+ this.timeout(120 * 1000)
+
+ it('files.add', () => {
+ return ipfs.files.add(testfile)
+ .then((res) => {
+ expect(res).to.have.length(1)
+ expect(res[0].hash).to.equal(expectedMultihash)
+ expect(res[0].path).to.equal(expectedMultihash)
+ })
+ })
+
it('files.add with cid-version=1 and raw-leaves=false', () => {
const expectedHash = 'zdj7Wh9x6gXdg4UAqhRYnjBTw9eJF7hvzUU4HjpnZXHYQz9jK'
const options = { 'cid-version': 1, 'raw-leaves': false }
@@ -196,13 +254,43 @@ describe('.files (the MFS API part)', () => {
})
})
+ it('files.add with options', () => {
+ return ipfs.files.add(testfile, { pin: false })
+ .then((res) => {
+ expect(res).to.have.length(1)
+ expect(res[0].hash).to.equal(expectedMultihash)
+ expect(res[0].path).to.equal(expectedMultihash)
+ })
+ })
+
+ HASH_ALGS.forEach((name) => {
+ it(`files.add with hash=${name} and raw-leaves=false`, () => {
+ const content = String(Math.random() + Date.now())
+ const file = {
+ path: content + '.txt',
+ content: Buffer.from(content)
+ }
+ const options = { hash: name, 'raw-leaves': false }
+
+ return ipfs.files.add([file], options)
+ .then((res) => {
+ expect(res).to.have.length(1)
+ const cid = new CID(res[0].hash)
+ expect(mh.decode(cid.multihash).name).to.equal(name)
+ })
+ })
+ })
+
it('files.mkdir', () => {
return ipfs.files.mkdir('/test-folder')
})
it('files.cp', () => {
return ipfs.files
- .cp(['/ipfs/Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', '/test-folder/test-file'])
+ .cp([
+ '/ipfs/Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP',
+ '/test-folder/test-file'
+ ])
})
it('files.ls', () => {
@@ -214,7 +302,7 @@ describe('.files (the MFS API part)', () => {
it('files.write', (done) => {
ipfs.files
- .write('/test-folder/test-file-2.txt', new Buffer('hello world'), {create: true})
+ .write('/test-folder/test-file-2.txt', Buffer.from('hello world'), {create: true})
.then(() => {
return ipfs.files.read('/test-folder/test-file-2.txt')
})
@@ -237,7 +325,7 @@ describe('.files (the MFS API part)', () => {
it('files.write without options', (done) => {
ipfs.files
- .write('/test-folder/test-file-2.txt', new Buffer('hello world'))
+ .write('/test-folder/test-file-2.txt', Buffer.from('hello world'))
.then(() => {
return ipfs.files.read('/test-folder/test-file-2.txt')
})
@@ -293,7 +381,7 @@ describe('.files (the MFS API part)', () => {
buf += data
})
.on('end', () => {
- expect(new Buffer(buf)).to.deep.equal(testfile)
+ expect(Buffer.from(buf)).to.eql(testfile)
done()
})
})
diff --git a/test/get.spec.js b/test/get.spec.js
index 66cc477be..f401235d1 100644
--- a/test/get.spec.js
+++ b/test/get.spec.js
@@ -25,12 +25,13 @@ if (isNode) {
testfileBig = fs.createReadStream(tfbPath, { bufferSize: 128 })
}
-describe('.get', () => {
+describe('.get', function () {
+ this.timeout(80 * 1000)
+
let ipfs
let fc
- before(function (done) {
- this.timeout(20 * 1000) // slow CI
+ before((done) => {
fc = new FactoryClient()
fc.spawnNode((err, node) => {
expect(err).to.not.exist()
@@ -42,6 +43,8 @@ describe('.get', () => {
after((done) => fc.dismantle(done))
describe('Callback API', () => {
+ this.timeout(80 * 1000)
+
it('add file for testing', (done) => {
const expectedMultihash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP'
@@ -190,6 +193,8 @@ describe('.get', () => {
})
describe('Promise API', () => {
+ this.timeout(80 * 1000)
+
it('get', (done) => {
ipfs.get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
.then((files) => {
@@ -206,9 +211,9 @@ describe('.get', () => {
})
})
})
- .catch((err) => {
- expect(err).to.not.exist()
- })
+ .catch((err) => {
+ expect(err).to.not.exist()
+ })
})
})
})
diff --git a/test/ipfs-factory/client.js b/test/ipfs-factory/client.js
index 5d2685b5c..d30b08966 100644
--- a/test/ipfs-factory/client.js
+++ b/test/ipfs-factory/client.js
@@ -49,7 +49,11 @@ function Factory () {
}
this.dismantle = function (callback) {
- ioC.once('fc-nodes-shutdown', callback)
+ ioC.once('fc-nodes-shutdown', (err) => {
+ ioC.disconnect()
+ sioConnected = false
+ callback(err)
+ })
ioC.emit('fs-dismantle')
}
}
diff --git a/test/ipfs-factory/tasks.js b/test/ipfs-factory/tasks.js
index 97847273e..eca18f8f3 100644
--- a/test/ipfs-factory/tasks.js
+++ b/test/ipfs-factory/tasks.js
@@ -1,20 +1,22 @@
'use strict'
-const gulp = require('gulp')
const factoryServer = require('./server')
let factory
-gulp.task('factory:start', (done) => {
- factoryServer((err, http) => {
- if (err) {
- throw err
- }
- factory = http
- done()
- })
-})
-
-gulp.task('factory:stop', (done) => {
- factory.stop(done)
-})
+module.exports = {
+ start (done) {
+ factoryServer((err, http) => {
+ if (err) {
+ return done(err)
+ }
+ factory = http
+ done()
+ })
+ },
+ stop (done) {
+ factory.stop({
+ timeout: 1
+ }, done)
+ }
+}
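Note: with gulp removed, the factory lifecycle is exported as plain start/stop functions, presumably to be wired into aegir's test hooks; they can be driven from any runner. A usage sketch based only on the signatures above:

const factory = require('./test/ipfs-factory/tasks')
factory.start((err) => {
  if (err) throw err
  // ... run the suites against the spawned factory server ...
  factory.stop((err) => {
    if (err) throw err // server stopped (using the short stop timeout set above)
  })
})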
diff --git a/test/key.spec.js b/test/key.spec.js
index 7521e9597..9942372b0 100644
--- a/test/key.spec.js
+++ b/test/key.spec.js
@@ -8,12 +8,13 @@ const dirtyChai = require('dirty-chai')
const expect = chai.expect
chai.use(dirtyChai)
-describe('.key', () => {
+describe('.key', function () {
+ this.timeout(50 * 1000)
+
let ipfs
let fc
- before(function (done) {
- this.timeout(20 * 1000) // slow CI
+ before((done) => {
fc = new FactoryClient()
fc.spawnNode((err, node) => {
expect(err).to.not.exist()
diff --git a/test/log.spec.js b/test/log.spec.js
index 4ed6b81bb..24d4fe11c 100644
--- a/test/log.spec.js
+++ b/test/log.spec.js
@@ -8,12 +8,13 @@ const expect = chai.expect
chai.use(dirtyChai)
const FactoryClient = require('./ipfs-factory/client')
-describe('.log', () => {
+describe('.log', function () {
+ this.timeout(100 * 1000)
+
let ipfs
let fc
- before(function (done) {
- this.timeout(20 * 1000) // slow CI
+ before((done) => {
fc = new FactoryClient()
fc.spawnNode((err, node) => {
expect(err).to.not.exist()
@@ -24,7 +25,9 @@ describe('.log', () => {
after((done) => fc.dismantle(done))
- describe('Callback API', () => {
+ describe('Callback API', function () {
+ this.timeout(100 * 1000)
+
it('.log.tail', (done) => {
const req = ipfs.log.tail((err, res) => {
expect(err).to.not.exist()
@@ -62,7 +65,9 @@ describe('.log', () => {
})
})
- describe('Promise API', () => {
+ describe('Promise API', function () {
+ this.timeout(100 * 1000)
+
it('.log.tail', () => {
return ipfs.log.tail()
.then((res) => {
diff --git a/test/ls.spec.js b/test/ls.spec.js
index cc7a64195..0b2bcc828 100644
--- a/test/ls.spec.js
+++ b/test/ls.spec.js
@@ -11,15 +11,16 @@ const path = require('path')
const FactoryClient = require('./ipfs-factory/client')
-describe('.ls', () => {
+describe('.ls', function () {
+ this.timeout(50 * 1000)
+
if (!isNode) { return }
let ipfs
let fc
let folder
- before(function (done) {
- this.timeout(20 * 1000) // slow CI
+ before((done) => {
fc = new FactoryClient()
waterfall([
(cb) => fc.spawnNode(cb),
diff --git a/test/mount.spec.js b/test/mount.spec.js
index 19dc68fa3..15f725edb 100644
--- a/test/mount.spec.js
+++ b/test/mount.spec.js
@@ -2,4 +2,6 @@
'use strict'
// requires FUSE to be installed, not practical for testing
-describe('.mount', () => {})
+describe('.mount', () => {
+ it('missing')
+})
diff --git a/test/name.spec.js b/test/name.spec.js
index b556afe28..37af50183 100644
--- a/test/name.spec.js
+++ b/test/name.spec.js
@@ -14,13 +14,14 @@ const testfile = isNode
? loadFixture(__dirname, '/fixtures/testfile.txt')
: loadFixture(__dirname, 'fixtures/testfile.txt')
-describe('.name', () => {
+describe('.name', function () {
+ this.timeout(50 * 1000)
+
let ipfs
let other
let fc
- before(function (done) {
- this.timeout(50 * 1000) // slow CI
+ before((done) => {
fc = new FactoryClient()
series([
(cb) => {
diff --git a/test/pubsub-in-browser.spec.js b/test/pubsub-in-browser.spec.js
index 53b50d1ec..b6efe9f5c 100644
--- a/test/pubsub-in-browser.spec.js
+++ b/test/pubsub-in-browser.spec.js
@@ -50,133 +50,137 @@ function spawnWithId (factory, callback) {
], callback)
}
-if (!isNode) {
- describe('.pubsub-browser (pubsub not supported in the browsers currently)', () => {
- const topic = 'pubsub-tests'
+describe('.pubsub-browser (pubsub not supported in the browsers currently)', function () {
+ this.timeout(50 * 1000)
- let factory
- let ipfs1
+ if (isNode) {
+ it('skip these in Node.js')
+ return
+ }
+ const topic = 'pubsub-tests'
- before((done) => {
- factory = new FactoryClient()
+ let factory
+ let ipfs1
- series([
- (cb) => spawnWithId(factory, cb)
- ], (err, nodes) => {
- if (err) {
- return done(err)
- }
+ before((done) => {
+ factory = new FactoryClient()
- ipfs1 = nodes[0]
- done()
- })
- })
+ series([
+ (cb) => spawnWithId(factory, cb)
+ ], (err, nodes) => {
+ if (err) {
+ return done(err)
+ }
- after((done) => {
- factory.dismantle(done)
+ ipfs1 = nodes[0]
+ done()
})
+ })
- describe('everything errors', () => {
- describe('Callback API', () => {
- describe('.publish', () => {
- it('throws an error if called in the browser', (done) => {
- ipfs1.pubsub.publish(topic, 'hello friend', (err, topics) => {
- expect(err).to.exist()
- expect(err.message).to.equal(expectedError)
- done()
- })
+ after((done) => {
+ factory.dismantle(done)
+ })
+
+ describe('everything errors', () => {
+ describe('Callback API', () => {
+ describe('.publish', () => {
+ it('throws an error if called in the browser', (done) => {
+ ipfs1.pubsub.publish(topic, 'hello friend', (err, topics) => {
+ expect(err).to.exist()
+ expect(err.message).to.equal(expectedError)
+ done()
+ })
+ })
+ })
+
+ describe('.subscribe', () => {
+ const handler = () => {}
+ it('throws an error if called in the browser', (done) => {
+ ipfs1.pubsub.subscribe(topic, {}, handler, (err, topics) => {
+ expect(err).to.exist()
+ expect(err.message).to.equal(expectedError)
+ done()
+ })
+ })
+ })
+
+ describe('.peers', () => {
+ it('throws an error if called in the browser', (done) => {
+ ipfs1.pubsub.peers(topic, (err, topics) => {
+ expect(err).to.exist()
+ expect(err.message).to.equal(expectedError)
+ done()
})
})
+ })
- describe('.subscribe', () => {
- const handler = () => {}
- it('throws an error if called in the browser', (done) => {
- ipfs1.pubsub.subscribe(topic, {}, handler, (err, topics) => {
+ describe('.ls', () => {
+ it('throws an error if called in the browser', (done) => {
+ ipfs1.pubsub.ls((err, topics) => {
+ expect(err).to.exist()
+ expect(err.message).to.equal(expectedError)
+ done()
+ })
+ })
+ })
+ })
+
+ describe('Promise API', () => {
+ describe('.publish', () => {
+ it('throws an error if called in the browser', () => {
+ return ipfs1.pubsub.publish(topic, 'hello friend')
+ .catch((err) => {
expect(err).to.exist()
expect(err.message).to.equal(expectedError)
- done()
})
- })
})
+ })
- describe('.peers', () => {
- it('throws an error if called in the browser', (done) => {
- ipfs1.pubsub.peers(topic, (err, topics) => {
+ describe('.subscribe', () => {
+ const handler = () => {}
+ it('throws an error if called in the browser', (done) => {
+ ipfs1.pubsub.subscribe(topic, {}, handler)
+ .catch((err) => {
expect(err).to.exist()
expect(err.message).to.equal(expectedError)
done()
})
- })
})
+ })
- describe('.ls', () => {
- it('throws an error if called in the browser', (done) => {
- ipfs1.pubsub.ls((err, topics) => {
+ describe('.peers', () => {
+ it('throws an error if called in the browser', (done) => {
+ ipfs1.pubsub.peers(topic)
+ .catch((err) => {
expect(err).to.exist()
expect(err.message).to.equal(expectedError)
done()
})
- })
})
})
- describe('Promise API', () => {
- describe('.publish', () => {
- it('throws an error if called in the browser', () => {
- return ipfs1.pubsub.publish(topic, 'hello friend')
- .catch((err) => {
- expect(err).to.exist()
- expect(err.message).to.equal(expectedError)
- })
- })
- })
-
- describe('.subscribe', () => {
- const handler = () => {}
- it('throws an error if called in the browser', (done) => {
- ipfs1.pubsub.subscribe(topic, {}, handler)
- .catch((err) => {
- expect(err).to.exist()
- expect(err.message).to.equal(expectedError)
- done()
- })
- })
- })
-
- describe('.peers', () => {
- it('throws an error if called in the browser', (done) => {
- ipfs1.pubsub.peers(topic)
- .catch((err) => {
- expect(err).to.exist()
- expect(err.message).to.equal(expectedError)
- done()
- })
- })
- })
-
- describe('.ls', () => {
- it('throws an error if called in the browser', () => {
- return ipfs1.pubsub.ls()
- .catch((err) => {
- expect(err).to.exist()
- expect(err.message).to.equal(expectedError)
- })
- })
+ describe('.ls', () => {
+ it('throws an error if called in the browser', () => {
+ return ipfs1.pubsub.ls()
+ .catch((err) => {
+ expect(err).to.exist()
+ expect(err.message).to.equal(expectedError)
+ })
})
})
+ })
- describe('.unsubscribe', () => {
- it('throws an error if called in the browser', (done) => {
- try {
- ipfs1.pubsub.unsubscribe()
- done('unsubscribe() didn\'t throw an error')
- } catch (err) {
- expect(err).to.exist()
- expect(err.message).to.equal(expectedError)
- done()
- }
- })
+ describe('.unsubscribe', () => {
+ it('throws an error if called in the browser', (done) => {
+ try {
+ ipfs1.pubsub.unsubscribe()
+ done('unsubscribe() didn\'t throw an error')
+ } catch (err) {
+ expect(err).to.exist()
+ expect(err.message).to.equal(expectedError)
+ done()
+ }
})
})
})
-}
+})
diff --git a/test/refs.spec.js b/test/refs.spec.js
index 7ca22d687..9faedc905 100644
--- a/test/refs.spec.js
+++ b/test/refs.spec.js
@@ -10,15 +10,16 @@ const waterfall = require('async/waterfall')
const path = require('path')
const FactoryClient = require('./ipfs-factory/client')
-describe('.refs', () => {
+describe('.refs', function () {
+ this.timeout(80 * 1000)
+
if (!isNode) { return }
let ipfs
let fc
let folder
- before(function (done) {
- this.timeout(20 * 1000) // slow CI
+ before((done) => {
fc = new FactoryClient()
waterfall([
(cb) => fc.spawnNode(cb),
diff --git a/test/repo.spec.js b/test/repo.spec.js
index fef637e06..0af2a0001 100644
--- a/test/repo.spec.js
+++ b/test/repo.spec.js
@@ -7,12 +7,13 @@ const dirtyChai = require('dirty-chai')
const expect = chai.expect
chai.use(dirtyChai)
-describe('.repo', () => {
+describe('.repo', function () {
+ this.timeout(50 * 1000) // slow CI
+
let ipfs
let fc
- before(function (done) {
- this.timeout(20 * 1000) // slow CI
+ before((done) => {
fc = new FactoryClient()
fc.spawnNode((err, node) => {
expect(err).to.not.exist()