diff --git a/.appveyor.yml b/.appveyor.yml
new file mode 100644
index 00000000..de3e3780
--- /dev/null
+++ b/.appveyor.yml
@@ -0,0 +1,23 @@
+environment:
+  matrix:
+    - nodejs_version: "6"
+    - nodejs_version: "8"
+
+# cache:
+#  - node_modules
+
+platform:
+  - x64
+
+install:
+  - ps: Install-Product node $env:nodejs_version $env:platform
+  - npm install
+
+test_script:
+  - node --version
+  - npm --version
+  - npm test
+
+build: off
+
+version: "{build}"
diff --git a/package.json b/package.json
index a53f215a..200e0b6b 100644
--- a/package.json
+++ b/package.json
@@ -41,11 +41,12 @@
   "homepage": "https://github.com/ipfs/js-ipfs-unixfs-engine#readme",
   "devDependencies": {
     "aegir": "^12.1.3",
+    "ajv": "^5.3.0",
     "chai": "^4.1.2",
     "dirty-chai": "^2.0.1",
     "ipfs": "~0.26.0",
     "ipfs-block-service": "~0.13.0",
-    "ipfs-repo": "~0.18.3",
+    "ipfs-repo": "0.18.3",
     "ncp": "^2.0.0",
     "pre-commit": "^1.2.2",
     "pull-generate": "^2.2.0",
diff --git a/src/exporter/dir-flat.js b/src/exporter/dir-flat.js
index 8d04c1d5..9392018e 100644
--- a/src/exporter/dir-flat.js
+++ b/src/exporter/dir-flat.js
@@ -1,6 +1,5 @@
 'use strict'
 
-const path = require('path')
 const pull = require('pull-stream')
 const paramap = require('pull-paramap')
 const CID = require('cids')
@@ -22,7 +21,7 @@ function dirExporter (node, name, pathRest, ipldResolver, resolve, parent) {
     pull.values(node.links),
     pull.map((link) => ({
       linkName: link.name,
-      path: path.join(name, link.name),
+      path: name + '/' + link.name,
       hash: link.multihash
     })),
     pull.filter((item) => accepts === undefined || item.linkName === accepts),
diff --git a/src/exporter/dir-hamt-sharded.js b/src/exporter/dir-hamt-sharded.js
index c301169a..1bf81c51 100644
--- a/src/exporter/dir-hamt-sharded.js
+++ b/src/exporter/dir-hamt-sharded.js
@@ -1,6 +1,5 @@
 'use strict'
 
-const path = require('path')
 const pull = require('pull-stream')
 const paramap = require('pull-paramap')
 const CID = require('cids')
@@ -25,7 +24,7 @@ function shardedDirExporter (node, name, pathRest, ipldResolver, resolve, parent
     pull.map((link) => {
       // remove the link prefix (2 chars for the bucket index)
       const p = link.name.substring(2)
-      const pp = p ? path.join(name, p) : name
+      const pp = p ? name + '/' + p : name
       let accept = true
       let fromPathRest = false
 
diff --git a/src/exporter/object.js b/src/exporter/object.js
index c58ac0ed..af24a970 100644
--- a/src/exporter/object.js
+++ b/src/exporter/object.js
@@ -1,6 +1,5 @@
 'use strict'
 
-const path = require('path')
 const CID = require('cids')
 const pull = require('pull-stream')
 const pullDefer = require('pull-defer')
@@ -10,7 +9,7 @@ module.exports = (node, name, pathRest, ipldResolver, resolve) => {
   if (pathRest.length) {
     const pathElem = pathRest.shift()
     newNode = node[pathElem]
-    const newName = path.join(name, pathElem)
+    const newName = name + '/' + pathElem
     if (CID.isCID(newNode)) {
       const d = pullDefer.source()
       ipldResolver.get(sanitizeCID(newNode), (err, newNode) => {
diff --git a/test/builder-dir-sharding.js b/test/builder-dir-sharding.js
index 42f029bd..203e63c2 100644
--- a/test/builder-dir-sharding.js
+++ b/test/builder-dir-sharding.js
@@ -18,7 +18,7 @@ const leftPad = require('left-pad')
 
 module.exports = (repo) => {
   describe('builder: directory sharding', function () {
-    this.timeout(20 * 1000)
+    this.timeout(30 * 1000)
 
     let ipldResolver
 
diff --git a/test/exporter-subtree.js b/test/exporter-subtree.js
index 3987abd6..d2c59048 100644
--- a/test/exporter-subtree.js
+++ b/test/exporter-subtree.js
@@ -16,7 +16,9 @@ const exporter = unixFSEngine.exporter
 const smallFile = loadFixture(__dirname, 'fixtures/200Bytes.txt')
 
 module.exports = (repo) => {
-  describe('exporter', () => {
+  describe('exporter', function () {
+    this.timeout(10 * 1000)
+
     let ipldResolver
 
     before(() => {
diff --git a/test/exporter.js b/test/exporter.js
index 81582470..eacbbb21 100644
--- a/test/exporter.js
+++ b/test/exporter.js
@@ -83,7 +83,7 @@ module.exports = (repo) => {
           fileEql(files[0], bigFile, done)
         })
       )
-    })
+    }).timeout(30 * 1000)
 
     it('export a small file with links using CID instead of multihash', (done) => {
       const cid = new CID('QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q')
@@ -96,7 +96,7 @@ module.exports = (repo) => {
           fileEql(files[0], bigFile, done)
         })
      )
-    })
+    }).timeout(30 * 1000)
 
     it('export a large file > 5mb', (done) => {
       const hash = 'QmRQgufjp9vLE8XK2LGKZSsPCFCF6e4iynCQtNB5X2HBKE'
@@ -109,7 +109,7 @@ module.exports = (repo) => {
          fileEql(files[0], null, done)
        })
      )
-    })
+    }).timeout(30 * 1000)
 
     it('export a directory', (done) => {
       const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN'
@@ -149,7 +149,7 @@ module.exports = (repo) => {
          )
        })
      )
-  })
+  }).timeout(30 * 1000)
 
   it('returns an empty stream for dir', (done) => {
     const hash = 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'
diff --git a/test/hamt.js b/test/hamt.js
index 59c9f8e2..dc07ca25 100644
--- a/test/hamt.js
+++ b/test/hamt.js
@@ -97,7 +97,7 @@ describe('HAMT', () => {
     })
 
     it('can remove all the keys and still find remaining', function (done) {
-      this.timeout(30 * 1000)
+      this.timeout(50 * 1000)
 
       masterHead = keys.pop()
       iterate()
diff --git a/test/import-export-nested-dir.js b/test/import-export-nested-dir.js
index da448abf..86eae4af 100644
--- a/test/import-export-nested-dir.js
+++ b/test/import-export-nested-dir.js
@@ -22,7 +22,9 @@ module.exports = (repo) => {
       ipldResolver = new IPLDResolver(bs)
     })
 
-    it('imports', (done) => {
+    it('imports', function (done) {
+      this.timeout(20 * 1000)
+
       pull(
         pull.values([
           { path: 'a/b/c/d/e', content: pull.values([Buffer.from('banana')]) },
@@ -56,7 +58,9 @@ module.exports = (repo) => {
       )
     })
 
-    it('exports', done => {
+    it('exports', function (done) {
+      this.timeout(20 * 1000)
+
       pull(
         unixFSEngine.exporter(rootHash, ipldResolver),
        pull.collect((err, files) => {
diff --git a/test/import-export.js b/test/import-export.js
index e09b87a0..923cace7 100644
--- a/test/import-export.js
+++ b/test/import-export.js
@@ -1,4 +1,5 @@
 /* eslint-env mocha */
+/* eslint max-nested-callbacks: ["error", 5] */
 'use strict'
 
 const chai = require('chai')
@@ -32,7 +33,9 @@ function fileEql (f1, fileData, callback) {
 }
 
 module.exports = (repo) => {
-  describe('import and export', () => {
+  describe('import and export', function () {
+    this.timeout(30 * 1000)
+
     strategies.forEach((strategy) => {
       const importerOptions = { strategy: strategy }
 
diff --git a/test/importer.js b/test/importer.js
index e1edfe62..cdc74333 100644
--- a/test/importer.js
+++ b/test/importer.js
@@ -161,7 +161,7 @@ module.exports = (repo) => {
     const expected = extend({}, defaultResults, strategies[strategy])
 
     describe('importer: ' + strategy, function () {
-      this.timeout(20 * 1000)
+      this.timeout(30 * 1000)
 
      let ipldResolver
 
diff --git a/test/with-dag-api.js b/test/with-dag-api.js
index d7ad29d6..b7b50024 100644
--- a/test/with-dag-api.js
+++ b/test/with-dag-api.js
@@ -13,6 +13,8 @@ const pull = require('pull-stream')
 const mh = require('multihashes')
 const loadFixture = require('aegir/fixtures')
 const IPFS = require('ipfs')
+const os = require('os')
+const path = require('path')
 
 function stringifyMh (files) {
   return files.map((file) => {
@@ -105,7 +107,12 @@ const strategyOverrides = {
 
 }
 
-describe('with dag-api', () => {
+describe('with dag-api', function () {
+  // TODO: waiting for IPFS support on windows, https://github.com/ipfs/js-ipfs-unixfs-engine/issues/196
+  if (os.platform() === 'win32') {
+    return
+  }
+
   strategies.forEach(strategy => {
     const baseFiles = strategyBaseFiles[strategy]
     const defaultResults = extend({}, baseFiles, {
@@ -159,7 +166,7 @@ describe('with dag-api', () => {
     const expected = extend({}, defaultResults, strategies[strategy])
 
     describe('importer: ' + strategy, function () {
-      this.timeout(20 * 1000)
+      this.timeout(50 * 1000)
 
       let node
 
@@ -171,9 +178,11 @@ describe('with dag-api', () => {
        }
      }
 
-    before((done) => {
+    before(function (done) {
+      this.timeout(30 * 1000)
+
       node = new IPFS({
-        repo: '/tmp/unixfs-test-' + Math.random(),
+        repo: path.join(os.tmpdir(), 'unixfs-test-' + Math.random()),
         start: false
       })