diff --git a/examples/browser-add/index.html b/examples/browser-add/index.html
index d92470205..bd165e2ff 100644
--- a/examples/browser-add/index.html
+++ b/examples/browser-add/index.html
@@ -4,15 +4,23 @@
 JS IPFS API - Example - Browser - Add
+
 JS IPFS API - Add file from the browser
 
-found in ipfs:
-[ipfs hash]
-[ipfs content]
+
+found in ipfs:
+[ipfs hash]
+[ipfs content]
diff --git a/examples/browser-add/index.js b/examples/browser-add/index.js
index 183956bd9..e3cad0d50 100644
--- a/examples/browser-add/index.js
+++ b/examples/browser-add/index.js
@@ -1,6 +1,6 @@
 'use strict'
 
-var IPFS = require('ipfs-api')
+var IPFS = require('../../src')
 var ipfs = IPFS()
 
 function store () {
@@ -11,24 +11,26 @@ function store () {
     }
 
     res.forEach(function (file) {
-      console.log('successfully stored', file.Hash)
-      display(file.Hash)
+      if (file && file.hash) {
+        console.log('successfully stored', file.hash)
+        display(file.hash)
+      }
     })
   })
 }
 
 function display (hash) {
-  ipfs.cat(hash, function (err, res) {
+  // buffer: true results in the returned result being a buffer rather than a stream
+  ipfs.cat(hash, {buffer: true}, function (err, res) {
     if (err || !res) {
       return console.error('ipfs cat error', err, res)
     }
-    if (res.readable) {
-      console.error('unhandled: cat result is a pipe', res)
-    } else {
-      document.getElementById('hash').innerText = hash
-      document.getElementById('content').innerText = res
-    }
+
+    document.getElementById('hash').innerText = hash
+    document.getElementById('content').innerText = res.toString()
   })
 }
 
-document.getElementById('store').onclick = store
+document.addEventListener('DOMContentLoaded', function () {
+  document.getElementById('store').onclick = store
+})
diff --git a/examples/browser-add/package.json b/examples/browser-add/package.json
index a2c8dbcfb..6ac12312f 100644
--- a/examples/browser-add/package.json
+++ b/examples/browser-add/package.json
@@ -4,15 +4,13 @@
   "description": "",
   "main": "index.js",
   "scripts": {
-    "start": "browserify -t brfs index.js > bundle.js && http-server -a 127.0.0.1 -p 8888"
+    "start": "browserify index.js > bundle.js && http-server -a 127.0.0.1 -p 8888"
   },
   "keywords": [],
   "author": "Friedel Ziegelmayer",
   "license": "MIT",
   "devDependencies": {
-    "brfs": "^1.4.3",
     "browserify": "^13.0.1",
-    "http-server": "^0.9.0",
-    "ipfs-api": "^6.0.3"
+    "http-server": "^0.9.0"
   }
 }
diff --git a/package.json b/package.json
index 5bb974025..95c6c6b03 100644
--- a/package.json
+++ b/package.json
@@ -6,7 +6,8 @@
   "browser": {
     "glob": false,
     "fs": false,
-    "stream": "readable-stream"
+    "stream": "readable-stream",
+    "http": "stream-http"
   },
   "scripts": {
     "test": "gulp test",
@@ -24,6 +25,7 @@
     "async": "^2.1.2",
     "bl": "^1.1.2",
     "bs58": "^3.0.0",
+    "concat-stream": "^1.5.2",
     "detect-node": "^2.0.3",
     "flatmap": "0.0.3",
     "glob": "^7.1.1",
@@ -40,9 +42,9 @@
     "promisify-es6": "^1.0.2",
     "qs": "^6.3.0",
     "readable-stream": "^1.1.14",
+    "stream-http": "^2.5.0",
     "streamifier": "^0.1.1",
-    "tar-stream": "^1.5.2",
-    "wreck": "^10.0.0"
+    "tar-stream": "^1.5.2"
   },
   "engines": {
     "node": ">=4.0.0"
diff --git a/src/api/cat.js b/src/api/cat.js
index 62adfb73b..762469a14 100644
--- a/src/api/cat.js
+++ b/src/api/cat.js
@@ -4,7 +4,12 @@ const promisify = require('promisify-es6')
 const cleanMultihash = require('../clean-multihash')
 
 module.exports = (send) => {
-  return promisify((hash, callback) => {
+  return promisify((hash, opts, callback) => {
+    if (typeof opts === 'function') {
+      callback = opts
+      opts = {}
+    }
+
     try {
       hash = cleanMultihash(hash)
     } catch (err) {
@@ -13,7 +18,8 @@ module.exports = (send) => {
 
     send({
       path: 'cat',
-      args: hash
+      args: hash,
+      buffer: opts.buffer
     }, callback)
   })
 }
diff --git a/src/api/util/url-add.js b/src/api/util/url-add.js
index f8707ca9d..7dc7694b2 100644
--- a/src/api/util/url-add.js
+++ b/src/api/util/url-add.js
@@ -1,9 +1,11 @@
 'use strict'
 
-const Wreck = require('wreck')
-const addToDagNodesTransform = require('./../../add-to-dagnode-transform')
-
 const promisify = require('promisify-es6')
+const once = require('once')
+const parseUrl = require('url').parse
+
+const request = require('../../request')
+const addToDagNodesTransform = require('./../../add-to-dagnode-transform')
 
 module.exports = (send) => {
   return promisify((url, opts, callback) => {
@@ -27,10 +29,12 @@ module.exports = (send) => {
     }
 
     const sendWithTransform = send.withTransform(addToDagNodesTransform)
+    callback = once(callback)
 
-    Wreck.request('GET', url, null, (err, res) => {
-      if (err) {
-        return callback(err)
+    request(parseUrl(url).protocol)(url, (res) => {
+      res.once('error', callback)
+      if (res.statusCode >= 400) {
+        return callback(new Error(`Failed to download with ${res.statusCode}`))
       }
 
       sendWithTransform({
@@ -38,6 +42,6 @@ module.exports = (send) => {
         qs: opts,
         files: res
       }, callback)
-    })
+    }).end()
   })
 }
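The `cat` change above is what the updated browser-add example relies on: `opts` is now optional, and passing `{buffer: true}` makes the request layer concatenate the response into a Buffer instead of handing back a stream. A minimal sketch of the two call styles, assuming a running local daemon; the require path and the multihash are placeholders, not part of this diff:

```js
// Sketch only: 'ipfs-api' stands in for however the package is required,
// and the multihash is a placeholder for previously added content.
const ipfs = require('ipfs-api')()
const hash = 'QmSomePreviouslyAddedMultihash'

// Default behaviour: the callback receives a readable stream.
ipfs.cat(hash, (err, stream) => {
  if (err) return console.error(err)
  stream.pipe(process.stdout)
})

// With {buffer: true} the whole body arrives as a single Buffer.
ipfs.cat(hash, {buffer: true}, (err, buf) => {
  if (err) return console.error(err)
  console.log(buf.toString())
})
```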
diff --git a/src/request-api.js b/src/request-api.js
index 2467a005b..0b65e476e 100644
--- a/src/request-api.js
+++ b/src/request-api.js
@@ -1,32 +1,54 @@
 'use strict'
 
-const Wreck = require('wreck')
 const Qs = require('qs')
 const ndjson = require('ndjson')
-const getFilesStream = require('./get-files-stream')
-
 const isNode = require('detect-node')
+const once = require('once')
+const concat = require('concat-stream')
+
+const getFilesStream = require('./get-files-stream')
+const request = require('./request')
 
 // -- Internal
 
 function parseChunkedJson (res, cb) {
-  const parsed = []
   res
     .pipe(ndjson.parse())
-    .on('data', (obj) => {
-      parsed.push(obj)
-    })
-    .on('end', () => {
-      cb(null, parsed)
-    })
+    .once('error', cb)
+    .pipe(concat((data) => cb(null, data)))
 }
 
-function onRes (buffer, cb, uri) {
-  return (err, res) => {
-    if (err) {
-      return cb(err)
-    }
+function parseRaw (res, cb) {
+  res
+    .once('error', cb)
+    .pipe(concat((data) => cb(null, data)))
+}
+
+function parseJson (res, cb) {
+  res
+    .once('error', cb)
+    .pipe(concat((data) => {
+      if (!data || data.length === 0) {
+        return cb()
+      }
+
+      if (Buffer.isBuffer(data)) {
+        data = data.toString()
+      }
+      let res
+      try {
+        res = JSON.parse(data)
+      } catch (err) {
+        return cb(err)
+      }
+
+      cb(null, res)
+    }))
+}
+
+function onRes (buffer, cb) {
+  return (res) => {
     const stream = Boolean(res.headers['x-stream-output'])
     const chunkedObjects = Boolean(res.headers['x-chunked-output'])
     const isJson = res.headers['content-type'] &&
@@ -35,7 +57,7 @@
     if (res.statusCode >= 400 || !res.statusCode) {
       const error = new Error(`Server responded with ${res.statusCode}`)
 
-      return Wreck.read(res, {json: true}, (err, payload) => {
+      parseJson(res, (err, payload) => {
         if (err) {
           return cb(err)
         }
@@ -51,20 +73,21 @@
       return cb(null, res)
     }
 
-    if (chunkedObjects) {
-      if (isJson) {
-        return parseChunkedJson(res, cb)
-      }
+    if (chunkedObjects && isJson) {
+      return parseChunkedJson(res, cb)
+    }
 
-      return Wreck.read(res, null, cb)
+    if (isJson) {
+      return parseJson(res, cb)
     }
 
-    Wreck.read(res, {json: isJson}, cb)
+    parseRaw(res, cb)
   }
 }
 
 function requestAPI (config, options, callback) {
   options.qs = options.qs || {}
+  callback = once(callback)
 
   if (Array.isArray(options.files)) {
     options.qs.recursive = true
@@ -99,17 +122,12 @@
   // this option is only used internally, not passed to daemon
   delete options.qs.followSymlinks
 
-  const port = config.port ? `:${config.port}` : ''
-
-  const opts = {
-    method: 'POST',
-    uri: `${config.protocol}://${config.host}${port}${config['api-path']}${options.path}?${Qs.stringify(options.qs, {arrayFormat: 'repeat'})}`,
-    headers: {}
-  }
+  const method = 'POST'
+  const headers = {}
 
   if (isNode) {
     // Browsers do not allow you to modify the user agent
-    opts.headers['User-Agent'] = config['user-agent']
+    headers['User-Agent'] = config['user-agent']
   }
 
   if (options.files) {
@@ -117,11 +135,25 @@
       return callback(new Error('No boundary in multipart stream'))
     }
 
-    opts.headers['Content-Type'] = `multipart/form-data; boundary=${stream.boundary}`
-    opts.payload = stream
+    headers['Content-Type'] = `multipart/form-data; boundary=${stream.boundary}`
+  }
+
+  const qs = Qs.stringify(options.qs, {arrayFormat: 'repeat'})
+  const req = request(config.protocol)({
+    hostname: config.host,
+    path: `${config['api-path']}${options.path}?${qs}`,
+    port: config.port,
+    method: method,
+    headers: headers
+  }, onRes(options.buffer, callback))
+
+  if (options.files) {
+    stream.pipe(req)
+  } else {
+    req.end()
   }
 
-  return Wreck.request(opts.method, opts.uri, opts, onRes(options.buffer, callback, opts.uri))
+  return req
 }
 
 //
diff --git a/src/request.js b/src/request.js
new file mode 100644
index 000000000..6a77abf3a
--- /dev/null
+++ b/src/request.js
@@ -0,0 +1,12 @@
+'use strict'
+
+const httpRequest = require('http').request
+const httpsRequest = require('https').request
+
+module.exports = (protocol) => {
+  if (protocol.indexOf('https') === 0) {
+    return httpsRequest
+  }
+
+  return httpRequest
+}
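The new `src/request.js` only selects the core module from the protocol string; issuing and ending the request stays with the caller, exactly as `url-add.js` and `request-api.js` do above. A rough Node-side usage sketch (the URL and the require path from the repo root are placeholders):

```js
// Sketch only: pick http or https based on the parsed protocol, then GET the URL.
const parseUrl = require('url').parse
const request = require('./src/request') // path relative to the repo root

const url = 'https://example.com/' // placeholder URL
const req = request(parseUrl(url).protocol)(url, (res) => {
  if (res.statusCode >= 400) {
    return console.error('failed with', res.statusCode)
  }
  res.pipe(process.stdout)
})

req.once('error', console.error)
req.end() // nothing goes on the wire until the request is ended
```

In the browser the same code path works because the root package.json now maps `http` to `stream-http` via the `browser` field.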
diff --git a/test/ipfs-api/request-api.spec.js b/test/ipfs-api/request-api.spec.js
index e362a255f..9810a4e5d 100644
--- a/test/ipfs-api/request-api.spec.js
+++ b/test/ipfs-api/request-api.spec.js
@@ -4,64 +4,24 @@
 const expect = require('chai').expect
 const isNode = require('detect-node')
 const ipfsAPI = require('./../../src/index.js')
-const noop = () => {}
 
 describe('ipfsAPI request tests', () => {
-  describe('requestAPI', () => {
-    const apiAddrs = require('./../setup/tmp-disposable-nodes-addrs.json')
-    const apiAddr = apiAddrs.a.split('/')
-
-    it('excludes port from URL if config.port is falsy', (done) => {
-      const Wreck = require('wreck')
-      const request = Wreck.request
-
-      Wreck.request = (method, uri, opts, cb) => {
-        Wreck.request = request
-        expect(uri).to.not.contain(/:\d/)
-        done()
-      }
-
-      ipfsAPI({
-        host: apiAddr[2],
-        port: null,
-        protocol: 'http'
-      }).id(noop)
-    })
-
-    it('includes port in URL if config.port is truthy', (done) => {
-      const Wreck = require('wreck')
-      const request = Wreck.request
-
-      Wreck.request = (method, uri, opts, cb) => {
-        Wreck.request = request
-        expect(uri).to.contain(':' + apiAddr[4])
-        done()
-      }
-
-      ipfsAPI({
-        host: apiAddr[2],
-        port: apiAddr[4],
-        protocol: 'http'
-      }).id(noop)
-    })
-
-    it('does not crash if no content-type header is provided', (done) => {
-      if (!isNode) {
-        return done()
-      }
-
-      // go-ipfs always (currently) adds a content-type header, even if no content is present,
-      // the standard behaviour for an http-api is to omit this header if no content is present
-      const server = require('http').createServer((req, res) => {
-        res.writeHead(200)
-        res.end()
-      }).listen(6001, () => {
-        ipfsAPI('/ip4/127.0.0.1/tcp/6001')
-          .config.replace('test/fixtures/r-config.json', (err) => {
-            expect(err).to.not.exist
-            server.close(done)
-          })
-      })
+  it('does not crash if no content-type header is provided', (done) => {
+    if (!isNode) {
+      return done()
+    }
+
+    // go-ipfs always (currently) adds a content-type header, even if no content is present,
+    // the standard behaviour for an http-api is to omit this header if no content is present
+    const server = require('http').createServer((req, res) => {
+      res.writeHead(200)
+      res.end()
+    }).listen(6001, () => {
+      ipfsAPI('/ip4/127.0.0.1/tcp/6001')
+        .config.replace('test/fixtures/r-config.json', (err) => {
+          expect(err).to.not.exist
+          server.close(done)
+        })
     })
   })
 })
diff --git a/test/ipfs-api/util.spec.js b/test/ipfs-api/util.spec.js
index e0990a9d1..c9820492f 100644
--- a/test/ipfs-api/util.spec.js
+++ b/test/ipfs-api/util.spec.js
@@ -61,11 +61,19 @@ describe('.util', () => {
     })
   })
 
-  it('.urlAdd', (done) => {
+  it('.urlAdd http', (done) => {
     ipfs.util.addFromURL('http://example.com/', (err, result) => {
       expect(err).to.not.exist
       expect(result.length).to.equal(1)
       done()
     })
   })
+
+  it('.urlAdd https', (done) => {
+    ipfs.util.addFromURL('https://example.com/', (err, result) => {
+      expect(err).to.not.exist
+      expect(result.length).to.equal(1)
+      done()
+    })
+  })
 })
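The new https test only asserts the result length, so here is a hedged sketch of how `addFromURL` would typically be consumed now that both protocols share the request helper. The daemon address and the `{ hash }` result shape (mirroring the browser-add example above) are assumptions, not guaranteed by this diff:

```js
// Sketch only: assumes a local daemon on the default API port and
// that each result entry carries a `hash` field as in the example above.
const ipfsAPI = require('ipfs-api')
const ipfs = ipfsAPI('/ip4/127.0.0.1/tcp/5001')

ipfs.util.addFromURL('https://example.com/', (err, result) => {
  if (err) throw err
  result.forEach((file) => console.log('added', file.hash))
})
```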