From 4f0c0e5cf7944dc0195b960e4d5108034fc2f211 Mon Sep 17 00:00:00 2001
From: achingbrain
Date: Thu, 8 Nov 2018 21:45:19 +0000
Subject: [PATCH 01/11] fix: updates ipld-dag-pb dep to version without .cid
properties
Follows on from https://github.com/ipld/js-ipld-dag-pb/pull/99 and
updates this module to not rely on DAGNodes having knowledge of
their CIDs.
---
package.json | 16 +-
src/cli/commands/object/get.js | 54 +++---
src/cli/commands/object/links.js | 11 +-
src/cli/commands/object/new.js | 20 ++-
src/cli/commands/object/patch/add-link.js | 36 +++-
src/cli/commands/object/patch/append-data.js | 14 +-
src/cli/commands/object/patch/rm-link.js | 20 ++-
src/cli/commands/object/patch/set-data.js | 14 +-
src/cli/commands/object/put.js | 21 ++-
src/core/components/dag.js | 2 +-
src/core/components/init.js | 13 +-
src/core/components/object.js | 80 ++++-----
src/core/components/pin-set.js | 81 ++++++---
src/core/components/pin.js | 166 +++++++++++-------
src/core/index.js | 6 +-
src/core/utils.js | 12 +-
src/http/api/resources/object.js | 170 +++++++++----------
test/core/object.spec.js | 93 ++++++----
test/core/pin-set.js | 55 +++---
test/core/pin.js | 2 +-
test/core/preload.spec.js | 106 +++++++++---
test/http-api/object.js | 52 ++++--
22 files changed, 675 insertions(+), 369 deletions(-)
diff --git a/package.json b/package.json
index 3bdaa3b57a..d6295f4aeb 100644
--- a/package.json
+++ b/package.json
@@ -69,8 +69,8 @@
"execa": "^1.0.0",
"form-data": "^2.3.3",
"hat": "0.0.3",
- "interface-ipfs-core": "~0.84.2",
- "ipfsd-ctl": "~0.39.5",
+ "interface-ipfs-core": "~0.86.0",
+ "ipfsd-ctl": "ipfs/js-ipfsd-ctl#update-dag-pb-to-not-have-cid-property",
"ncp": "^2.0.0",
"qs": "^6.5.2",
"rimraf": "^2.6.2",
@@ -102,19 +102,19 @@
"hoek": "^5.0.4",
"human-to-milliseconds": "^1.0.0",
"interface-datastore": "~0.6.0",
- "ipfs-api": "^26.1.0",
+ "ipfs-api": "ipfs/js-ipfs-api",
"ipfs-bitswap": "~0.21.0",
"ipfs-block": "~0.8.0",
"ipfs-block-service": "~0.15.1",
- "ipfs-http-response": "~0.2.0",
- "ipfs-mfs": "~0.4.2",
+ "ipfs-http-response": "~0.2.1",
+ "ipfs-mfs": "~0.5.0",
"ipfs-multipart": "~0.1.0",
"ipfs-repo": "~0.25.0",
"ipfs-unixfs": "~0.1.16",
- "ipfs-unixfs-engine": "~0.33.0",
- "ipld": "~0.19.3",
+ "ipfs-unixfs-engine": "~0.34.0",
+ "ipld": "~0.20.0",
"ipld-bitcoin": "~0.1.8",
- "ipld-dag-pb": "~0.14.11",
+ "ipld-dag-pb": "~0.15.0",
"ipld-ethereum": "^2.0.1",
"ipld-git": "~0.2.2",
"ipld-zcash": "~0.1.6",
diff --git a/src/cli/commands/object/get.js b/src/cli/commands/object/get.js
index 80190d8d96..ccc4b466ab 100644
--- a/src/cli/commands/object/get.js
+++ b/src/cli/commands/object/get.js
@@ -1,6 +1,11 @@
'use strict'
const print = require('../../utils').print
+const {
+ util: {
+ cid
+ }
+} = require('ipld-dag-pb')
module.exports = {
command: 'get ',
@@ -11,6 +16,10 @@ module.exports = {
'data-encoding': {
type: 'string',
default: 'base64'
+ },
+ 'cid-base': {
+ default: 'base58btc',
+ describe: 'CID base to use.'
}
},
@@ -19,26 +28,33 @@ module.exports = {
if (err) {
throw err
}
- const nodeJSON = node.toJSON()
-
- if (Buffer.isBuffer(node.data)) {
- nodeJSON.data = node.data.toString(argv['data-encoding'] || undefined)
- }
-
- const answer = {
- Data: nodeJSON.data,
- Hash: nodeJSON.multihash,
- Size: nodeJSON.size,
- Links: nodeJSON.links.map((l) => {
- return {
- Name: l.name,
- Size: l.size,
- Hash: l.multihash
- }
- })
- }
- print(JSON.stringify(answer))
+ cid(node, (err, result) => {
+ if (err) {
+ throw err
+ }
+
+ let data = node.data
+
+ if (Buffer.isBuffer(data)) {
+ data = node.data.toString(argv.dataEncoding || undefined)
+ }
+
+ const answer = {
+ Data: data,
+ Hash: result.toBaseEncodedString(argv.cidBase),
+ Size: node.size,
+ Links: node.links.map((l) => {
+ return {
+ Name: l.name,
+ Size: l.size,
+ Hash: l.cid.toBaseEncodedString(argv.cidBase)
+ }
+ })
+ }
+
+ print(JSON.stringify(answer))
+ })
})
}
}
diff --git a/src/cli/commands/object/links.js b/src/cli/commands/object/links.js
index d657820fb4..9a3a3e203e 100644
--- a/src/cli/commands/object/links.js
+++ b/src/cli/commands/object/links.js
@@ -7,7 +7,12 @@ module.exports = {
describe: 'Outputs the links pointed to by the specified object',
- builder: {},
+ builder: {
+ 'cid-base': {
+ default: 'base58btc',
+ describe: 'CID base to use.'
+ }
+ },
handler (argv) {
argv.ipfs.object.links(argv.key, {
@@ -18,9 +23,7 @@ module.exports = {
}
links.forEach((link) => {
- link = link.toJSON()
-
- print(`${link.multihash} ${link.size} ${link.name}`)
+ print(`${link.cid.toBaseEncodedString(argv.cidBase)} ${link.size} ${link.name}`)
})
})
}
diff --git a/src/cli/commands/object/new.js b/src/cli/commands/object/new.js
index f96db85fb2..36ab5a3659 100644
--- a/src/cli/commands/object/new.js
+++ b/src/cli/commands/object/new.js
@@ -4,13 +4,23 @@ const debug = require('debug')
const log = debug('cli:object')
log.error = debug('cli:object:error')
const print = require('../../utils').print
+const {
+ util: {
+ cid
+ }
+} = require('ipld-dag-pb')
module.exports = {
command: 'new []',
describe: 'Create new ipfs objects',
- builder: {},
+ builder: {
+ 'cid-base': {
+ default: 'base58btc',
+ describe: 'CID base to use.'
+ }
+ },
handler (argv) {
argv.ipfs.object.new(argv.template, (err, node) => {
@@ -18,9 +28,13 @@ module.exports = {
throw err
}
- const nodeJSON = node.toJSON()
+ cid(node, (err, cid) => {
+ if (err) {
+ throw err
+ }
- print(nodeJSON.multihash)
+ print(cid.toBaseEncodedString(argv.cidBase))
+ })
})
}
}
diff --git a/src/cli/commands/object/patch/add-link.js b/src/cli/commands/object/patch/add-link.js
index eb4530ed72..28982d1bcf 100644
--- a/src/cli/commands/object/patch/add-link.js
+++ b/src/cli/commands/object/patch/add-link.js
@@ -3,13 +3,23 @@
const dagPB = require('ipld-dag-pb')
const DAGLink = dagPB.DAGLink
const print = require('../../../utils').print
+const {
+ util: {
+ cid
+ }
+} = require('ipld-dag-pb')
module.exports = {
command: 'add-link [',
describe: 'Add a link to a given object',
- builder: {},
+ builder: {
+ 'cid-base': {
+ default: 'base58btc',
+ describe: 'CID base to use.'
+ }
+ },
handler (argv) {
const ipfs = argv.ipfs
@@ -20,16 +30,28 @@ module.exports = {
throw err
}
- const link = new DAGLink(argv.name, nodeA.size, nodeA.multihash)
-
- ipfs.object.patch.addLink(argv.root, link, {
- enc: 'base58'
- }, (err, nodeB) => {
+ cid(nodeA, (err, result) => {
if (err) {
throw err
}
- print(nodeB.toJSON().multihash)
+ const link = new DAGLink(argv.name, nodeA.size, result)
+
+ ipfs.object.patch.addLink(argv.root, link, {
+ enc: 'base58'
+ }, (err, nodeB) => {
+ if (err) {
+ throw err
+ }
+
+ cid(nodeB, (err, result) => {
+ if (err) {
+ throw err
+ }
+
+ print(result.toBaseEncodedString(argv.cidBase))
+ })
+ })
})
})
}
diff --git a/src/cli/commands/object/patch/append-data.js b/src/cli/commands/object/patch/append-data.js
index e13722b13c..a2428a196d 100644
--- a/src/cli/commands/object/patch/append-data.js
+++ b/src/cli/commands/object/patch/append-data.js
@@ -6,6 +6,11 @@ const debug = require('debug')
const log = debug('cli:object')
log.error = debug('cli:object:error')
const print = require('../../../utils').print
+const {
+ util: {
+ cid
+ }
+} = require('ipld-dag-pb')
function appendData (key, data, ipfs) {
ipfs.object.patch.appendData(key, data, {
@@ -14,9 +19,14 @@ function appendData (key, data, ipfs) {
if (err) {
throw err
}
- const nodeJSON = node.toJSON()
- print(nodeJSON.multihash)
+ cid(node, (err, cid) => {
+ if (err) {
+ throw err
+ }
+
+ print(cid.toBaseEncodedString())
+ })
})
}
diff --git a/src/cli/commands/object/patch/rm-link.js b/src/cli/commands/object/patch/rm-link.js
index dde69c30f8..ec36dfaf57 100644
--- a/src/cli/commands/object/patch/rm-link.js
+++ b/src/cli/commands/object/patch/rm-link.js
@@ -4,13 +4,23 @@ const debug = require('debug')
const log = debug('cli:object')
log.error = debug('cli:object:error')
const print = require('../../../utils').print
+const {
+ util: {
+ cid
+ }
+} = require('ipld-dag-pb')
module.exports = {
command: 'rm-link ',
describe: 'Remove a link from an object',
- builder: {},
+ builder: {
+ 'cid-base': {
+ default: 'base58btc',
+ describe: 'CID base to use.'
+ }
+ },
handler (argv) {
argv.ipfs.object.patch.rmLink(argv.root, { name: argv.link }, {
@@ -20,9 +30,13 @@ module.exports = {
throw err
}
- const nodeJSON = node.toJSON()
+ cid(node, (err, cid) => {
+ if (err) {
+ throw err
+ }
- print(nodeJSON.multihash)
+ print(cid.toBaseEncodedString(argv.cidBase))
+ })
})
}
}
diff --git a/src/cli/commands/object/patch/set-data.js b/src/cli/commands/object/patch/set-data.js
index 4bfa4e0834..0e9d064d5b 100644
--- a/src/cli/commands/object/patch/set-data.js
+++ b/src/cli/commands/object/patch/set-data.js
@@ -6,6 +6,11 @@ const debug = require('debug')
const log = debug('cli:object')
log.error = debug('cli:object:error')
const print = require('../../../utils').print
+const {
+ util: {
+ cid
+ }
+} = require('ipld-dag-pb')
function parseAndAddNode (key, data, ipfs) {
ipfs.object.patch.setData(key, data, {
@@ -14,9 +19,14 @@ function parseAndAddNode (key, data, ipfs) {
if (err) {
throw err
}
- const nodeJSON = node.toJSON()
- print(nodeJSON.multihash)
+ cid(node, (err, cid) => {
+ if (err) {
+ throw err
+ }
+
+ print(cid.toBaseEncodedString())
+ })
})
}
diff --git a/src/cli/commands/object/put.js b/src/cli/commands/object/put.js
index 98617080df..23521e1f4c 100644
--- a/src/cli/commands/object/put.js
+++ b/src/cli/commands/object/put.js
@@ -3,16 +3,25 @@
const bl = require('bl')
const fs = require('fs')
const print = require('../../utils').print
+const {
+ util: {
+ cid
+ }
+} = require('ipld-dag-pb')
-function putNode (buf, enc, ipfs) {
+function putNode (buf, enc, ipfs, cidEnc) {
ipfs.object.put(buf, { enc: enc }, (err, node) => {
if (err) {
throw err
}
- const nodeJSON = node.toJSON()
+ cid(node, (err, cid) => {
+ if (err) {
+ throw err
+ }
- print(`added ${nodeJSON.multihash}`)
+ print(`added ${cid.toBaseEncodedString(cidEnc)}`)
+ })
})
}
@@ -25,6 +34,10 @@ module.exports = {
'input-enc': {
type: 'string',
default: 'json'
+ },
+ 'cid-base': {
+ default: 'base58btc',
+ describe: 'CID base to use.'
}
},
@@ -40,7 +53,7 @@ module.exports = {
throw err
}
- putNode(input, argv.inputEnc, ipfs)
+ putNode(input, argv.inputEnc, ipfs, argv.cidBase)
}))
}
}
diff --git a/src/core/components/dag.js b/src/core/components/dag.js
index 5b33ee8214..dc9ef2bdf5 100644
--- a/src/core/components/dag.js
+++ b/src/core/components/dag.js
@@ -163,7 +163,7 @@ module.exports = function dag (self) {
if (err) { return callback(err) }
mapAsync(res.value.links, (link, cb) => {
- self.dag._getRecursive(link.multihash, options, cb)
+ self.dag._getRecursive(link.cid, options, cb)
}, (err, nodes) => {
// console.log('nodes:', nodes)
if (err) return callback(err)
diff --git a/src/core/components/init.js b/src/core/components/init.js
index 3473516b9f..51c197e977 100644
--- a/src/core/components/init.js
+++ b/src/core/components/init.js
@@ -7,6 +7,10 @@ const promisify = require('promisify-es6')
const defaultsDeep = require('@nodeutils/defaults-deep')
const defaultConfig = require('../runtime/config-nodejs.js')
const Keychain = require('libp2p-keychain')
+const {
+ DAGNode
+} = require('ipld-dag-pb')
+const UnixFs = require('ipfs-unixfs')
const IPNS = require('../ipns')
const OfflineDatastore = require('../ipns/routing/offline-datastore')
@@ -125,8 +129,13 @@ module.exports = function init (self) {
const tasks = [
(cb) => {
waterfall([
- (cb) => self.object.new('unixfs-dir', cb),
- (emptyDirNode, cb) => self._ipns.initializeKeyspace(privateKey, emptyDirNode.toJSON().multihash, cb)
+ (cb) => DAGNode.create(new UnixFs('directory').marshal(), cb),
+ (node, cb) => self.dag.put(node, {
+ version: 0,
+ format: 'dag-pb',
+ hashAlg: 'sha2-256'
+ }, cb),
+ (cid, cb) => self._ipns.initializeKeyspace(privateKey, cid.toBaseEncodedString(), cb)
], cb)
}
]
diff --git a/src/core/components/object.js b/src/core/components/object.js
index 41ad06f1cd..2bc91d3026 100644
--- a/src/core/components/object.js
+++ b/src/core/components/object.js
@@ -1,6 +1,7 @@
'use strict'
const waterfall = require('async/waterfall')
+const parallel = require('async/parallel')
const setImmediate = require('async/setImmediate')
const promisify = require('promisify-es6')
const dagPB = require('ipld-dag-pb')
@@ -20,6 +21,8 @@ function normalizeMultihash (multihash, enc) {
return Buffer.from(multihash, enc)
} else if (Buffer.isBuffer(multihash)) {
return multihash
+ } else if (CID.isCID(multihash)) {
+ return multihash.buffer
} else {
throw new Error('unsupported multihash')
}
@@ -84,9 +87,11 @@ module.exports = function object (self) {
return cb(err)
}
- const cid = new CID(node.multihash)
-
- self._ipld.put(node, { cid }, (err) => {
+ self._ipld.put(node, {
+ version: 0,
+ hashAlg: 'sha2-256',
+ format: 'dag-pb'
+ }, (err, cid) => {
if (err) return cb(err)
if (options.preload !== false) {
@@ -132,9 +137,11 @@ module.exports = function object (self) {
return callback(err)
}
- const cid = new CID(node.multihash)
-
- self._ipld.put(node, { cid }, (err) => {
+ self._ipld.put(node, {
+ version: 0,
+ hashAlg: 'sha2-256',
+ format: 'dag-pb'
+ }, (err, cid) => {
if (err) {
return callback(err)
}
@@ -176,7 +183,7 @@ module.exports = function object (self) {
next()
})
}
- } else if (obj.multihash) {
+ } else if (DAGNode.isDAGNode(obj)) {
// already a dag node
node = obj
next()
@@ -193,20 +200,18 @@ module.exports = function object (self) {
}
function next () {
- let cid
-
- try {
- cid = new CID(node.multihash)
- } catch (err) {
- return setImmediate(() => callback(errCode(err, 'ERR_INVALID_CID')))
- }
-
- self._ipld.put(node, { cid }, (err) => {
+ self._ipld.put(node, {
+ version: 0,
+ hashAlg: 'sha2-256',
+ format: 'dag-pb'
+ }, (err, cid) => {
if (err) {
return callback(err)
}
- self.object.get(node.multihash, { preload: options.preload }, callback)
+ self.object.get(cid, {
+ preload: options.preload
+ }, callback)
})
}
}),
@@ -246,9 +251,7 @@ module.exports = function object (self) {
return callback(err)
}
- const node = result.value
-
- callback(null, node)
+ callback(null, result.value)
})
}),
@@ -288,29 +291,30 @@ module.exports = function object (self) {
options = {}
}
- self.object.get(multihash, options, (err, node) => {
+ waterfall([
+ (cb) => self.object.get(multihash, options, cb),
+ (node, cb) => {
+ parallel({
+ serialized: (next) => dagPB.util.serialize(node, next),
+ cid: (next) => dagPB.util.cid(node, next),
+ node: (next) => next(null, node)
+ }, cb)
+ }
+ ], (err, result) => {
if (err) {
return callback(err)
}
- dagPB.util.serialize(node, (err, serialized) => {
- if (err) {
- return callback(err)
- }
-
- const blockSize = serialized.length
- const linkLength = node.links.reduce((a, l) => a + l.size, 0)
+ const blockSize = result.serialized.length
+ const linkLength = result.node.links.reduce((a, l) => a + l.size, 0)
- const nodeJSON = node.toJSON()
-
- callback(null, {
- Hash: nodeJSON.multihash,
- NumLinks: node.links.length,
- BlockSize: blockSize,
- LinksSize: blockSize - node.data.length,
- DataSize: node.data.length,
- CumulativeSize: blockSize + linkLength
- })
+ callback(null, {
+ Hash: result.cid.toBaseEncodedString(),
+ NumLinks: result.node.links.length,
+ BlockSize: blockSize,
+ LinksSize: blockSize - result.node.data.length,
+ DataSize: result.node.data.length,
+ CumulativeSize: blockSize + linkLength
})
})
}),
diff --git a/src/core/components/pin-set.js b/src/core/components/pin-set.js
index 1f31697825..91f72bf90b 100644
--- a/src/core/components/pin-set.js
+++ b/src/core/components/pin-set.js
@@ -61,6 +61,7 @@ exports = module.exports = function (dag) {
// should this be part of `object` API?
hasDescendant: (root, childhash, callback) => {
const seen = {}
+
if (CID.isCID(childhash) || Buffer.isBuffer(childhash)) {
childhash = toB58String(childhash)
}
@@ -68,33 +69,53 @@ exports = module.exports = function (dag) {
return searchChildren(root, callback)
function searchChildren (root, cb) {
- some(root.links, ({ multihash }, someCb) => {
- const bs58Link = toB58String(multihash)
- if (bs58Link === childhash) { return someCb(null, true) }
- if (bs58Link in seen) { return someCb(null, false) }
+ some(root.links, ({ cid }, done) => {
+ const bs58Link = toB58String(cid)
+
+ if (bs58Link === childhash) {
+ return done(null, true)
+ }
+
+ if (bs58Link in seen) {
+ return done(null, false)
+ }
seen[bs58Link] = true
- dag.get(multihash, '', { preload: false }, (err, res) => {
- if (err) { return someCb(err) }
- searchChildren(res.value, someCb)
+ dag.get(cid, '', { preload: false }, (err, res) => {
+ if (err) {
+ return done(err)
+ }
+
+ searchChildren(res.value, done)
})
}, cb)
}
},
storeSet: (keys, callback) => {
- const pins = keys.map(key => ({
- key: key,
- data: null
- }))
+ const pins = keys.map(key => {
+ if (typeof key === 'string' || Buffer.isBuffer(key)) {
+ key = new CID(key)
+ }
+
+ return {
+ key: key,
+ data: null
+ }
+ })
pinSet.storeItems(pins, (err, rootNode) => {
if (err) { return callback(err) }
- const opts = { cid: new CID(rootNode.multihash), preload: false }
- dag.put(rootNode, opts, (err, cid) => {
- if (err) { return callback(err) }
- callback(null, rootNode)
+
+ dag.put(rootNode, {
+ version: 0,
+ format: 'dag-pb',
+ hashAlg: 'sha2-256',
+ preload: false
+ }, (err, cid) => {
+ if (err) { return callback(err, cid) }
+ callback(null, { node: rootNode, cid })
})
})
},
@@ -118,12 +139,14 @@ exports = module.exports = function (dag) {
if (pins.length <= maxItems) {
const nodes = pins
- .map(item => ({
- link: new DAGLink('', 1, item.key),
- data: item.data || Buffer.alloc(0)
- }))
+ .map(item => {
+ return ({
+ link: new DAGLink('', 1, item.key),
+ data: item.data || Buffer.alloc(0)
+ })
+ })
// sorting makes any ordering of `pins` produce the same DAGNode
- .sort((a, b) => Buffer.compare(a.link.multihash, b.link.multihash))
+ .sort((a, b) => Buffer.compare(a.link.cid.buffer, b.link.cid.buffer))
const rootLinks = fanoutLinks.concat(nodes.map(item => item.link))
const rootData = Buffer.concat(
@@ -169,10 +192,16 @@ exports = module.exports = function (dag) {
function storeChild (err, child, binIdx, cb) {
if (err) { return cb(err) }
- const opts = { cid: new CID(child.multihash), preload: false }
- dag.put(child, opts, err => {
+ const opts = {
+ version: 0,
+ hashAlg: 'sha2-256',
+ format: 'dag-pb',
+ preload: false
+ }
+
+ dag.put(child, opts, (err, cid) => {
if (err) { return cb(err) }
- fanoutLinks[binIdx] = new DAGLink('', child.size, child.multihash)
+ fanoutLinks[binIdx] = new DAGLink('', child.size, cid)
cb(null)
})
}
@@ -185,10 +214,10 @@ exports = module.exports = function (dag) {
return callback(new Error('No link found with name ' + name))
}
- dag.get(link.multihash, '', { preload: false }, (err, res) => {
+ dag.get(link.cid, '', { preload: false }, (err, res) => {
if (err) { return callback(err) }
const keys = []
- const step = link => keys.push(link.multihash)
+ const step = link => keys.push(link.cid.buffer)
pinSet.walkItems(res.value, step, err => {
if (err) { return callback(err) }
return callback(null, keys)
@@ -208,7 +237,7 @@ exports = module.exports = function (dag) {
if (idx < pbh.header.fanout) {
// the first pbh.header.fanout links are fanout bins
// if a fanout bin is not 'empty', dig into and walk its DAGLinks
- const linkHash = link.multihash
+ const linkHash = link.cid.buffer
if (!emptyKey.equals(linkHash)) {
// walk the links of this fanout bin
diff --git a/src/core/components/pin.js b/src/core/components/pin.js
index 39e8765ca0..e89c9f3fad 100644
--- a/src/core/components/pin.js
+++ b/src/core/components/pin.js
@@ -2,7 +2,7 @@
'use strict'
const promisify = require('promisify-es6')
-const { DAGNode, DAGLink } = require('ipld-dag-pb')
+const { DAGNode, DAGLink, util } = require('ipld-dag-pb')
const CID = require('cids')
const map = require('async/map')
const mapSeries = require('async/mapSeries')
@@ -10,11 +10,10 @@ const series = require('async/series')
const parallel = require('async/parallel')
const eachLimit = require('async/eachLimit')
const waterfall = require('async/waterfall')
-const someLimit = require('async/someLimit')
+const detectLimit = require('async/detectLimit')
const setImmediate = require('async/setImmediate')
const { Key } = require('interface-datastore')
const errCode = require('err-code')
-
const createPinSet = require('./pin-set')
const { resolvePath } = require('../utils')
@@ -54,15 +53,23 @@ module.exports = (self) => {
const indirectKeys = new Set()
eachLimit(recursiveKeys(), concurrencyLimit, (multihash, cb) => {
dag._getRecursive(multihash, (err, nodes) => {
- if (err) { return cb(err) }
+ if (err) {
+ return cb(err)
+ }
- nodes
- .map(({ multihash }) => toB58String(multihash))
- // recursive pins pre-empt indirect pins
- .filter(key => !recursivePins.has(key))
- .forEach(key => indirectKeys.add(key))
+ map(nodes, (node, cb) => util.cid(node, cb), (err, cids) => {
+ if (err) {
+ return cb(err)
+ }
+
+ cids
+ .map(cids => cids.toBaseEncodedString())
+ // recursive pins pre-empt indirect pins
+ .filter(key => !recursivePins.has(key))
+ .forEach(key => indirectKeys.add(key))
- cb()
+ cb()
+ })
})
}, (err) => {
if (err) { return callback(err) }
@@ -79,28 +86,43 @@ module.exports = (self) => {
// create a DAGLink to the node with direct pins
cb => waterfall([
cb => pinset.storeSet(directKeys(), cb),
- (node, cb) => DAGLink.create(types.direct, node.size, node.multihash, cb),
+ ({ node, cid }, cb) => DAGLink.create(types.direct, node.size, cid, cb),
(link, cb) => { dLink = link; cb(null) }
], cb),
// create a DAGLink to the node with recursive pins
cb => waterfall([
cb => pinset.storeSet(recursiveKeys(), cb),
- (node, cb) => DAGLink.create(types.recursive, node.size, node.multihash, cb),
+ ({ node, cid }, cb) => DAGLink.create(types.recursive, node.size, cid, cb),
(link, cb) => { rLink = link; cb(null) }
], cb),
// the pin-set nodes link to a special 'empty' node, so make sure it exists
cb => DAGNode.create(Buffer.alloc(0), (err, empty) => {
if (err) { return cb(err) }
- dag.put(empty, { cid: new CID(empty.multihash), preload: false }, cb)
+ dag.put(empty, {
+ version: 0,
+ hashAlg: 'sha2-256',
+ format: 'dag-pb',
+ preload: false
+ }, cb)
}),
// create a root node with DAGLinks to the direct and recursive DAGs
cb => DAGNode.create(Buffer.alloc(0), [dLink, rLink], (err, node) => {
if (err) { return cb(err) }
root = node
- dag.put(root, { cid: new CID(root.multihash), preload: false }, cb)
+ dag.put(root, {
+ version: 0,
+ hashAlg: 'sha2-256',
+ format: 'dag-pb',
+ preload: false
+ }, (err, cid) => {
+ if (!err) {
+ root.multihash = cid.buffer
+ }
+ cb(err)
+ })
}),
// hack for CLI tests
@@ -261,34 +283,35 @@ module.exports = (self) => {
if (paths) {
// check the pinned state of specific hashes
- resolvePath(self.object, paths, (err, mhs) => {
- if (err) { return callback(err) }
+ waterfall([
+ (cb) => resolvePath(self.object, paths, cb),
+ (hashes, cb) => mapSeries(hashes, (hash, done) => pin._isPinnedWithType(hash, types.all, done), cb),
+ (results, cb) => {
+ results = results
+ .filter(result => result.pinned)
+ .map(({ key, reason }) => {
+ switch (reason) {
+ case types.direct:
+ case types.recursive:
+ return {
+ hash: key,
+ type: reason
+ }
+ default:
+ return {
+ hash: key,
+ type: `${types.indirect} through ${reason}`
+ }
+ }
+ })
- mapSeries(mhs, (multihash, cb) => {
- pin._isPinnedWithType(multihash, types.all, (err, res) => {
- if (err) { return cb(err) }
- const { pinned, reason } = res
- const key = toB58String(multihash)
- if (!pinned) {
- return cb(new Error(`Path ${key} is not pinned`))
- }
-
- switch (reason) {
- case types.direct:
- case types.recursive:
- return cb(null, {
- hash: key,
- type: reason
- })
- default:
- return cb(null, {
- hash: key,
- type: `${types.indirect} through ${reason}`
- })
- }
- })
- }, callback)
- })
+ if (!results.length) {
+ return cb(new Error(`Path is not pinned`))
+ }
+
+ cb(null, results)
+ }
+ ], callback)
} else {
// show all pinned items of type
let pins = []
@@ -333,40 +356,53 @@ module.exports = (self) => {
_isPinnedWithType: promisify((multihash, type, callback) => {
const key = toB58String(multihash)
const { recursive, direct, all } = types
+
// recursive
if ((type === recursive || type === all) && recursivePins.has(key)) {
- return callback(null, { pinned: true, reason: recursive })
+ return callback(null, {
+ key,
+ pinned: true,
+ reason: recursive
+ })
}
- if ((type === recursive)) {
- return callback(null, { pinned: false })
+
+ if (type === recursive) {
+ return callback(null, {
+ key,
+ pinned: false
+ })
}
+
// direct
if ((type === direct || type === all) && directPins.has(key)) {
- return callback(null, { pinned: true, reason: direct })
+ return callback(null, {
+ key,
+ pinned: true,
+ reason: direct
+ })
}
- if ((type === direct)) {
- return callback(null, { pinned: false })
+
+ if (type === direct) {
+ return callback(null, {
+ key,
+ pinned: false
+ })
}
// indirect (default)
// check each recursive key to see if multihash is under it
// arbitrary limit, enables handling 1000s of pins.
- let foundPin
- someLimit(recursiveKeys(), concurrencyLimit, (key, cb) => {
- dag.get(new CID(key), (err, res) => {
- if (err) { return cb(err) }
-
- pinset.hasDescendant(res.value, multihash, (err, has) => {
- if (has) {
- foundPin = toB58String(res.value.multihash)
- }
- cb(err, has)
- })
- })
- }, (err, found) => {
- if (err) { return callback(err) }
- return callback(null, { pinned: found, reason: foundPin })
- })
+ detectLimit(recursiveKeys().map(key => new CID(key)), concurrencyLimit, (cid, cb) => {
+ waterfall([
+ (done) => dag.get(cid, '', { preload: false }, done),
+ (result, done) => done(null, result.value),
+ (node, done) => pinset.hasDescendant(node, key, done)
+ ], cb)
+ }, (err, cid) => callback(err, {
+ key,
+ pinned: Boolean(cid),
+ reason: cid
+ }))
}),
_load: promisify(callback => {
@@ -376,7 +412,9 @@ module.exports = (self) => {
(_, cb) => repo.datastore.has(pinDataStoreKey, cb),
(has, cb) => has ? cb() : cb(new Error('No pins to load')),
(cb) => repo.datastore.get(pinDataStoreKey, cb),
- (mh, cb) => dag.get(new CID(mh), '', { preload: false }, cb)
+ (mh, cb) => {
+ dag.get(new CID(mh), '', { preload: false }, cb)
+ }
], (err, pinRoot) => {
if (err) {
if (err.message === 'No pins to load') {
diff --git a/src/core/index.js b/src/core/index.js
index 93ff3431d8..0d53a21342 100644
--- a/src/core/index.js
+++ b/src/core/index.js
@@ -185,7 +185,11 @@ class IPFS extends EventEmitter {
}
// ipfs.files
- const mfs = components.mfs(this)
+ const mfs = components.mfs({
+ ipld: this._ipld,
+ repo: this._repo,
+ repoOwner: (this._options.mfs && this._options.mfs.repoOwner) || true
+ })
Object.keys(mfs).forEach(key => {
this.files[key] = mfs[key]
diff --git a/src/core/utils.js b/src/core/utils.js
index 152f582202..24fcd68edb 100644
--- a/src/core/utils.js
+++ b/src/core/utils.js
@@ -105,31 +105,33 @@ const resolvePath = promisify(function (objectAPI, ipfsPaths, callback) {
const rootHash = new CID(parsedPath.hash)
const rootLinks = parsedPath.links
+
if (!rootLinks.length) {
return cb(null, rootHash.buffer)
}
- objectAPI.get(rootHash.multihash, follow.bind(null, rootLinks))
+ objectAPI.get(rootHash, follow.bind(null, rootHash, rootLinks))
// recursively follow named links to the target node
- function follow (links, err, obj) {
+ function follow (cid, links, err, obj) {
if (err) {
return cb(err)
}
+
if (!links.length) {
// done tracing, obj is the target node
- return cb(null, obj.multihash)
+ return cb(null, cid.buffer)
}
const linkName = links[0]
const nextObj = obj.links.find(link => link.name === linkName)
if (!nextObj) {
return cb(new Error(
- `no link named "${linkName}" under ${obj.toJSON().multihash}`
+ `no link named "${linkName}" under ${cid.toBaseEncodedString()}`
))
}
- objectAPI.get(nextObj.multihash, follow.bind(null, links.slice(1)))
+ objectAPI.get(nextObj.cid, follow.bind(null, nextObj.cid, links.slice(1)))
}
}, callback)
})
diff --git a/src/http/api/resources/object.js b/src/http/api/resources/object.js
index dc68c71251..5d970dc1ae 100644
--- a/src/http/api/resources/object.js
+++ b/src/http/api/resources/object.js
@@ -6,7 +6,6 @@ const dagPB = require('ipld-dag-pb')
const DAGLink = dagPB.DAGLink
const DAGNode = dagPB.DAGNode
const waterfall = require('async/waterfall')
-const series = require('async/series')
const debug = require('debug')
const log = debug('jsipfs:http-api:object')
log.error = debug('jsipfs:http-api:object:error')
@@ -36,7 +35,10 @@ exports.new = (request, reply) => {
const ipfs = request.server.app.ipfs
const template = request.query.arg
- ipfs.object.new(template, (err, node) => {
+ waterfall([
+ (cb) => ipfs.object.new(template, cb),
+ (node, cb) => dagPB.util.cid(node, (err, cid) => cb(err, { node, cid }))
+ ], (err, results) => {
if (err) {
log.error(err)
return reply({
@@ -45,17 +47,17 @@ exports.new = (request, reply) => {
}).code(500)
}
- const nodeJSON = node.toJSON()
+ const nodeJSON = results.node.toJSON()
const answer = {
Data: nodeJSON.data,
- Hash: nodeJSON.multihash,
+ Hash: results.cid.toBaseEncodedString(),
Size: nodeJSON.size,
Links: nodeJSON.links.map((l) => {
return {
Name: l.name,
Size: l.size,
- Hash: l.multihash
+ Hash: l.cid
}
})
}
@@ -74,7 +76,10 @@ exports.get = {
const enc = request.query.enc || 'base58'
const ipfs = request.server.app.ipfs
- ipfs.object.get(key, { enc: enc }, (err, node) => {
+ waterfall([
+ (cb) => ipfs.object.get(key, { enc: enc }, cb),
+ (node, cb) => dagPB.util.cid(node, (err, cid) => cb(err, { node, cid }))
+ ], (err, results) => {
if (err) {
log.error(err)
return reply({
@@ -83,21 +88,21 @@ exports.get = {
}).code(500)
}
- const nodeJSON = node.toJSON()
+ const nodeJSON = results.node.toJSON()
- if (Buffer.isBuffer(node.data)) {
- nodeJSON.data = node.data.toString(request.query['data-encoding'] || undefined)
+ if (Buffer.isBuffer(results.node.data)) {
+ nodeJSON.data = results.node.data.toString(request.query['data-encoding'] || undefined)
}
const answer = {
Data: nodeJSON.data,
- Hash: nodeJSON.multihash,
+ Hash: results.cid.toBaseEncodedString(),
Size: nodeJSON.size,
Links: nodeJSON.links.map((l) => {
return {
Name: l.name,
Size: l.size,
- Hash: l.multihash
+ Hash: l.cid
}
})
}
@@ -128,25 +133,28 @@ exports.put = {
// TODO fix: stream is not emitting the 'end' event
stream.on('data', (data) => {
if (enc === 'protobuf') {
- dagPB.util.deserialize(data, (err, node) => {
+ waterfall([
+ (cb) => dagPB.util.deserialize(data, cb),
+ (node, cb) => dagPB.util.cid(node, (err, cid) => cb(err, { node, cid }))
+ ], (err, results) => {
if (err) {
return reply({
- Message: 'Failed to receive protobuf encoded: ' + err,
+ Message: 'Failed to put object: ' + err,
Code: 0
}).code(500).takeover()
}
- const nodeJSON = node.toJSON()
+ const nodeJSON = results.node.toJSON()
const answer = {
Data: nodeJSON.data,
- Hash: nodeJSON.multihash,
+ Hash: results.cid.toBaseEncodedString(),
Size: nodeJSON.size,
Links: nodeJSON.links.map((l) => {
return {
Name: l.name,
Size: l.size,
- Hash: l.multihash
+ Hash: l.cid
}
})
}
@@ -190,18 +198,11 @@ exports.put = {
const ipfs = request.server.app.ipfs
let node = request.pre.args.node
- series([
- (cb) => {
- DAGNode.create(Buffer.from(node.Data), node.Links, (err, _node) => {
- if (err) {
- return cb(err)
- }
- node = _node
- cb()
- })
- },
- (cb) => ipfs.object.put(node, cb)
- ], (err) => {
+ waterfall([
+ (cb) => DAGNode.create(Buffer.from(node.Data), node.Links, cb),
+ (node, cb) => ipfs.object.put(node, cb),
+ (node, cb) => dagPB.util.cid(node, (err, cid) => cb(err, { cid, node }))
+ ], (err, results) => {
if (err) {
log.error(err)
@@ -211,17 +212,17 @@ exports.put = {
}).code(500)
}
- const nodeJSON = node.toJSON()
+ const nodeJSON = results.node.toJSON()
const answer = {
Data: nodeJSON.data,
- Hash: nodeJSON.multihash,
+ Hash: results.cid.toBaseEncodedString(),
Size: nodeJSON.size,
Links: nodeJSON.links.map((l) => {
return {
Name: l.name,
Size: l.size,
- Hash: l.multihash
+ Hash: l.cid
}
})
}
@@ -244,7 +245,7 @@ exports.stat = {
if (err) {
log.error(err)
return reply({
- Message: 'Failed to get object: ' + err,
+ Message: 'Failed to stat object: ' + err,
Code: 0
}).code(500)
}
@@ -267,7 +268,7 @@ exports.data = {
if (err) {
log.error(err)
return reply({
- Message: 'Failed to get object: ' + err,
+ Message: 'Failed to get object data: ' + err,
Code: 0
}).code(500)
}
@@ -286,24 +287,27 @@ exports.links = {
const key = request.pre.args.key
const ipfs = request.server.app.ipfs
- ipfs.object.get(key, (err, node) => {
+ waterfall([
+ (cb) => ipfs.object.get(key, cb),
+ (node, cb) => dagPB.util.cid(node, (err, cid) => cb(err, { node, cid }))
+ ], (err, results) => {
if (err) {
log.error(err)
return reply({
- Message: 'Failed to get object: ' + err,
+ Message: 'Failed to get object links: ' + err,
Code: 0
}).code(500)
}
- const nodeJSON = node.toJSON()
+ const nodeJSON = results.node.toJSON()
return reply({
- Hash: nodeJSON.multihash,
+ Hash: results.cid.toBaseEncodedString(),
Links: nodeJSON.links.map((l) => {
return {
Name: l.name,
Size: l.size,
- Hash: l.multihash
+ Hash: l.cid
}
})
})
@@ -360,27 +364,30 @@ exports.patchAppendData = {
const data = request.pre.args.data
const ipfs = request.server.app.ipfs
- ipfs.object.patch.appendData(key, data, (err, node) => {
+ waterfall([
+ (cb) => ipfs.object.patch.appendData(key, data, cb),
+ (node, cb) => dagPB.util.cid(node, (err, cid) => cb(err, { node, cid }))
+ ], (err, results) => {
if (err) {
log.error(err)
return reply({
- Message: 'Failed to apend data to object: ' + err,
+ Message: 'Failed to append data to object: ' + err,
Code: 0
}).code(500)
}
- const nodeJSON = node.toJSON()
+ const nodeJSON = results.node.toJSON()
const answer = {
Data: nodeJSON.data,
- Hash: nodeJSON.multihash,
+ Hash: results.cid.toBaseEncodedString(),
Size: nodeJSON.size,
Links: nodeJSON.links.map((l) => {
return {
Name: l.name,
Size: l.size,
- Hash: l.multihash
+ Hash: l.cid
}
})
}
@@ -400,20 +407,23 @@ exports.patchSetData = {
const data = request.pre.args.data
const ipfs = request.server.app.ipfs
- ipfs.object.patch.setData(key, data, (err, node) => {
+ waterfall([
+ (cb) => ipfs.object.patch.setData(key, data, cb),
+ (node, cb) => dagPB.util.cid(node, (err, cid) => cb(err, { node, cid }))
+ ], (err, results) => {
if (err) {
log.error(err)
return reply({
- Message: 'Failed to apend data to object: ' + err,
+ Message: 'Failed to set data on object: ' + err,
Code: 0
}).code(500)
}
- const nodeJSON = node.toJSON()
+ const nodeJSON = results.node.toJSON()
return reply({
- Hash: nodeJSON.multihash,
+ Hash: results.cid.toBaseEncodedString(),
Links: nodeJSON.links
})
})
@@ -464,50 +474,35 @@ exports.patchAddLink = {
const ref = request.pre.args.ref
const ipfs = request.server.app.ipfs
- ipfs.object.get(ref, (err, linkedObj) => {
+ waterfall([
+ (cb) => ipfs.object.get(ref, cb),
+ (node, cb) => ipfs.object.patch.addLink(root, new DAGLink(name, node.size, ref), cb),
+ (node, cb) => dagPB.util.cid(node, (err, cid) => cb(err, { node, cid }))
+ ], (err, results) => {
if (err) {
log.error(err)
return reply({
- Message: 'Failed to get linked object: ' + err,
+ Message: 'Failed to add link to object: ' + err,
Code: 0
}).code(500)
}
- waterfall([
- (cb) => {
- const link = new DAGLink(
- name,
- linkedObj.size,
- linkedObj.multihash)
- cb(null, link)
- },
- (link, cb) => ipfs.object.patch.addLink(root, link, cb)
- ], (err, node) => {
- if (err) {
- log.error(err)
- return reply({
- Message: 'Failed to get linked object: ' + err,
- Code: 0
- }).code(500)
- }
+ const nodeJSON = results.node.toJSON()
- const nodeJSON = node.toJSON()
-
- const answer = {
- Data: nodeJSON.data,
- Hash: nodeJSON.multihash,
- Size: nodeJSON.size,
- Links: nodeJSON.links.map((l) => {
- return {
- Name: l.name,
- Size: l.size,
- Hash: l.multihash
- }
- })
- }
+ const answer = {
+ Data: nodeJSON.data,
+ Hash: results.cid.toBaseEncodedString(),
+ Size: nodeJSON.size,
+ Links: nodeJSON.links.map((l) => {
+ return {
+ Name: l.name,
+ Size: l.size,
+ Hash: l.cid
+ }
+ })
+ }
- return reply(answer)
- })
+ return reply(answer)
})
}
}
@@ -547,7 +542,10 @@ exports.patchRmLink = {
const link = request.pre.args.link
const ipfs = request.server.app.ipfs
- ipfs.object.patch.rmLink(root, { name: link }, (err, node) => {
+ waterfall([
+ (cb) => ipfs.object.patch.rmLink(root, { name: link }, cb),
+ (node, cb) => dagPB.util.cid(node, (err, cid) => cb(err, { node, cid }))
+ ], (err, results) => {
if (err) {
log.error(err)
return reply({
@@ -556,17 +554,17 @@ exports.patchRmLink = {
}).code(500)
}
- const nodeJSON = node.toJSON()
+ const nodeJSON = results.node.toJSON()
const answer = {
Data: nodeJSON.data,
- Hash: nodeJSON.multihash,
+ Hash: results.cid.toBaseEncodedString(),
Size: nodeJSON.size,
Links: nodeJSON.links.map((l) => {
return {
Name: l.name,
Size: l.size,
- Hash: l.multihash
+ Hash: l.cid
}
})
}
diff --git a/test/core/object.spec.js b/test/core/object.spec.js
index 6dc0c10fb7..ab1d0eea85 100644
--- a/test/core/object.spec.js
+++ b/test/core/object.spec.js
@@ -9,7 +9,13 @@ chai.use(dirtyChai)
const hat = require('hat')
const IPFSFactory = require('ipfsd-ctl')
const auto = require('async/auto')
+const waterfall = require('async/waterfall')
const IPFS = require('../../src/core')
+const {
+ util: {
+ cid
+ }
+} = require('ipld-dag-pb')
describe('object', () => {
let ipfsd, ipfs
@@ -52,23 +58,19 @@ describe('object', () => {
ipfs.object.put(Buffer.from(hat()), (err, dagNode) => {
expect(err).to.not.exist()
- ipfs.object.get(dagNode.multihash, null, (err) => {
+ cid(dagNode, (err, result) => {
expect(err).to.not.exist()
- done()
+
+ ipfs.object.get(result, null, (err) => {
+ expect(err).to.not.exist()
+ done()
+ })
})
})
})
})
describe('put', () => {
- it('should callback with error for invalid CID input', (done) => {
- ipfs.object.put({ multihash: 'INVALID CID' }, (err) => {
- expect(err).to.exist()
- expect(err.code).to.equal('ERR_INVALID_CID')
- done()
- })
- })
-
it('should not error when passed null options', (done) => {
ipfs.object.put(Buffer.from(hat()), null, (err) => {
expect(err).to.not.exist()
@@ -80,18 +82,28 @@ describe('object', () => {
describe('patch.addLink', () => {
it('should not error when passed null options', (done) => {
auto({
- a: (cb) => ipfs.object.put(Buffer.from(hat()), cb),
- b: (cb) => ipfs.object.put(Buffer.from(hat()), cb)
- }, (err, nodes) => {
+ a: (cb) => {
+ waterfall([
+ (done) => ipfs.object.put(Buffer.from(hat()), done),
+ (node, done) => cid(node, (err, cid) => done(err, { node, cid }))
+ ], cb)
+ },
+ b: (cb) => {
+ waterfall([
+ (done) => ipfs.object.put(Buffer.from(hat()), done),
+ (node, done) => cid(node, (err, cid) => done(err, { node, cid }))
+ ], cb)
+ }
+ }, (err, results) => {
expect(err).to.not.exist()
const link = {
- name: nodes.b.name,
- multihash: nodes.b.multihash,
- size: nodes.b.size
+ name: 'link-name',
+ cid: results.b.cid,
+ size: results.b.node.size
}
- ipfs.object.patch.addLink(nodes.a.multihash, link, null, (err) => {
+ ipfs.object.patch.addLink(results.a.cid, link, null, (err) => {
expect(err).to.not.exist()
done()
})
@@ -102,20 +114,33 @@ describe('object', () => {
describe('patch.rmLink', () => {
it('should not error when passed null options', (done) => {
auto({
- nodeA: (cb) => ipfs.object.put(Buffer.from(hat()), cb),
- nodeB: (cb) => ipfs.object.put(Buffer.from(hat()), cb),
+ nodeA: (cb) => {
+ waterfall([
+ (done) => ipfs.object.put(Buffer.from(hat()), done),
+ (node, done) => cid(node, (err, cid) => done(err, { node, cid }))
+ ], cb)
+ },
+ nodeB: (cb) => {
+ waterfall([
+ (done) => ipfs.object.put(Buffer.from(hat()), done),
+ (node, done) => cid(node, (err, cid) => done(err, { node, cid }))
+ ], cb)
+ },
nodeAWithLink: ['nodeA', 'nodeB', (res, cb) => {
- ipfs.object.patch.addLink(res.nodeA.multihash, {
- name: res.nodeB.name,
- multihash: res.nodeB.multihash,
- size: res.nodeB.size
- }, cb)
+ waterfall([
+ (done) => ipfs.object.patch.addLink(res.nodeA.cid, {
+ name: res.nodeB.node.name,
+ multihash: res.nodeB.cid,
+ size: res.nodeB.node.size
+ }, done),
+ (node, done) => cid(node, (err, cid) => done(err, { node, cid }))
+ ], cb)
}]
}, (err, res) => {
expect(err).to.not.exist()
- const link = res.nodeAWithLink.links[0]
- ipfs.object.patch.rmLink(res.nodeAWithLink.multihash, link, null, (err) => {
+ const link = res.nodeAWithLink.node.links[0]
+ ipfs.object.patch.rmLink(res.nodeAWithLink.cid, link, null, (err) => {
expect(err).to.not.exist()
done()
})
@@ -128,9 +153,13 @@ describe('object', () => {
ipfs.object.put(Buffer.from(hat()), null, (err, dagNode) => {
expect(err).to.not.exist()
- ipfs.object.patch.appendData(dagNode.multihash, Buffer.from(hat()), null, (err) => {
+ cid(dagNode, (err, result) => {
expect(err).to.not.exist()
- done()
+
+ ipfs.object.patch.appendData(result, Buffer.from(hat()), null, (err) => {
+ expect(err).to.not.exist()
+ done()
+ })
})
})
})
@@ -141,9 +170,13 @@ describe('object', () => {
ipfs.object.put(Buffer.from(hat()), null, (err, dagNode) => {
expect(err).to.not.exist()
- ipfs.object.patch.setData(dagNode.multihash, Buffer.from(hat()), null, (err) => {
+ cid(dagNode, (err, result) => {
expect(err).to.not.exist()
- done()
+
+ ipfs.object.patch.setData(result, Buffer.from(hat()), null, (err) => {
+ expect(err).to.not.exist()
+ done()
+ })
})
})
})
diff --git a/test/core/pin-set.js b/test/core/pin-set.js
index 5207bd48ad..ece713d61c 100644
--- a/test/core/pin-set.js
+++ b/test/core/pin-set.js
@@ -9,8 +9,12 @@ chai.use(dirtyChai)
const parallelLimit = require('async/parallelLimit')
const series = require('async/series')
-const { fromB58String } = require('multihashes')
-const { DAGNode } = require('ipld-dag-pb')
+const {
+ util: {
+ cid
+ },
+ DAGNode
+} = require('ipld-dag-pb')
const CID = require('cids')
const IPFS = require('../../src/core')
@@ -31,7 +35,7 @@ function createNodes (num, callback) {
const items = []
for (let i = 0; i < num; i++) {
items.push(cb =>
- createNode(String(i), (err, node) => cb(err, node.multihash))
+ createNode(String(i), (err, res) => cb(err, res.cid.toBaseEncodedString()))
)
}
@@ -44,7 +48,18 @@ function createNode (data, links = [], callback) {
links = []
}
- DAGNode.create(data, links, callback)
+ DAGNode.create(data, links, (err, node) => {
+ if (err) {
+ return callback(err)
+ }
+
+ cid(node, (err, result) => {
+ callback(err, {
+ node,
+ cid: result
+ })
+ })
+ })
}
describe('pinSet', function () {
@@ -73,17 +88,16 @@ describe('pinSet', function () {
it('generates a root node with links and hash', function (done) {
const expectedRootHash = 'QmcLiSTjcjoVC2iuGbk6A2PVcWV3WvjZT4jxfNis1vjyrR'
- createNode('data', (err, node) => {
+ createNode('data', (err, result) => {
expect(err).to.not.exist()
- const nodeHash = node.multihash
+ const nodeHash = result.cid.toBaseEncodedString()
pinSet.storeSet([nodeHash], (err, rootNode) => {
expect(err).to.not.exist()
- const node = rootNode.toJSON()
- expect(node.multihash).to.eql(expectedRootHash)
- expect(node.links).to.have.length(defaultFanout + 1)
+ expect(rootNode.cid.toBaseEncodedString()).to.eql(expectedRootHash)
+ expect(rootNode.node.links).to.have.length(defaultFanout + 1)
- const lastLink = node.links[node.links.length - 1]
- const mhash = fromB58String(lastLink.multihash)
+ const lastLink = rootNode.node.links[rootNode.node.links.length - 1]
+ const mhash = lastLink.cid.toBaseEncodedString()
expect(mhash).to.eql(nodeHash)
done()
})
@@ -96,23 +110,22 @@ describe('pinSet', function () {
this.timeout(19 * 1000)
const expectedHash = 'QmbvhSy83QWfgLXDpYjDmLWBFfGc8utoqjcXHyj3gYuasT'
const count = maxItems + 1
- createNodes(count, (err, nodes) => {
+ createNodes(count, (err, cids) => {
expect(err).to.not.exist()
- pinSet.storeSet(nodes, (err, node) => {
+ pinSet.storeSet(cids, (err, result) => {
expect(err).to.not.exist()
- node = node.toJSON()
- expect(node.size).to.eql(3184696)
- expect(node.links).to.have.length(defaultFanout)
- expect(node.multihash).to.eql(expectedHash)
+ expect(result.node.size).to.eql(3184696)
+ expect(result.node.links).to.have.length(defaultFanout)
+ expect(result.cid.toBaseEncodedString()).to.eql(expectedHash)
- pinSet.loadSet(node, '', (err, loaded) => {
+ pinSet.loadSet(result.node, '', (err, loaded) => {
expect(err).to.not.exist()
expect(loaded).to.have.length(30)
const hashes = loaded.map(l => new CID(l).toBaseEncodedString())
// just check the first node, assume all are children if successful
- pinSet.hasDescendant(node, hashes[0], (err, has) => {
+ pinSet.hasDescendant(result.node, hashes[0], (err, has) => {
expect(err).to.not.exist()
expect(has).to.eql(true)
done()
@@ -174,10 +187,10 @@ describe('pinSet', function () {
createNodes(defaultFanout, (err, nodes) => {
expect(err).to.not.exist()
- pinSet.storeSet(nodes, (err, node) => {
+ pinSet.storeSet(nodes, (err, result) => {
expect(err).to.not.exist()
- pinSet.walkItems(node, walker, err => {
+ pinSet.walkItems(result.node, walker, err => {
expect(err).to.not.exist()
expect(seen).to.have.length(defaultFanout)
expect(seen[0].idx).to.eql(defaultFanout)
diff --git a/test/core/pin.js b/test/core/pin.js
index f99ded2548..6cfb57a49f 100644
--- a/test/core/pin.js
+++ b/test/core/pin.js
@@ -126,7 +126,7 @@ describe('pin', function () {
return pin._isPinnedWithType(pins.mercuryWiki, pinTypes.indirect)
.then(result => {
expect(result.pinned).to.eql(true)
- expect(result.reason).to.eql(pins.root)
+ expect(result.reason.toBaseEncodedString()).to.eql(pins.root)
})
})
diff --git a/test/core/preload.spec.js b/test/core/preload.spec.js
index 37528b81fe..8af0743880 100644
--- a/test/core/preload.spec.js
+++ b/test/core/preload.spec.js
@@ -3,13 +3,17 @@
'use strict'
const hat = require('hat')
-const CID = require('cids')
const parallel = require('async/parallel')
const waterfall = require('async/waterfall')
const chai = require('chai')
const dirtyChai = require('dirty-chai')
const expect = chai.expect
chai.use(dirtyChai)
+const {
+ util: {
+ cid
+ }
+} = require('ipld-dag-pb')
const MockPreloadNode = require('../utils/mock-preload-node')
const IPFS = require('../../src')
@@ -157,8 +161,11 @@ describe('preload', () => {
ipfs.object.new((err, node) => {
expect(err).to.not.exist()
- const cid = new CID(node.multihash)
- MockPreloadNode.waitForCids(cid.toBaseEncodedString(), done)
+ cid(node, (err, result) => {
+ expect(err).to.not.exist()
+
+ MockPreloadNode.waitForCids(result.toBaseEncodedString(), done)
+ })
})
})
@@ -166,27 +173,43 @@ describe('preload', () => {
ipfs.object.put({ Data: Buffer.from(hat()), Links: [] }, (err, node) => {
expect(err).to.not.exist()
- const cid = new CID(node.multihash)
- MockPreloadNode.waitForCids(cid.toBaseEncodedString(), done)
+ cid(node, (err, result) => {
+ expect(err).to.not.exist()
+
+ MockPreloadNode.waitForCids(result.toBaseEncodedString(), done)
+ })
})
})
it('should preload content added with object.patch.addLink', (done) => {
parallel({
- parent: (cb) => ipfs.object.put({ Data: Buffer.from(hat()), Links: [] }, cb),
- link: (cb) => ipfs.object.put({ Data: Buffer.from(hat()), Links: [] }, cb)
- }, (err, nodes) => {
+ parent: (cb) => {
+ waterfall([
+ (done) => ipfs.object.put({ Data: Buffer.from(hat()), Links: [] }, done),
+ (node, done) => cid(node, (err, cid) => done(err, { node, cid }))
+ ], cb)
+ },
+ link: (cb) => {
+ waterfall([
+ (done) => ipfs.object.put({ Data: Buffer.from(hat()), Links: [] }, done),
+ (node, done) => cid(node, (err, cid) => done(err, { node, cid }))
+ ], cb)
+ }
+ }, (err, result) => {
expect(err).to.not.exist()
- ipfs.object.patch.addLink(nodes.parent.multihash, {
+ ipfs.object.patch.addLink(result.parent.cid, {
name: 'link',
- multihash: nodes.link.multihash,
- size: nodes.link.size
+ cid: result.link.cid,
+ size: result.link.node.size
}, (err, node) => {
expect(err).to.not.exist()
- const cid = new CID(node.multihash)
- MockPreloadNode.waitForCids(cid.toBaseEncodedString(), done)
+ cid(node, (err, result) => {
+ expect(err).to.not.exist()
+
+ MockPreloadNode.waitForCids(result.toBaseEncodedString(), done)
+ })
})
})
})
@@ -194,24 +217,32 @@ describe('preload', () => {
it('should preload content added with object.patch.rmLink', (done) => {
waterfall([
(cb) => ipfs.object.put({ Data: Buffer.from(hat()), Links: [] }, cb),
- (link, cb) => {
+ (node, cb) => cid(node, (err, cid) => cb(err, { node, cid })),
+ ({ node, cid }, cb) => {
ipfs.object.put({
Data: Buffer.from(hat()),
Links: [{
name: 'link',
- multihash: link.multihash,
- size: link.size
+ cid: cid,
+ size: node.size
}]
}, cb)
}
], (err, parent) => {
expect(err).to.not.exist()
- ipfs.object.patch.rmLink(parent.multihash, { name: 'link' }, (err, node) => {
+ cid(parent, (err, result) => {
expect(err).to.not.exist()
- const cid = new CID(node.multihash)
- MockPreloadNode.waitForCids(cid.toBaseEncodedString(), done)
+ ipfs.object.patch.rmLink(result, { name: 'link' }, (err, node) => {
+ expect(err).to.not.exist()
+
+ cid(node, (err, result) => {
+ expect(err).to.not.exist()
+
+ MockPreloadNode.waitForCids(result.toBaseEncodedString(), done)
+ })
+ })
})
})
})
@@ -220,11 +251,18 @@ describe('preload', () => {
ipfs.object.put({ Data: Buffer.from(hat()), Links: [] }, (err, node) => {
expect(err).to.not.exist()
- ipfs.object.patch.setData(node.multihash, Buffer.from(hat()), (err, node) => {
+ cid(node, (err, result) => {
expect(err).to.not.exist()
- const cid = new CID(node.multihash)
- MockPreloadNode.waitForCids(cid.toBaseEncodedString(), done)
+ ipfs.object.patch.setData(result, Buffer.from(hat()), (err, node) => {
+ expect(err).to.not.exist()
+
+ cid(node, (err, result) => {
+ expect(err).to.not.exist()
+
+ MockPreloadNode.waitForCids(result.toBaseEncodedString(), done)
+ })
+ })
})
})
})
@@ -233,11 +271,18 @@ describe('preload', () => {
ipfs.object.put({ Data: Buffer.from(hat()), Links: [] }, (err, node) => {
expect(err).to.not.exist()
- ipfs.object.patch.appendData(node.multihash, Buffer.from(hat()), (err, node) => {
+ cid(node, (err, result) => {
expect(err).to.not.exist()
- const cid = new CID(node.multihash)
- MockPreloadNode.waitForCids(cid.toBaseEncodedString(), done)
+ ipfs.object.patch.appendData(result, Buffer.from(hat()), (err, node) => {
+ expect(err).to.not.exist()
+
+ cid(node, (err, result) => {
+ expect(err).to.not.exist()
+
+ MockPreloadNode.waitForCids(result.toBaseEncodedString(), done)
+ })
+ })
})
})
})
@@ -245,10 +290,15 @@ describe('preload', () => {
it('should preload content retrieved with object.get', (done) => {
ipfs.object.new(null, { preload: false }, (err, node) => {
expect(err).to.not.exist()
- ipfs.object.get(node.multihash, (err) => {
+
+ cid(node, (err, result) => {
expect(err).to.not.exist()
- const cid = new CID(node.multihash)
- MockPreloadNode.waitForCids(cid.toBaseEncodedString(), done)
+
+ ipfs.object.get(result, (err) => {
+ expect(err).to.not.exist()
+
+ MockPreloadNode.waitForCids(result.toBaseEncodedString(), done)
+ })
})
})
})
diff --git a/test/http-api/object.js b/test/http-api/object.js
index a056ad95cd..39482e9961 100644
--- a/test/http-api/object.js
+++ b/test/http-api/object.js
@@ -45,10 +45,14 @@ describe('object endpoint', () => {
it('.new', (done) => {
ipfs.object.new(asJson((err, res) => {
expect(err).to.not.exist()
- expect(res.multihash)
- .to.equal('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n')
expect(res.links).to.be.eql([])
- done()
+
+ dagPB.util.cid(res, (err, cid) => {
+ expect(err).to.not.exist()
+ expect(cid.toBaseEncodedString())
+ .to.equal('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n')
+ done()
+ })
}))
})
@@ -91,10 +95,9 @@ describe('object endpoint', () => {
const filePath = fs.readFileSync('test/fixtures/test-data/node.json')
const expectedResult = {
data: Buffer.from('another'),
- multihash: 'QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm',
links: [{
name: 'some link',
- multihash: 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V',
+ cid: 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V',
size: 8
}],
size: 68
@@ -103,7 +106,13 @@ describe('object endpoint', () => {
ipfs.object.put(filePath, { enc: 'json' }, asJson((err, res) => {
expect(err).to.not.exist()
expect(res).to.eql(expectedResult)
- done()
+
+ dagPB.util.cid(res, (err, cid) => {
+ expect(err).to.not.exist()
+ expect(cid.toBaseEncodedString())
+ .to.equal('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm')
+ done()
+ })
}))
})
})
@@ -179,7 +188,7 @@ describe('object endpoint', () => {
it('returns value', (done) => {
const expectedResult = {
name: 'some link',
- multihash: 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V',
+ cid: 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V',
size: 8
}
@@ -213,7 +222,6 @@ describe('object endpoint', () => {
const filePath = 'test/fixtures/test-data/badnode.json'
const expectedResult = {
data: fs.readFileSync(filePath),
- multihash: 'QmfY37rjbPCZRnhvvJuQ46htW3VCAWziVB991P79h6WSv6',
links: [],
size: 19
}
@@ -221,7 +229,13 @@ describe('object endpoint', () => {
ipfs.object.patch.appendData(key, filePath, { enc: 'base58' }, asJson((err, res) => {
expect(err).to.not.exist()
expect(res).to.eql(expectedResult)
- done()
+
+ dagPB.util.cid(res, (err, cid) => {
+ expect(err).to.not.exist()
+ expect(cid.toBaseEncodedString())
+ .to.equal('QmfY37rjbPCZRnhvvJuQ46htW3VCAWziVB991P79h6WSv6')
+ done()
+ })
}))
})
})
@@ -248,7 +262,6 @@ describe('object endpoint', () => {
const filePath = 'test/fixtures/test-data/badnode.json'
const expectedResult = {
data: fs.readFileSync(filePath),
- multihash: 'QmfY37rjbPCZRnhvvJuQ46htW3VCAWziVB991P79h6WSv6',
links: [],
size: 19
}
@@ -256,7 +269,13 @@ describe('object endpoint', () => {
ipfs.object.patch.setData(key, filePath, { enc: 'base58' }, asJson((err, res) => {
expect(err).to.not.exist()
expect(res).to.eql(expectedResult)
- done()
+
+ dagPB.util.cid(res, (err, cid) => {
+ expect(err).to.not.exist()
+ expect(cid.toBaseEncodedString())
+ .to.equal('QmfY37rjbPCZRnhvvJuQ46htW3VCAWziVB991P79h6WSv6')
+ done()
+ })
}))
})
})
@@ -294,13 +313,18 @@ describe('object endpoint', () => {
const link = new DAGLink(name, 10, ref)
ipfs.object.patch.addLink(root, link, { enc: 'base58' }, asJson((err, res) => {
expect(err).not.to.exist()
- expect(res.multihash).to.equal('QmdVHE8fUD6FLNLugtNxqDFyhaCgdob372hs6BYEe75VAK')
expect(res.links[0]).to.eql({
name: 'foo',
- multihash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn',
+ cid: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn',
size: 4
})
- done()
+
+ dagPB.util.cid(res, (err, cid) => {
+ expect(err).to.not.exist()
+ expect(cid.toBaseEncodedString())
+ .to.equal('QmdVHE8fUD6FLNLugtNxqDFyhaCgdob372hs6BYEe75VAK')
+ done()
+ })
}))
})
})
From ccd0c16d32139f72bbdc1155a05cd5a6c5fa2595 Mon Sep 17 00:00:00 2001
From: David Dias
Date: Fri, 26 Oct 2018 14:53:13 +0200
Subject: [PATCH 02/11] docs: adjust README to new files API
---
README.md | 87 +++++++++++++++++++++++++++++--------------------------
1 file changed, 46 insertions(+), 41 deletions(-)
diff --git a/README.md b/README.md
index fd25760249..bf3750f73c 100644
--- a/README.md
+++ b/README.md
@@ -25,13 +25,13 @@
]
-### Project status
+### Project status - `Alpha`
We've come a long way, but this project is still in Alpha, lots of development is happening, API might change, beware of the Dragons 🐉..
**Want to get started?** Check our [examples folder](/examples) to learn how to spawn an IPFS node in Node.js and in the Browser.
-You can check the development status at the [Waffle Board](https://waffle.io/ipfs/js-ipfs).
+You can check the development status at the [Kanban Board](https://waffle.io/ipfs/js-ipfs).
[](https://waffle.io/ipfs/js-ipfs/metrics/throughput)
@@ -494,36 +494,35 @@ The core API is grouped into several areas:
#### Files
-- [files](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md)
- - [`ipfs.files.add(data, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesadd).
- - [`ipfs.files.addPullStream([options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesaddpullstream)
- - [`ipfs.files.addReadableStream([options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesaddreadablestream)
- - [`ipfs.files.cat(ipfsPath, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filescat).
- - [`ipfs.files.catPullStream(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filescatpullstream)
- - [`ipfs.files.catReadableStream(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filescatreadablestream)
- - [`ipfs.files.get(ipfsPath, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesget).
- - [`ipfs.files.getPullStream(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesgetpullstream)
- - [`ipfs.files.getReadableStream(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesgetreadablestream)
+- [Regular Files API](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md)
+ - [`ipfs.add(data, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#add)
+ - [`ipfs.addPullStream([options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#addpullstream)
+ - [`ipfs.addReadableStream([options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#addreadablestream)
+ - [`ipfs.addFromStream(stream, [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#addfromstream)
+ - [`ipfs.addFromFs(path, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#addfromfs)
+ - [`ipfs.addFromUrl(url, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#addfromurl)
+ - [`ipfs.cat(ipfsPath, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#cat)
+ - [`ipfs.catPullStream(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#catpullstream)
+ - [`ipfs.catReadableStream(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#catreadablestream)
+ - [`ipfs.get(ipfsPath, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#get)
+ - [`ipfs.getPullStream(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#getpullstream)
+ - [`ipfs.getReadableStream(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#getreadablestream)
- [`ipfs.ls(ipfsPath, [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#ls)
- [`ipfs.lsPullStream(ipfsPath)`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#lspullstream)
- [`ipfs.lsReadableStream(ipfsPath)`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#lsreadablestream)
- - [MFS (mutable file system) specific](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#mutable-file-system)
- - [`ipfs.files.cp([from, to], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filescp)
- - [`ipfs.files.flush([path], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesflush)
- - [`ipfs.files.ls([path], [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesls)
- - [`ipfs.files.mkdir(path, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesmkdir)
- - [`ipfs.files.mv([from, to], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesmv)
- - [`ipfs.files.read(path, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesread)
- - [`ipfs.files.readPullStream(path, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesreadpullstream)
- - [`ipfs.files.readReadableStream(path, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesreadreadablestream)
- - [`ipfs.files.rm(path, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesrm)
- - [`ipfs.files.stat(path, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesstat)
- - [`ipfs.files.write(path, content, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#fileswrite)
+- [MFS (mutable file system) specific](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#mutable-file-system)
+ - [`ipfs.files.cp([from, to], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filescp)
+ - [`ipfs.files.flush([path], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesflush)
+ - [`ipfs.files.ls([path], [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesls)
+ - [`ipfs.files.mkdir(path, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesmkdir)
+ - [`ipfs.files.mv([from, to], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesmv)
+ - [`ipfs.files.read(path, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesread)
+ - [`ipfs.files.readPullStream(path, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesreadpullstream)
+ - [`ipfs.files.readReadableStream(path, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesreadreadablestream)
+ - [`ipfs.files.rm(path, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesrm)
+ - [`ipfs.files.stat(path, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesstat)
+ - [`ipfs.files.write(path, content, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#fileswrite)
-- [block](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/BLOCK.md)
- - [`ipfs.block.get(cid, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/BLOCK.md#blockget)
- - [`ipfs.block.put(block, cid, [callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/BLOCK.md#blockput)
- - [`ipfs.block.stat(cid, [callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/BLOCK.md#blockstat)
#### Graph
@@ -532,7 +531,12 @@ The core API is grouped into several areas:
- [`ipfs.dag.get(cid, [path], [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/DAG.md#dagget)
- [`ipfs.dag.tree(cid, [path], [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/DAG.md#dagtree)
-- [object](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/OBJECT.md)
+- [pin](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/PIN.md)
+ - [`ipfs.pin.add(hash, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/PIN.md#pinadd)
+ - [`ipfs.pin.ls([hash], [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/PIN.md#pinls)
+ - [`ipfs.pin.rm(hash, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/PIN.md#pinrm)
+
+- [object (legacy)](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/OBJECT.md)
- [`ipfs.object.new([template], [callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/OBJECT.md#objectnew)
- [`ipfs.object.put(obj, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/OBJECT.md#objectput)
- [`ipfs.object.get(multihash, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/OBJECT.md#objectget)
@@ -544,10 +548,15 @@ The core API is grouped into several areas:
- [`ipfs.object.patch.appendData(multihash, data, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/OBJECT.md#objectpatchappenddata)
- [`ipfs.object.patch.setData(multihash, data, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/OBJECT.md#objectpatchsetdata)
-- [pin](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/PIN.md)
- - [`ipfs.pin.add(hash, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/PIN.md#pinadd)
- - [`ipfs.pin.ls([hash], [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/PIN.md#pinls)
- - [`ipfs.pin.rm(hash, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/PIN.md#pinrm)
+#### Block
+
+- [block](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/BLOCK.md)
+ - [`ipfs.block.get(cid, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/BLOCK.md#blockget)
+ - [`ipfs.block.put(block, cid, [callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/BLOCK.md#blockput)
+ - [`ipfs.block.stat(cid, [callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/BLOCK.md#blockstat)
+- [bitswap](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/BITSWAP.md)
+ - [`ipfs.bitswap.wantlist([peerId], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/BITSWAP.md#bitswapwantlist)
+ - [`ipfs.bitswap.stat([callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/BITSWAP.md#bitswapstat)
#### Name
@@ -574,10 +583,6 @@ The core API is grouped into several areas:
- [`ipfs.bootstrap.add(addr, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/BOOTSTRAP.md#bootstrapadd)
- [`ipfs.bootstrap.rm(peer, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/BOOTSTRAP.md#bootstraprm)
-- [bitswap](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/BITSWAP.md)
- - [`ipfs.bitswap.wantlist([peerId], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/BITSWAP.md#bitswapwantlist)
- - [`ipfs.bitswap.stat([callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/BITSWAP.md#bitswapstat)
-
- dht (not implemented yet)
- [pubsub](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/PUBSUB.md)
@@ -1007,10 +1012,10 @@ What does this image explain?
IPFS implementation in JavaScript is a work in progress. As such, there's a few things you can do right now to help out:
- * Go through the modules below and **check out existing issues**. This would be especially useful for modules in active development. Some knowledge of IPFS may be required, as well as the infrastructure behind it - for instance, you may need to read up on p2p and more complex operations like muxing to be able to help technically.
- * **Perform code reviews**. More eyes will help (a) speed the project along, (b) ensure quality, and (c) reduce possible future bugs.
- * Take a look at go-ipfs and some of the planning repositories or issues: for instance, the [libp2p spec](https://github.com/ipfs/specs/pull/19). Contributions here that would be most helpful are **top-level comments** about how it should look based on our understanding. Again, the more eyes the better.
- * **Add tests**. There can never be enough tests.
+- Go through the modules below and **check out existing issues**. This would be especially useful for modules in active development. Some knowledge of IPFS may be required, as well as the infrastructure behind it - for instance, you may need to read up on p2p and more complex operations like muxing to be able to help technically.
+- **Perform code reviews**. More eyes will help (a) speed the project along, (b) ensure quality, and (c) reduce possible future bugs.
+- Take a look at go-ipfs and some of the planning repositories or issues: for instance, the [libp2p spec](https://github.com/ipfs/specs/pull/19). Contributions here that would be most helpful are **top-level comments** about how it should look based on our understanding. Again, the more eyes the better.
+- **Add tests**. There can never be enough tests.
### Want to hack on IPFS?
From 8c62c6c72d02f061f85e526386676a099db46e18 Mon Sep 17 00:00:00 2001
From: David Dias
Date: Fri, 26 Oct 2018 15:07:27 +0200
Subject: [PATCH 03/11] test: update tests to use latest files APIs
---
test/cli/init.js | 2 +-
test/core/bitswap.spec.js | 4 ++--
test/core/circuit-relay.js | 4 ++--
test/core/files-sharding.spec.js | 4 ++--
test/core/files.spec.js | 12 +++++------
test/core/interface.spec.js | 37 +++++++++++++++++++++++++++++++-
test/core/kad-dht.node.js | 4 ++--
test/core/name.js | 13 +++++++----
test/core/pin.js | 2 +-
test/core/preload.spec.js | 32 +++++++++++++--------------
test/core/utils.js | 2 +-
test/gateway/index.js | 10 ++++-----
test/utils/ipfs-exec.js | 2 +-
13 files changed, 84 insertions(+), 44 deletions(-)
diff --git a/test/cli/init.js b/test/cli/init.js
index b1a826c1db..c8bcf61765 100644
--- a/test/cli/init.js
+++ b/test/cli/init.js
@@ -41,7 +41,7 @@ describe('init', function () {
expect(repoExistsSync('version')).to.equal(true)
// Test that the following was written when init-ing the repo
- // jsipfs files cat /ipfs/QmfGBRT6BbWJd7yUc2uYdaUZJBbnEFvTqehPFoSMQ6wgdr/readme
+ // jsipfs cat /ipfs/QmfGBRT6BbWJd7yUc2uYdaUZJBbnEFvTqehPFoSMQ6wgdr/readme
let command = out.substring(out.indexOf('cat'), out.length - 2 /* omit the newline char */)
return ipfs(command)
}).then((out) => expect(out).to.equal(readme))
diff --git a/test/core/bitswap.spec.js b/test/core/bitswap.spec.js
index 5c0ce9f976..9540aba53e 100644
--- a/test/core/bitswap.spec.js
+++ b/test/core/bitswap.spec.js
@@ -228,10 +228,10 @@ describe('bitswap', function () {
(cb) => addNode(fDaemon, inProcNode, cb),
// 1. Add file to tmp instance
(remote, cb) => {
- remote.files.add([{ path: 'awesome.txt', content: file }], cb)
+ remote.add([{ path: 'awesome.txt', content: file }], cb)
},
// 2. Request file from local instance
- (filesAdded, cb) => inProcNode.files.cat(filesAdded[0].hash, cb)
+ (filesAdded, cb) => inProcNode.cat(filesAdded[0].hash, cb)
], (err, data) => {
expect(err).to.not.exist()
expect(data).to.eql(file)
diff --git a/test/core/circuit-relay.js b/test/core/circuit-relay.js
index 709b24188c..1566ce04b1 100644
--- a/test/core/circuit-relay.js
+++ b/test/core/circuit-relay.js
@@ -118,8 +118,8 @@ describe('circuit relay', () => {
it('should transfer', function (done) {
const data = crypto.randomBytes(128)
waterfall([
- (cb) => nodeA.files.add(data, cb),
- (res, cb) => nodeB.files.cat(res[0].hash, cb),
+ (cb) => nodeA.add(data, cb),
+ (res, cb) => nodeB.cat(res[0].hash, cb),
(buffer, cb) => {
expect(buffer).to.deep.equal(data)
cb()
diff --git a/test/core/files-sharding.spec.js b/test/core/files-sharding.spec.js
index 8c96f00aa4..01d014c4f7 100644
--- a/test/core/files-sharding.spec.js
+++ b/test/core/files-sharding.spec.js
@@ -66,7 +66,7 @@ describe('files directory (sharding tests)', () => {
pull(
pull.values(createTestFiles()),
- ipfs.files.addPullStream(),
+ ipfs.addPullStream(),
pull.collect((err, results) => {
expect(err).to.not.exist()
const last = results[results.length - 1]
@@ -118,7 +118,7 @@ describe('files directory (sharding tests)', () => {
pull(
pull.values(createTestFiles()),
- ipfs.files.addPullStream(),
+ ipfs.addPullStream(),
pull.collect((err, results) => {
expect(err).to.not.exist()
const last = results[results.length - 1]
diff --git a/test/core/files.spec.js b/test/core/files.spec.js
index 033058a809..77f99739ca 100644
--- a/test/core/files.spec.js
+++ b/test/core/files.spec.js
@@ -42,7 +42,7 @@ describe('files', () => {
describe('get', () => {
it('should callback with error for invalid IPFS path input', (done) => {
const invalidPath = null
- ipfs.files.get(invalidPath, (err) => {
+ ipfs.get(invalidPath, (err) => {
expect(err).to.exist()
expect(err.code).to.equal('ERR_INVALID_PATH')
done()
@@ -53,7 +53,7 @@ describe('files', () => {
describe('getReadableStream', () => {
it('should return erroring stream for invalid IPFS path input', (done) => {
const invalidPath = null
- const stream = ipfs.files.getReadableStream(invalidPath)
+ const stream = ipfs.getReadableStream(invalidPath)
stream.on('error', (err) => {
expect(err).to.exist()
@@ -67,7 +67,7 @@ describe('files', () => {
it('should return erroring stream for invalid IPFS path input', (done) => {
const invalidPath = null
pull(
- ipfs.files.getPullStream(invalidPath),
+ ipfs.getPullStream(invalidPath),
pull.collect((err) => {
expect(err).to.exist()
expect(err.code).to.equal('ERR_INVALID_PATH')
@@ -79,14 +79,14 @@ describe('files', () => {
describe('add', () => {
it('should not error when passed null options', (done) => {
- ipfs.files.add(Buffer.from(hat()), null, (err) => {
+ ipfs.add(Buffer.from(hat()), null, (err) => {
expect(err).to.not.exist()
done()
})
})
it('should add a file with a v1 CID', (done) => {
- ipfs.files.add(Buffer.from([0, 1, 2]), {
+ ipfs.add(Buffer.from([0, 1, 2]), {
cidVersion: 1
}, (err, files) => {
expect(err).to.not.exist()
@@ -98,7 +98,7 @@ describe('files', () => {
})
it('should add a file with a v1 CID and not raw leaves', (done) => {
- ipfs.files.add(Buffer.from([0, 1, 2]), {
+ ipfs.add(Buffer.from([0, 1, 2]), {
cidVersion: 1,
rawLeaves: false
}, (err, files) => {
diff --git a/test/core/interface.spec.js b/test/core/interface.spec.js
index fb5c964c17..c1316b8125 100644
--- a/test/core/interface.spec.js
+++ b/test/core/interface.spec.js
@@ -78,7 +78,42 @@ describe('interface-ipfs-core tests', () => {
] : true
})
- tests.files(defaultCommonFactory)
+ tests.filesRegular(defaultCommonFactory, {
+ skip: [
+ // .addFromStream
+ isNode ? null : {
+ name: 'same as .add',
+ reason: 'Designed for Node.js only'
+ },
+ // .addFromFs
+ isNode ? null : {
+ name: 'a directory',
+ reason: 'Designed for Node.js only'
+ },
+ isNode ? null : {
+ name: 'a directory with an odd name',
+ reason: 'Designed for Node.js only'
+ },
+ isNode ? null : {
+ name: 'add and ignore a directory',
+ reason: 'Designed for Node.js only'
+ },
+ isNode ? null : {
+ name: 'a file',
+ reason: 'Designed for Node.js only'
+ },
+ isNode ? null : {
+ name: 'a hidden file in a directory',
+ reason: 'Designed for Node.js only'
+ },
+ isNode ? null : {
+ name: 'a with only-hash=true',
+ reason: 'Designed for Node.js only'
+ }
+ ]
+ })
+
+ tests.filesMFS(defaultCommonFactory)
tests.key(CommonFactory.create({
spawnOptions: {
diff --git a/test/core/kad-dht.node.js b/test/core/kad-dht.node.js
index 2670a6f846..1f1f484ae6 100644
--- a/test/core/kad-dht.node.js
+++ b/test/core/kad-dht.node.js
@@ -78,10 +78,10 @@ describe.skip('verify that kad-dht is doing its thing', () => {
content: Buffer.from('hello kad')
}
- nodeC.files.add(file, (err, filesAdded) => {
+ nodeC.add(file, (err, filesAdded) => {
expect(err).to.not.exist()
- nodeA.files.cat(filesAdded[0].hash, (err, data) => {
+ nodeA.cat(filesAdded[0].hash, (err, data) => {
expect(err).to.not.exist()
expect(data.length).to.equal(file.data.length)
expect(data).to.eql(file.data)
diff --git a/test/core/name.js b/test/core/name.js
index 790aee384d..4c974b2782 100644
--- a/test/core/name.js
+++ b/test/core/name.js
@@ -21,7 +21,7 @@ const df = DaemonFactory.create({ type: 'proc' })
const ipfsRef = '/ipfs/QmPFVLPmp9zv5Z5KUqLhe2EivAGccQW2r7M7jhVJGLZoZU'
-describe('name', function () {
+describe('name', function () {
if (!isNode) {
return
}
@@ -402,10 +402,15 @@ describe('name', function () {
it('should resolve an ipfs path correctly', function (done) {
node.files.add(fixture, (err, res) => {
expect(err).to.not.exist()
- ipnsPath.resolvePath(node, `/ipfs/${res[0].hash}`, (err, value) => {
+
+      node.name.publish(`/ipfs/${res[0].hash}`, (err, publishResult) => {
expect(err).to.not.exist()
- expect(value).to.exist()
- done()
+
+ ipnsPath.resolvePath(node, `/ipfs/${res[0].hash}`, (err, value) => {
+ expect(err).to.not.exist()
+ expect(value).to.exist()
+ done()
+ })
})
})
})
diff --git a/test/core/pin.js b/test/core/pin.js
index 6cfb57a49f..558a46132b 100644
--- a/test/core/pin.js
+++ b/test/core/pin.js
@@ -77,7 +77,7 @@ describe('pin', function () {
ipfs = new IPFS({ repo })
ipfs.on('ready', () => {
pin = ipfs.pin
- ipfs.files.add(fixtures, done)
+ ipfs.add(fixtures, done)
})
})
diff --git a/test/core/preload.spec.js b/test/core/preload.spec.js
index 8af0743880..1617c862a4 100644
--- a/test/core/preload.spec.js
+++ b/test/core/preload.spec.js
@@ -49,15 +49,15 @@ describe('preload', () => {
after((done) => repo.teardown(done))
- it('should preload content added with files.add', (done) => {
- ipfs.files.add(Buffer.from(hat()), (err, res) => {
+ it('should preload content added with add', (done) => {
+ ipfs.add(Buffer.from(hat()), (err, res) => {
expect(err).to.not.exist()
MockPreloadNode.waitForCids(res[0].hash, done)
})
})
- it('should preload multiple content added with files.add', (done) => {
- ipfs.files.add([{
+ it('should preload multiple content added with add', (done) => {
+ ipfs.add([{
content: Buffer.from(hat())
}, {
content: Buffer.from(hat())
@@ -69,8 +69,8 @@ describe('preload', () => {
})
})
- it('should preload multiple content and intermediate dirs added with files.add', (done) => {
- ipfs.files.add([{
+ it('should preload multiple content and intermediate dirs added with add', (done) => {
+ ipfs.add([{
path: 'dir0/dir1/file0',
content: Buffer.from(hat())
}, {
@@ -89,8 +89,8 @@ describe('preload', () => {
})
})
- it('should preload multiple content and wrapping dir for content added with files.add and wrapWithDirectory option', (done) => {
- ipfs.files.add([{
+ it('should preload multiple content and wrapping dir for content added with add and wrapWithDirectory option', (done) => {
+ ipfs.add([{
path: 'dir0/dir1/file0',
content: Buffer.from(hat())
}, {
@@ -109,20 +109,20 @@ describe('preload', () => {
})
})
- it('should preload content retrieved with files.cat', (done) => {
- ipfs.files.add(Buffer.from(hat()), { preload: false }, (err, res) => {
+ it('should preload content retrieved with cat', (done) => {
+ ipfs.add(Buffer.from(hat()), { preload: false }, (err, res) => {
expect(err).to.not.exist()
- ipfs.files.cat(res[0].hash, (err) => {
+ ipfs.cat(res[0].hash, (err) => {
expect(err).to.not.exist()
MockPreloadNode.waitForCids(res[0].hash, done)
})
})
})
- it('should preload content retrieved with files.get', (done) => {
- ipfs.files.add(Buffer.from(hat()), { preload: false }, (err, res) => {
+ it('should preload content retrieved with get', (done) => {
+ ipfs.add(Buffer.from(hat()), { preload: false }, (err, res) => {
expect(err).to.not.exist()
- ipfs.files.get(res[0].hash, (err) => {
+ ipfs.get(res[0].hash, (err) => {
expect(err).to.not.exist()
MockPreloadNode.waitForCids(res[0].hash, done)
})
@@ -130,7 +130,7 @@ describe('preload', () => {
})
it('should preload content retrieved with ls', (done) => {
- ipfs.files.add([{
+ ipfs.add([{
path: 'dir0/dir1/file0',
content: Buffer.from(hat())
}, {
@@ -382,7 +382,7 @@ describe('preload disabled', () => {
after((done) => repo.teardown(done))
it('should not preload if disabled', (done) => {
- ipfs.files.add(Buffer.from(hat()), (err, res) => {
+ ipfs.add(Buffer.from(hat()), (err, res) => {
expect(err).to.not.exist()
MockPreloadNode.waitForCids(res[0].hash, (err) => {
diff --git a/test/core/utils.js b/test/core/utils.js
index 49bd7f19fc..1ad815e926 100644
--- a/test/core/utils.js
+++ b/test/core/utils.js
@@ -122,7 +122,7 @@ describe('utils', () => {
node = new IPFS({
repo: repo
})
- node.once('ready', () => node.files.add(fixtures, done))
+ node.once('ready', () => node.add(fixtures, done))
})
after(done => node.stop(done))
diff --git a/test/gateway/index.js b/test/gateway/index.js
index 837261be84..952b0d3a82 100644
--- a/test/gateway/index.js
+++ b/test/gateway/index.js
@@ -70,7 +70,7 @@ describe('HTTP Gateway', function () {
emptyDir('nested-folder/empty')
]
- http.api.node.files.add(dirs, (err, res) => {
+ http.api.node.add(dirs, (err, res) => {
expect(err).to.not.exist()
const root = res[res.length - 1]
@@ -82,7 +82,7 @@ describe('HTTP Gateway', function () {
(cb) => {
const expectedMultihash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq'
- http.api.node.files.add(bigFile, (err, res) => {
+ http.api.node.add(bigFile, (err, res) => {
expect(err).to.not.exist()
const file = res[0]
expect(file.path).to.equal(expectedMultihash)
@@ -93,7 +93,7 @@ describe('HTTP Gateway', function () {
(cb) => {
const expectedMultihash = 'QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o'
- http.api.node.files.add(Buffer.from('hello world' + '\n'), { cidVersion: 0 }, (err, res) => {
+ http.api.node.add(Buffer.from('hello world' + '\n'), { cidVersion: 0 }, (err, res) => {
expect(err).to.not.exist()
const file = res[0]
expect(file.path).to.equal(expectedMultihash)
@@ -108,7 +108,7 @@ describe('HTTP Gateway', function () {
content('cat-folder/cat.jpg')
]
- http.api.node.files.add(dir, (err, res) => {
+ http.api.node.add(dir, (err, res) => {
expect(err).to.not.exist()
const file = res[1]
expect(file.path).to.equal('test-folder/cat-folder')
@@ -124,7 +124,7 @@ describe('HTTP Gateway', function () {
content('unsniffable-folder/hexagons.svg')
]
- http.api.node.files.add(dir, (err, res) => {
+ http.api.node.add(dir, (err, res) => {
expect(err).to.not.exist()
const file = res[res.length - 2]
expect(file.path).to.equal('test-folder/unsniffable-folder')
diff --git a/test/utils/ipfs-exec.js b/test/utils/ipfs-exec.js
index 786a3f7f3b..52b6e6ed1f 100644
--- a/test/utils/ipfs-exec.js
+++ b/test/utils/ipfs-exec.js
@@ -13,7 +13,7 @@ const _ = require('lodash')
// The top level export is a function that can be passed a `repoPath`
// and optional `opts` to customize the execution of the commands.
// This function returns the actual executer, which consists of
-// `ipfs('get ')` and `ipfs.fail('files get ')`
+// `ipfs('get ')` and `ipfs.fail('get ')`
// The first one executes and asserts that the command ran successfully
// and returns a promise which is resolved to `stdout` of the command.
// The `.fail` variation asserts that the command exited with `Code > 0`
From db3110a135a44f319eb15c18f1f2b185f7b6d02f Mon Sep 17 00:00:00 2001
From: David Dias
Date: Fri, 26 Oct 2018 15:07:46 +0200
Subject: [PATCH 04/11] feat: update CLI to use latest Files API
---
src/cli/bin.js | 15 ++-------------
src/cli/commands/{files => }/add.js | 6 +++---
src/cli/commands/{files => }/cat.js | 2 +-
src/cli/commands/{files => }/get.js | 4 ++--
4 files changed, 8 insertions(+), 19 deletions(-)
rename src/cli/commands/{files => }/add.js (97%)
rename src/cli/commands/{files => }/cat.js (91%)
rename src/cli/commands/{files => }/get.js (94%)
diff --git a/src/cli/bin.js b/src/cli/bin.js
index 49163d909e..291d71bc82 100755
--- a/src/cli/bin.js
+++ b/src/cli/bin.js
@@ -69,20 +69,9 @@ if (args[0] === 'daemon' || args[0] === 'init') {
throw err
}
- // add mfs commands
+ // add MFS (Files API) commands
mfs(cli)
- // NOTE: This creates an alias of
- // `jsipfs files {add, get, cat}` to `jsipfs {add, get, cat}`.
- // This will stay until https://github.com/ipfs/specs/issues/98 is resolved.
- const addCmd = require('./commands/files/add')
- const catCmd = require('./commands/files/cat')
- const getCmd = require('./commands/files/get')
- const aliases = [addCmd, catCmd, getCmd]
- aliases.forEach((alias) => {
- cli.command(alias)
- })
-
cli
.commandDir('commands')
.help()
@@ -113,7 +102,7 @@ if (args[0] === 'daemon' || args[0] === 'init') {
exitCode = 1
})
.then(() => cleanup())
- .catch(() => {})
+    .catch(() => {})
.then(() => {
if (exitCode !== 0) {
process.exit(exitCode)
diff --git a/src/cli/commands/files/add.js b/src/cli/commands/add.js
similarity index 97%
rename from src/cli/commands/files/add.js
rename to src/cli/commands/add.js
index 2ab492f27f..7615d41f01 100644
--- a/src/cli/commands/files/add.js
+++ b/src/cli/commands/add.js
@@ -11,9 +11,9 @@ const getFolderSize = require('get-folder-size')
const byteman = require('byteman')
const waterfall = require('async/waterfall')
const mh = require('multihashes')
-const utils = require('../../utils')
-const print = require('../../utils').print
-const createProgressBar = require('../../utils').createProgressBar
+const utils = require('../utils')
+const print = require('../utils').print
+const createProgressBar = require('../utils').createProgressBar
function checkPath (inPath, recursive) {
// This function is to check for the following possible inputs
diff --git a/src/cli/commands/files/cat.js b/src/cli/commands/cat.js
similarity index 91%
rename from src/cli/commands/files/cat.js
rename to src/cli/commands/cat.js
index f68d6badfa..3a7548fea0 100644
--- a/src/cli/commands/files/cat.js
+++ b/src/cli/commands/cat.js
@@ -29,7 +29,7 @@ module.exports = {
length: argv.length
}
- const stream = argv.ipfs.files.catReadableStream(path, options)
+ const stream = argv.ipfs.catReadableStream(path, options)
stream.once('error', (err) => {
throw err
diff --git a/src/cli/commands/files/get.js b/src/cli/commands/get.js
similarity index 94%
rename from src/cli/commands/files/get.js
rename to src/cli/commands/get.js
index d9e0a39a67..1f571e175d 100644
--- a/src/cli/commands/files/get.js
+++ b/src/cli/commands/get.js
@@ -5,7 +5,7 @@ const path = require('path')
const mkdirp = require('mkdirp')
const pull = require('pull-stream')
const toPull = require('stream-to-pull-stream')
-const print = require('../../utils').print
+const print = require('../utils').print
function checkArgs (hash, outPath) {
// format the output directory
@@ -63,7 +63,7 @@ module.exports = {
const dir = checkArgs(ipfsPath, argv.output)
- const stream = argv.ipfs.files.getReadableStream(ipfsPath)
+ const stream = argv.ipfs.getReadableStream(ipfsPath)
stream.once('error', (err) => {
if (err) { throw err }
From f3af8478b453803d0315ce18a83461e9fdd1fbe7 Mon Sep 17 00:00:00 2001
From: David Dias
Date: Fri, 26 Oct 2018 15:08:02 +0200
Subject: [PATCH 05/11] feat: update HTTP API and Gateway to use latest Files
API
---
src/http/api/resources/files.js | 6 +++---
src/http/api/routes/files.js | 12 ++++++------
src/http/gateway/resources/gateway.js | 2 +-
3 files changed, 10 insertions(+), 10 deletions(-)
diff --git a/src/http/api/resources/files.js b/src/http/api/resources/files.js
index 939ac25df3..0a902d4e4e 100644
--- a/src/http/api/resources/files.js
+++ b/src/http/api/resources/files.js
@@ -79,7 +79,7 @@ exports.cat = {
const options = request.pre.args.options
const ipfs = request.server.app.ipfs
- ipfs.files.cat(key, options, (err, stream) => {
+ ipfs.cat(key, options, (err, stream) => {
if (err) {
log.error(err)
if (err.message === 'No such file') {
@@ -113,7 +113,7 @@ exports.get = {
const ipfs = request.server.app.ipfs
const pack = tar.pack()
- ipfs.files.get(cid, (err, filesArray) => {
+ ipfs.get(cid, (err, filesArray) => {
if (err) {
log.error(err)
pack.emit('error', err)
@@ -258,7 +258,7 @@ exports.add = {
pull(
fileAdder,
- ipfs.files.addPullStream(options),
+ ipfs.addPullStream(options),
pull.map((file) => {
return {
Name: file.path, // addPullStream already turned this into a hash if it wanted to
diff --git a/src/http/api/routes/files.js b/src/http/api/routes/files.js
index 44b00f9fa8..fca1a68001 100644
--- a/src/http/api/routes/files.js
+++ b/src/http/api/routes/files.js
@@ -12,9 +12,9 @@ module.exports = (server) => {
path: '/api/v0/cat',
config: {
pre: [
- { method: resources.files.cat.parseArgs, assign: 'args' }
+ { method: resources.cat.parseArgs, assign: 'args' }
],
- handler: resources.files.cat.handler
+ handler: resources.cat.handler
}
})
@@ -24,9 +24,9 @@ module.exports = (server) => {
path: '/api/v0/get',
config: {
pre: [
- { method: resources.files.get.parseArgs, assign: 'args' }
+ { method: resources.get.parseArgs, assign: 'args' }
],
- handler: resources.files.get.handler
+ handler: resources.get.handler
}
})
@@ -40,8 +40,8 @@ module.exports = (server) => {
output: 'stream',
maxBytes: Number.MAX_SAFE_INTEGER
},
- handler: resources.files.add.handler,
- validate: resources.files.add.validate
+ handler: resources.add.handler,
+ validate: resources.add.validate
}
})
diff --git a/src/http/gateway/resources/gateway.js b/src/http/gateway/resources/gateway.js
index 4b6df5977f..4f6cc67b8c 100644
--- a/src/http/gateway/resources/gateway.js
+++ b/src/http/gateway/resources/gateway.js
@@ -94,7 +94,7 @@ module.exports = {
return handleGatewayResolverError(err)
}
- const stream = ipfs.files.catReadableStream(data.multihash)
+ const stream = ipfs.catReadableStream(data.multihash)
stream.once('error', (err) => {
if (err) {
log.error(err)
From 89b097bcbbced9b26dcacfeaa60f05b85593f349 Mon Sep 17 00:00:00 2001
From: David Dias
Date: Sun, 28 Oct 2018 15:11:38 +0100
Subject: [PATCH 06/11] feat: wip on migrating core to new API, got caught by
circular dep with mfs
---
src/core/components/{mfs.js => files-mfs.js} | 0
.../components/{files.js => files-regular.js} | 59 ++++++++++---------
src/core/components/index.js | 4 +-
src/core/components/init-assets.js | 2 +-
src/core/index.js | 40 +++++++------
test/core/interface.spec.js | 5 +-
test/http-api/interface.js | 3 +-
7 files changed, 61 insertions(+), 52 deletions(-)
rename src/core/components/{mfs.js => files-mfs.js} (100%)
rename src/core/components/{files.js => files-regular.js} (94%)
diff --git a/src/core/components/mfs.js b/src/core/components/files-mfs.js
similarity index 100%
rename from src/core/components/mfs.js
rename to src/core/components/files-mfs.js
diff --git a/src/core/components/files.js b/src/core/components/files-regular.js
similarity index 94%
rename from src/core/components/files.js
rename to src/core/components/files-regular.js
index f13f42ae43..4ed720644c 100644
--- a/src/core/components/files.js
+++ b/src/core/components/files-regular.js
@@ -24,6 +24,23 @@ const WRAPPER = 'wrapper/'
function noop () {}
+function normalizePath (path) {
+ if (Buffer.isBuffer(path)) {
+ path = toB58String(path)
+ }
+ if (CID.isCID(path)) {
+ path = path.toBaseEncodedString()
+ }
+ if (path.indexOf('/ipfs/') === 0) {
+ path = path.substring('/ipfs/'.length)
+ }
+ if (path.charAt(path.length - 1) === '/') {
+ path = path.substring(0, path.length - 1)
+ }
+
+ return path
+}
+
function prepareFile (self, opts, file, callback) {
opts = opts || {}
@@ -47,7 +64,9 @@ function prepareFile (self, opts, file, callback) {
}
cb(null, {
- path: opts.wrapWithDirectory ? file.path.substring(WRAPPER.length) : (file.path || b58Hash),
+ path: opts.wrapWithDirectory
+ ? file.path.substring(WRAPPER.length)
+ : (file.path || b58Hash),
hash: b58Hash,
size
})
@@ -154,7 +173,8 @@ class AddHelper extends Duplex {
}
}
-module.exports = function files (self) {
+module.exports = function (self) {
+ // Internal add func that gets used by all add funcs
function _addPullStream (options = {}) {
let chunkerOptions
try {
@@ -191,6 +211,7 @@ module.exports = function files (self) {
)
}
+ // Internal cat func that gets used by all cat funcs
function _catPullStream (ipfsPath, options) {
if (typeof ipfsPath === 'function') {
throw new Error('You must supply an ipfsPath')
@@ -232,7 +253,8 @@ module.exports = function files (self) {
return d
}
- function _lsPullStreamImmutable (ipfsPath, options) {
+ // Internal ls func that gets used by all ls funcs
+ function _lsPullStream (ipfsPath, options) {
options = options || {}
const path = normalizePath(ipfsPath)
@@ -301,7 +323,7 @@ module.exports = function files (self) {
return function () {
const args = Array.from(arguments)
- // If we files.add(), then promisify thinks the pull stream
+ // If we .add(), then promisify thinks the pull stream
// is a callback! Add an empty options object in this case so that a
// promise is returned.
if (args.length === 1 && isSource(args[0])) {
@@ -337,7 +359,7 @@ module.exports = function files (self) {
}
if (typeof callback !== 'function') {
- throw new Error('Please supply a callback to ipfs.files.cat')
+ throw new Error('Please supply a callback to ipfs.cat')
}
pull(
@@ -441,7 +463,7 @@ module.exports = function files (self) {
return exporter(ipfsPath, self._ipld, options)
},
- lsImmutable: promisify((ipfsPath, options, callback) => {
+ ls: promisify((ipfsPath, options, callback) => {
if (typeof options === 'function') {
callback = options
options = {}
@@ -450,7 +472,7 @@ module.exports = function files (self) {
options = options || {}
pull(
- _lsPullStreamImmutable(ipfsPath, options),
+ _lsPullStream(ipfsPath, options),
pull.collect((err, values) => {
if (err) {
callback(err)
@@ -461,27 +483,10 @@ module.exports = function files (self) {
)
}),
- lsReadableStreamImmutable: (ipfsPath, options) => {
- return toStream.source(_lsPullStreamImmutable(ipfsPath, options))
+ lsReadableStream: (ipfsPath, options) => {
+ return toStream.source(_lsPullStream(ipfsPath, options))
},
- lsPullStreamImmutable: _lsPullStreamImmutable
+ lsPullStream: _lsPullStream
}
}
-
-function normalizePath (path) {
- if (Buffer.isBuffer(path)) {
- path = toB58String(path)
- }
- if (CID.isCID(path)) {
- path = path.toBaseEncodedString()
- }
- if (path.indexOf('/ipfs/') === 0) {
- path = path.substring('/ipfs/'.length)
- }
- if (path.charAt(path.length - 1) === '/') {
- path = path.substring(0, path.length - 1)
- }
-
- return path
-}
diff --git a/src/core/components/index.js b/src/core/components/index.js
index cf6506e6dd..ac893efbdd 100644
--- a/src/core/components/index.js
+++ b/src/core/components/index.js
@@ -19,13 +19,13 @@ exports.ping = require('./ping')
exports.pingPullStream = require('./ping-pull-stream')
exports.pingReadableStream = require('./ping-readable-stream')
exports.pin = require('./pin')
-exports.files = require('./files')
+exports.filesRegular = require('./files-regular')
+exports.filesMFS = require('./files-mfs')
exports.bitswap = require('./bitswap')
exports.pubsub = require('./pubsub')
exports.dht = require('./dht')
exports.dns = require('./dns')
exports.key = require('./key')
exports.stats = require('./stats')
-exports.mfs = require('./mfs')
exports.resolve = require('./resolve')
exports.name = require('./name')
diff --git a/src/core/components/init-assets.js b/src/core/components/init-assets.js
index 9756a83d56..362bad7345 100644
--- a/src/core/components/init-assets.js
+++ b/src/core/components/init-assets.js
@@ -21,7 +21,7 @@ module.exports = function addDefaultAssets (self, log, callback) {
const addPath = element.substring(index + 1)
return { path: addPath, content: file(element) }
}),
- self.files.addPullStream(),
+ self.addPullStream(),
pull.through(file => {
if (file.path === 'init-docs') {
const cid = new CID(file.hash)
diff --git a/src/core/index.js b/src/core/index.js
index 0d53a21342..96fba15c56 100644
--- a/src/core/index.js
+++ b/src/core/index.js
@@ -147,7 +147,6 @@ class IPFS extends EventEmitter {
this.dag = components.dag(this)
this.libp2p = components.libp2p(this)
this.swarm = components.swarm(this)
- this.files = components.files(this)
this.name = components.name(this)
this.bitswap = components.bitswap(this)
this.pin = components.pin(this)
@@ -173,28 +172,33 @@ class IPFS extends EventEmitter {
this.state = require('./state')(this)
- // ipfs.ls
- this.ls = this.files.lsImmutable
- this.lsReadableStream = this.files.lsReadableStreamImmutable
- this.lsPullStream = this.files.lsPullStreamImmutable
+ // ipfs regular Files APIs
+ const filesRegular = components.filesRegular(this)
+ this.add = filesRegular.add
+ this.addReadableStream = filesRegular.addReadableStream
+ this.addPullStream = filesRegular.addPullStream
+ // TODO create this.addFromFs
+ // TODO create this.addFromStream
+ // TODO create this.addFromUrl
+ this.cat = filesRegular.catImmutable
+ this.catReadableStream = filesRegular.catReadableStream
+ this.catPullStream = filesRegular.catPullStream
+ this.get = filesRegular.getImmutable
+ this.getReadableStream = filesRegular.getReadableStream
+ this.getPullStream = filesRegular.getPullStream
+ this.ls = filesRegular.lsImmutable
+ this.lsReadableStream = filesRegular.lsReadableStream
+ this.lsPullStream = filesRegular.lsPullStream
+
+ // ipfs.files API (aka MFS)
+ this.files = components.filesMFS(this)
// ipfs.util
this.util = {
- crypto: crypto,
- isIPFS: isIPFS
+ crypto,
+ isIPFS
}
- // ipfs.files
- const mfs = components.mfs({
- ipld: this._ipld,
- repo: this._repo,
- repoOwner: (this._options.mfs && this._options.mfs.repoOwner) || true
- })
-
- Object.keys(mfs).forEach(key => {
- this.files[key] = mfs[key]
- })
-
boot(this)
}
}
diff --git a/test/core/interface.spec.js b/test/core/interface.spec.js
index c1316b8125..372e8996b5 100644
--- a/test/core/interface.spec.js
+++ b/test/core/interface.spec.js
@@ -113,7 +113,8 @@ describe('interface-ipfs-core tests', () => {
]
})
- tests.filesMFS(defaultCommonFactory)
+ // TODO needs MFS module to be updated
+ // tests.filesMFS(defaultCommonFactory)
tests.key(CommonFactory.create({
spawnOptions: {
@@ -122,8 +123,6 @@ describe('interface-ipfs-core tests', () => {
}
}))
- tests.ls(defaultCommonFactory)
-
tests.miscellaneous(CommonFactory.create({
// No need to stop, because the test suite does a 'stop' test.
createTeardown: () => cb => cb()
diff --git a/test/http-api/interface.js b/test/http-api/interface.js
index 00a1743482..08b689aa00 100644
--- a/test/http-api/interface.js
+++ b/test/http-api/interface.js
@@ -25,7 +25,8 @@ describe('interface-ipfs-core over ipfs-api tests', () => {
skip: { reason: 'TODO: DHT is not implemented in js-ipfs yet!' }
})
- tests.files(defaultCommonFactory)
+ tests.filesRegular(defaultCommonFactory)
+ tests.filesMFS(defaultCommonFactory)
tests.key(CommonFactory.create({
spawnOptions: {
From 7497eadd8ed4cc178d84fea25d7581fea8f11ac8 Mon Sep 17 00:00:00 2001
From: David Dias
Date: Sun, 4 Nov 2018 19:31:09 +0100
Subject: [PATCH 07/11] feat: attach mfs through proxy object to avoid circular
dep
---
src/core/components/files-mfs.js | 67 ++++++++++++++++----------------
test/core/files.spec.js | 2 +-
2 files changed, 34 insertions(+), 35 deletions(-)
diff --git a/src/core/components/files-mfs.js b/src/core/components/files-mfs.js
index 2fd44d2979..e42e495b30 100644
--- a/src/core/components/files-mfs.js
+++ b/src/core/components/files-mfs.js
@@ -3,38 +3,37 @@
const promisify = require('promisify-es6')
const mfs = require('ipfs-mfs/core')
-module.exports = self => {
- const mfsSelf = Object.assign({}, self)
-
- // A patched dag API to ensure preload doesn't happen for MFS operations
- // (MFS is preloaded periodically)
- mfsSelf.dag = Object.assign({}, self.dag, {
- get: promisify((cid, path, opts, cb) => {
- if (typeof path === 'function') {
- cb = path
- path = undefined
- }
-
- if (typeof opts === 'function') {
- cb = opts
- opts = {}
- }
-
- opts = Object.assign({}, opts, { preload: false })
-
- return self.dag.get(cid, path, opts, cb)
- }),
- put: promisify((node, opts, cb) => {
- if (typeof opts === 'function') {
- cb = opts
- opts = {}
- }
-
- opts = Object.assign({}, opts, { preload: false })
-
- return self.dag.put(node, opts, cb)
- })
- })
-
- return mfs(mfsSelf, mfsSelf._options)
+module.exports = (self) => {
+ const proxy = {
+ add: self.add,
+ dag: {
+ get: promisify((cid, path, opts, cb) => {
+ if (typeof path === 'function') {
+ cb = path
+ path = undefined
+ }
+
+ if (typeof opts === 'function') {
+ cb = opts
+ opts = {}
+ }
+
+ opts = Object.assign({}, opts, { preload: false })
+
+ return self.dag.get(cid, path, opts, cb)
+ }),
+ put: promisify((node, opts, cb) => {
+ if (typeof opts === 'function') {
+ cb = opts
+ opts = {}
+ }
+
+ opts = Object.assign({}, opts, { preload: false })
+
+ return self.dag.put(node, opts, cb)
+ })
+ }
+ }
+
+ return mfs(proxy, self._options)
}
diff --git a/test/core/files.spec.js b/test/core/files.spec.js
index 77f99739ca..d1c6e96662 100644
--- a/test/core/files.spec.js
+++ b/test/core/files.spec.js
@@ -11,7 +11,7 @@ const pull = require('pull-stream')
const IPFSFactory = require('ipfsd-ctl')
const IPFS = require('../../src/core')
-describe('files', () => {
+describe.only('files', () => {
let ipfsd, ipfs
before(function (done) {
From df6182a2f2efbca7001d958553751efa32315156 Mon Sep 17 00:00:00 2001
From: Alan Shaw
Date: Mon, 19 Nov 2018 13:31:44 +0000
Subject: [PATCH 08/11] fix: fixes and cleanup
License: MIT
Signed-off-by: Alan Shaw
---
package.json | 4 +-
src/cli/bin.js | 2 +-
src/cli/commands/add.js | 2 +-
src/core/components/files-mfs.js | 40 +++---------------
src/core/components/files-regular.js | 4 ++
src/core/index.js | 23 +---------
.../resources/{files.js => files-regular.js} | 2 +-
src/http/api/resources/index.js | 2 +-
src/http/api/routes/files.js | 16 +++----
test/core/files.spec.js | 2 +-
test/core/interface.spec.js | 42 +++++--------------
test/http-api/interface.js | 1 +
12 files changed, 37 insertions(+), 103 deletions(-)
rename src/http/api/resources/{files.js => files-regular.js} (99%)
diff --git a/package.json b/package.json
index d6295f4aeb..0004b50785 100644
--- a/package.json
+++ b/package.json
@@ -107,12 +107,12 @@
"ipfs-block": "~0.8.0",
"ipfs-block-service": "~0.15.1",
"ipfs-http-response": "~0.2.1",
- "ipfs-mfs": "~0.5.0",
+ "ipfs-mfs": "~0.5.2",
"ipfs-multipart": "~0.1.0",
"ipfs-repo": "~0.25.0",
"ipfs-unixfs": "~0.1.16",
"ipfs-unixfs-engine": "~0.34.0",
- "ipld": "~0.20.0",
+ "ipld": "~0.20.1",
"ipld-bitcoin": "~0.1.8",
"ipld-dag-pb": "~0.15.0",
"ipld-ethereum": "^2.0.1",
diff --git a/src/cli/bin.js b/src/cli/bin.js
index 291d71bc82..401357ebf3 100755
--- a/src/cli/bin.js
+++ b/src/cli/bin.js
@@ -102,7 +102,7 @@ if (args[0] === 'daemon' || args[0] === 'init') {
exitCode = 1
})
.then(() => cleanup())
- .catch((e) => {})
+ .catch(() => {})
.then(() => {
if (exitCode !== 0) {
process.exit(exitCode)
diff --git a/src/cli/commands/add.js b/src/cli/commands/add.js
index 7615d41f01..8b930434d6 100644
--- a/src/cli/commands/add.js
+++ b/src/cli/commands/add.js
@@ -231,7 +231,7 @@ module.exports = {
}
}
- next(null, ipfs.files.addPullStream(options))
+ next(null, ipfs.addPullStream(options))
}
], (err, addStream) => {
if (err) throw err
diff --git a/src/core/components/files-mfs.js b/src/core/components/files-mfs.js
index e42e495b30..531219c4fd 100644
--- a/src/core/components/files-mfs.js
+++ b/src/core/components/files-mfs.js
@@ -1,39 +1,9 @@
'use strict'
-const promisify = require('promisify-es6')
const mfs = require('ipfs-mfs/core')
-module.exports = (self) => {
- const proxy = {
- add: self.add,
- dag: {
- get: promisify((cid, path, opts, cb) => {
- if (typeof path === 'function') {
- cb = path
- path = undefined
- }
-
- if (typeof opts === 'function') {
- cb = opts
- opts = {}
- }
-
- opts = Object.assign({}, opts, { preload: false })
-
- return self.dag.get(cid, path, opts, cb)
- }),
- put: promisify((node, opts, cb) => {
- if (typeof opts === 'function') {
- cb = opts
- opts = {}
- }
-
- opts = Object.assign({}, opts, { preload: false })
-
- return self.dag.put(node, opts, cb)
- })
- }
- }
-
- return mfs(proxy, self._options)
-}
+module.exports = self => mfs({
+ ipld: self._ipld,
+ repo: self._repo,
+ repoOwner: self._options.repoOwner
+})
diff --git a/src/core/components/files-regular.js b/src/core/components/files-regular.js
index 4ed720644c..d843ba6735 100644
--- a/src/core/components/files-regular.js
+++ b/src/core/components/files-regular.js
@@ -488,5 +488,9 @@ module.exports = function (self) {
},
lsPullStream: _lsPullStream
+
+ // TODO create addFromFs
+ // TODO create addFromStream
+ // TODO create addFromUrl
}
}
diff --git a/src/core/index.js b/src/core/index.js
index 96fba15c56..566cec8404 100644
--- a/src/core/index.js
+++ b/src/core/index.js
@@ -137,6 +137,7 @@ class IPFS extends EventEmitter {
this.shutdown = this.stop
this.isOnline = components.isOnline(this)
// - interface-ipfs-core defined API
+ Object.assign(this, components.filesRegular(this))
this.version = components.version(this)
this.id = components.id(this)
this.repo = components.repo(this)
@@ -145,6 +146,7 @@ class IPFS extends EventEmitter {
this.block = components.block(this)
this.object = components.object(this)
this.dag = components.dag(this)
+ this.files = components.filesMFS(this)
this.libp2p = components.libp2p(this)
this.swarm = components.swarm(this)
this.name = components.name(this)
@@ -172,27 +174,6 @@ class IPFS extends EventEmitter {
this.state = require('./state')(this)
- // ipfs regular Files APIs
- const filesRegular = components.filesRegular(this)
- this.add = filesRegular.add
- this.addReadableStream = filesRegular.addReadableStream
- this.addPullStream = filesRegular.addPullStream
- // TODO create this.addFromFs
- // TODO create this.addFromStream
- // TODO create this.addFromUrl
- this.cat = filesRegular.catImmutable
- this.catReadableStream = filesRegular.catReadableStream
- this.catPullStream = filesRegular.catPullStream
- this.get = filesRegular.getImmutable
- this.getReadableStream = filesRegular.getReadableStream
- this.getPullStream = filesRegular.getPullStream
- this.ls = filesRegular.lsImmutable
- this.lsReadableStream = filesRegular.lsReadableStream
- this.lsPullStream = filesRegular.lsPullStream
-
- // ipfs.files API (aka MFS)
- this.files = components.filesMFS(this)
-
// ipfs.util
this.util = {
crypto,
diff --git a/src/http/api/resources/files.js b/src/http/api/resources/files-regular.js
similarity index 99%
rename from src/http/api/resources/files.js
rename to src/http/api/resources/files-regular.js
index 0a902d4e4e..1a02fab3d1 100644
--- a/src/http/api/resources/files.js
+++ b/src/http/api/resources/files-regular.js
@@ -282,7 +282,7 @@ exports.add = {
}
}
-exports.immutableLs = {
+exports.ls = {
// uses common parseKey method that returns a `key`
parseArgs: exports.parseKey,
diff --git a/src/http/api/resources/index.js b/src/http/api/resources/index.js
index 58b68962cf..66646d29d5 100644
--- a/src/http/api/resources/index.js
+++ b/src/http/api/resources/index.js
@@ -13,7 +13,7 @@ exports.block = require('./block')
exports.swarm = require('./swarm')
exports.bitswap = require('./bitswap')
exports.file = require('./file')
-exports.files = require('./files')
+exports.filesRegular = require('./files-regular')
exports.pubsub = require('./pubsub')
exports.dns = require('./dns')
exports.key = require('./key')
diff --git a/src/http/api/routes/files.js b/src/http/api/routes/files.js
index fca1a68001..2e51ca7b0c 100644
--- a/src/http/api/routes/files.js
+++ b/src/http/api/routes/files.js
@@ -12,9 +12,9 @@ module.exports = (server) => {
path: '/api/v0/cat',
config: {
pre: [
- { method: resources.cat.parseArgs, assign: 'args' }
+ { method: resources.filesRegular.cat.parseArgs, assign: 'args' }
],
- handler: resources.cat.handler
+ handler: resources.filesRegular.cat.handler
}
})
@@ -24,9 +24,9 @@ module.exports = (server) => {
path: '/api/v0/get',
config: {
pre: [
- { method: resources.get.parseArgs, assign: 'args' }
+ { method: resources.filesRegular.get.parseArgs, assign: 'args' }
],
- handler: resources.get.handler
+ handler: resources.filesRegular.get.handler
}
})
@@ -40,8 +40,8 @@ module.exports = (server) => {
output: 'stream',
maxBytes: Number.MAX_SAFE_INTEGER
},
- handler: resources.add.handler,
- validate: resources.add.validate
+ handler: resources.filesRegular.add.handler,
+ validate: resources.filesRegular.add.validate
}
})
@@ -51,9 +51,9 @@ module.exports = (server) => {
path: '/api/v0/ls',
config: {
pre: [
- { method: resources.files.immutableLs.parseArgs, assign: 'args' }
+ { method: resources.filesRegular.ls.parseArgs, assign: 'args' }
],
- handler: resources.files.immutableLs.handler
+ handler: resources.filesRegular.ls.handler
}
})
diff --git a/test/core/files.spec.js b/test/core/files.spec.js
index d1c6e96662..77f99739ca 100644
--- a/test/core/files.spec.js
+++ b/test/core/files.spec.js
@@ -11,7 +11,7 @@ const pull = require('pull-stream')
const IPFSFactory = require('ipfsd-ctl')
const IPFS = require('../../src/core')
-describe.only('files', () => {
+describe('files', () => {
let ipfsd, ipfs
before(function (done) {
diff --git a/test/core/interface.spec.js b/test/core/interface.spec.js
index 372e8996b5..6fde7915c9 100644
--- a/test/core/interface.spec.js
+++ b/test/core/interface.spec.js
@@ -79,38 +79,16 @@ describe('interface-ipfs-core tests', () => {
})
tests.filesRegular(defaultCommonFactory, {
- skip: [
- // .addFromStream
- isNode ? null : {
- name: 'same as .add',
- reason: 'Designed for Node.js only'
- },
- // .addFromFs
- isNode ? null : {
- name: 'a directory',
- reason: 'Designed for Node.js only'
- },
- isNode ? null : {
- name: 'a directory with an odd name',
- reason: 'Designed for Node.js only'
- },
- isNode ? null : {
- name: 'add and ignore a directory',
- reason: 'Designed for Node.js only'
- },
- isNode ? null : {
- name: 'a file',
- reason: 'Designed for Node.js only'
- },
- isNode ? null : {
- name: 'a hidden file in a directory',
- reason: 'Designed for Node.js only'
- },
- isNode ? null : {
- name: 'a with only-hash=true',
- reason: 'Designed for Node.js only'
- }
- ]
+ skip: [{
+ name: 'addFromStream',
+ reason: 'TODO: not implemented yet'
+ }, {
+ name: 'addFromFs',
+ reason: 'TODO: not implemented yet'
+ }, {
+ name: 'addFromUrl',
+ reason: 'TODO: not implemented yet'
+ }]
})
// TODO needs MFS module to be updated
diff --git a/test/http-api/interface.js b/test/http-api/interface.js
index 08b689aa00..314cab9dec 100644
--- a/test/http-api/interface.js
+++ b/test/http-api/interface.js
@@ -26,6 +26,7 @@ describe('interface-ipfs-core over ipfs-api tests', () => {
})
tests.filesRegular(defaultCommonFactory)
+
tests.filesMFS(defaultCommonFactory)
tests.key(CommonFactory.create({
From 0562ace050e25f0df0612f9a3babb69b85c93a8e Mon Sep 17 00:00:00 2001
From: Alan Shaw
Date: Tue, 20 Nov 2018 09:18:34 +0000
Subject: [PATCH 09/11] fix: bad merge
License: MIT
Signed-off-by: Alan Shaw
---
test/core/name.js | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/test/core/name.js b/test/core/name.js
index 4c974b2782..245c4f36b5 100644
--- a/test/core/name.js
+++ b/test/core/name.js
@@ -400,7 +400,7 @@ describe.only('name', function () {
after((done) => ipfsd.stop(done))
it('should resolve an ipfs path correctly', function (done) {
- node.files.add(fixture, (err, res) => {
+ node.add(fixture, (err, res) => {
expect(err).to.not.exist()
node.name.publish(`/ipfs/${res[0].hash}`, (err, res) => {
@@ -416,7 +416,7 @@ describe.only('name', function () {
})
it('should resolve an ipns path correctly', function (done) {
- node.files.add(fixture, (err, res) => {
+ node.add(fixture, (err, res) => {
expect(err).to.not.exist()
node.name.publish(`/ipfs/${res[0].hash}`, (err, res) => {
expect(err).to.not.exist()
From c9f23dfbe309fb80c9efaf23ed042c2290cb6f8d Mon Sep 17 00:00:00 2001
From: Alan Shaw
Date: Tue, 20 Nov 2018 09:26:07 +0000
Subject: [PATCH 10/11] fix: name tests
License: MIT
Signed-off-by: Alan Shaw
---
test/core/name.js | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/test/core/name.js b/test/core/name.js
index 245c4f36b5..08bb9b06da 100644
--- a/test/core/name.js
+++ b/test/core/name.js
@@ -21,7 +21,7 @@ const df = DaemonFactory.create({ type: 'proc' })
const ipfsRef = '/ipfs/QmPFVLPmp9zv5Z5KUqLhe2EivAGccQW2r7M7jhVJGLZoZU'
-describe.only('name', function () {
+describe('name', function () {
if (!isNode) {
return
}
@@ -403,7 +403,7 @@ describe.only('name', function () {
node.add(fixture, (err, res) => {
expect(err).to.not.exist()
- node.name.publish(`/ipfs/${res[0].hash}`, (err, res) => {
+ node.name.publish(`/ipfs/${res[0].hash}`, (err) => {
expect(err).to.not.exist()
ipnsPath.resolvePath(node, `/ipfs/${res[0].hash}`, (err, value) => {
@@ -418,7 +418,7 @@ describe.only('name', function () {
it('should resolve an ipns path correctly', function (done) {
node.add(fixture, (err, res) => {
expect(err).to.not.exist()
- node.name.publish(`/ipfs/${res[0].hash}`, (err, res) => {
+ node.name.publish(`/ipfs/${res[0].hash}`, (err) => {
expect(err).to.not.exist()
ipnsPath.resolvePath(node, `/ipns/${nodeId}`, (err, value) => {
expect(err).to.not.exist()
From d65560156e94b031ceae787bf9ddeb5f0265e368 Mon Sep 17 00:00:00 2001
From: Alan Shaw
Date: Tue, 20 Nov 2018 10:14:39 +0000
Subject: [PATCH 11/11] fix: temporarily pin ipfs-repo to 0.25.0 as 0.25.1 is
breaking
https://github.com/ipfs/js-ipfs-repo/pull/181#issuecomment-440218312
License: MIT
Signed-off-by: Alan Shaw
---
package.json | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/package.json b/package.json
index 0004b50785..ddf616955e 100644
--- a/package.json
+++ b/package.json
@@ -109,7 +109,7 @@
"ipfs-http-response": "~0.2.1",
"ipfs-mfs": "~0.5.2",
"ipfs-multipart": "~0.1.0",
- "ipfs-repo": "~0.25.0",
+ "ipfs-repo": "0.25.0",
"ipfs-unixfs": "~0.1.16",
"ipfs-unixfs-engine": "~0.34.0",
"ipld": "~0.20.1",