diff --git a/.travis.yml b/.travis.yml
index 19ece97bfc..995a49a2f2 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -18,7 +18,7 @@ os:
- osx
- windows
-script: npx nyc -s npm run test:node --timeout=10000 -- --bail
+script: npx nyc -s npx aegir test -t node --timeout 10000 --bail
after_success: npx nyc report --reporter=text-lcov > coverage.lcov && npx codecov
jobs:
@@ -47,12 +47,12 @@ jobs:
- stage: test
name: electron-main
script:
- - xvfb-run npx aegir test -t electron-main -- --bail
+ - xvfb-run npx aegir test -t electron-main -- --bail --timeout 10000
- stage: test
name: electron-renderer
script:
- - xvfb-run npx aegir test -t electron-renderer -- --bail
+ - xvfb-run npx aegir test -t electron-renderer -- --bail --timeout 10000
notifications:
email: false
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 444001f7c9..18a8c00fba 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,8 @@
+
+## [0.36.4](https://github.com/ipfs/js-ipfs/compare/v0.36.3...v0.36.4) (2019-06-18)
+
+
+
## [0.36.3](https://github.com/ipfs/js-ipfs/compare/v0.36.2...v0.36.3) (2019-05-30)
diff --git a/README.md b/README.md
index fabaf55e07..82c538d27b 100644
--- a/README.md
+++ b/README.md
@@ -1008,7 +1008,7 @@ Listing of the main packages used in the IPFS ecosystem. There are also three sp
| [`ipfs-repo`](//github.com/ipfs/js-ipfs-repo) | [](//github.com/ipfs/js-ipfs-repo/releases) | [](https://david-dm.org/ipfs/js-ipfs-repo) | [](https://travis-ci.com/ipfs/js-ipfs-repo) | [](https://codecov.io/gh/ipfs/js-ipfs-repo) | [Jacob Heun](mailto:jacobheun@gmail.com) |
| **Exchange** |
| [`ipfs-block-service`](//github.com/ipfs/js-ipfs-block-service) | [](//github.com/ipfs/js-ipfs-block-service/releases) | [](https://david-dm.org/ipfs/js-ipfs-block-service) | [](https://travis-ci.com/ipfs/js-ipfs-block-service) | [](https://codecov.io/gh/ipfs/js-ipfs-block-service) | [Volker Mische](mailto:volker.mische@gmail.com) |
-| [`ipfs-bitswap`](//github.com/ipfs/js-ipfs-bitswap) | [](//github.com/ipfs/js-ipfs-bitswap/releases) | [](https://david-dm.org/ipfs/js-ipfs-bitswap) | [](https://travis-ci.com/ipfs/js-ipfs-bitswap) | [](https://codecov.io/gh/ipfs/js-ipfs-bitswap) | [Volker Mische](mailto:volker.mische@gmail.com) |
+| [`ipfs-bitswap`](//github.com/ipfs/js-ipfs-bitswap) | [](//github.com/ipfs/js-ipfs-bitswap/releases) | [](https://david-dm.org/ipfs/js-ipfs-bitswap) | [](https://travis-ci.com/ipfs/js-ipfs-bitswap) | [](https://codecov.io/gh/ipfs/js-ipfs-bitswap) | [Dirk McCormick](mailto:dirk@protocol.ai) |
| **libp2p** |
| [`libp2p`](//github.com/libp2p/js-libp2p) | [](//github.com/libp2p/js-libp2p/releases) | [](https://david-dm.org/libp2p/js-libp2p) | [](https://travis-ci.com/libp2p/js-libp2p) | [](https://codecov.io/gh/libp2p/js-libp2p) | [Jacob Heun](mailto:jacobheun@gmail.com) |
| [`libp2p-circuit`](//github.com/libp2p/js-libp2p-circuit) | [](//github.com/libp2p/js-libp2p-circuit/releases) | [](https://david-dm.org/libp2p/js-libp2p-circuit) | [](https://travis-ci.com/libp2p/js-libp2p-circuit) | [](https://codecov.io/gh/libp2p/js-libp2p-circuit) | [Jacob Heun](mailto:jacobheun@gmail.com) |
diff --git a/examples/README.md b/examples/README.md
index 9bd33aa125..73e3769e0b 100644
--- a/examples/README.md
+++ b/examples/README.md
@@ -8,7 +8,6 @@ Let us know if you find any issue or if you want to contribute and add a new tut
- [Tutorial: IPFS 101, spawn a node and add a file to IPFS](./ipfs-101)
- [Tutorial: Build a tiny browser app to exchange files between nodes](./exchange-files-in-browser)
-- [Tutorial: Interact with IPFS directly from your Terminal](./ipfs-cli-fun)
- [Tutorial: Resolve through IPLD graphs with the dag API](./traverse-ipld-graphs)
- [Tutorial: Use IPFS to explore the Ethereum BlockChain](./explore-ethereum-blockchain)
- [Tutorial (Video): How to build an application with IPFS PubSub Room](https://www.youtube.com/watch?v=Nv_Teb--1zg)
diff --git a/package.json b/package.json
index 60225e8303..5cda27c2fa 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "ipfs",
- "version": "0.36.3",
+ "version": "0.36.4",
"description": "JavaScript implementation of the IPFS specification",
"keywords": [
"IPFS"
@@ -76,23 +76,27 @@
"cid-tool": "~0.3.0",
"cids": "~0.7.1",
"class-is": "^1.1.0",
+ "clear-module": "^3.2.0",
"datastore-core": "~0.6.0",
"datastore-pubsub": "~0.1.1",
"debug": "^4.1.0",
"dlv": "^1.1.3",
+ "dns-packet": "^5.2.1",
+ "dns-socket": "^4.2.0",
"err-code": "^1.1.2",
- "file-type": "^11.1.0",
+ "file-type": "^12.0.0",
"fnv1a": "^1.0.1",
"fsm-event": "^2.1.0",
"get-folder-size": "^2.0.0",
"glob": "^7.1.3",
"hapi-pino": "^6.0.0",
+ "hashlru": "^2.3.0",
"human-to-milliseconds": "^1.0.0",
"interface-datastore": "~0.6.0",
"ipfs-bitswap": "~0.24.1",
"ipfs-block": "~0.8.1",
"ipfs-block-service": "~0.15.1",
- "ipfs-http-client": "^32.0.0",
+ "ipfs-http-client": "^32.0.1",
"ipfs-http-response": "~0.3.0",
"ipfs-mfs": "~0.11.4",
"ipfs-multipart": "~0.1.0",
@@ -110,6 +114,7 @@
"ipld-raw": "^4.0.0",
"ipld-zcash": "~0.3.0",
"ipns": "~0.5.2",
+ "is-domain-name": "^1.0.1",
"is-ipfs": "~0.6.1",
"is-pull-stream": "~0.0.0",
"is-stream": "^2.0.0",
@@ -117,6 +122,8 @@
"just-flatten-it": "^2.1.0",
"just-safe-set": "^2.1.0",
"kind-of": "^6.0.2",
+ "ky": "^0.11.1",
+ "ky-universal": "^0.2.1",
"libp2p": "~0.25.3",
"libp2p-bootstrap": "~0.9.3",
"libp2p-crypto": "~0.16.0",
@@ -141,6 +148,8 @@
"multihashes": "~0.4.14",
"multihashing-async": "~0.6.0",
"node-fetch": "^2.3.0",
+ "p-any": "^2.1.0",
+ "p-settle": "^3.1.0",
"peer-book": "~0.9.0",
"peer-id": "~0.12.0",
"peer-info": "~0.15.0",
@@ -155,7 +164,7 @@
"pull-ndjson": "~0.1.1",
"pull-pushable": "^2.2.0",
"pull-sort": "^1.0.1",
- "pull-stream": "^3.6.9",
+ "pull-stream": "^3.6.12",
"pull-stream-to-async-iterator": "^1.0.1",
"pull-stream-to-stream": "^1.3.4",
"pull-traverse": "^1.0.3",
@@ -176,6 +185,7 @@
"aegir": "^19.0.3",
"base64url": "^3.0.1",
"chai": "^4.2.0",
+ "clear-module": "^3.2.0",
"delay": "^4.1.0",
"detect-node": "^2.0.4",
"dir-compare": "^1.4.0",
@@ -183,8 +193,8 @@
"execa": "^1.0.0",
"form-data": "^2.3.3",
"hat": "0.0.3",
- "interface-ipfs-core": "~0.104.0",
- "ipfsd-ctl": "~0.42.0",
+ "interface-ipfs-core": "~0.105.0",
+ "ipfsd-ctl": "~0.43.0",
"libp2p-websocket-star": "~0.10.2",
"ncp": "^2.0.0",
"qs": "^6.5.2",
@@ -248,8 +258,8 @@
"Henrique Dias ",
"Henry Rodrick ",
"Heo Sangmin ",
- "Hugo Dias ",
"Hugo Dias ",
+ "Hugo Dias ",
"Irakli Gozalishvili ",
"Jacob Heun ",
"Jacob Heun ",
@@ -282,6 +292,7 @@
"Paulo Rodrigues ",
"Pedro Teixeira ",
"Portia Burton ",
+ "Prabhakar Poudel ",
"Raoul Millais ",
"RasmusErik Voel Jensen ",
"Richard Littauer ",
diff --git a/src/cli/commands/daemon.js b/src/cli/commands/daemon.js
index 570cd56a86..3ec0bb98b8 100644
--- a/src/cli/commands/daemon.js
+++ b/src/cli/commands/daemon.js
@@ -20,6 +20,16 @@ module.exports = {
type: 'boolean',
default: false
})
+ .option('enable-ipns-experiment', {
+ type: 'boolean',
+ default: false,
+      desc: 'EXPERIMENTAL: enable the experimental DNS-based IPNS routers.'
+ })
+ .option('experimental-ipns-alias', {
+ type: 'string',
+ default: '',
+      desc: 'EXPERIMENTAL: human-readable alias for IPNS subdomains.'
+ })
.option('offline', {
type: 'boolean',
desc: 'Run offline. Do not connect to the rest of the network but provide local API.',
@@ -54,9 +64,13 @@ module.exports = {
preload: { enabled: argv.enablePreload },
EXPERIMENTAL: {
pubsub: argv.enablePubsubExperiment,
+ ipnsDNS: argv.enableIpnsExperiment,
ipnsPubsub: argv.enableNamesysPubsub,
dht: argv.enableDhtExperiment,
sharding: argv.enableShardingExperiment
+ },
+ ipns: {
+ alias: argv.experimentalIpnsAlias
}
})
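For context on how the two new daemon flags surface in the constructor options above, here is a minimal sketch of the equivalent programmatic configuration. It is an illustration only; the alias value is made up, and the option names come from the `daemon.js` and `config.js` changes in this diff.

```js
// equivalent of `--enable-ipns-experiment --experimental-ipns-alias=my-site` (values illustrative)
const IPFS = require('ipfs')

const node = new IPFS({
  EXPERIMENTAL: {
    ipnsDNS: true // maps from --enable-ipns-experiment
  },
  ipns: {
    alias: 'my-site' // maps from --experimental-ipns-alias
  }
})

node.on('ready', () => console.log('node ready with the experimental IPNS routers enabled'))
```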
diff --git a/src/cli/commands/name/publish.js b/src/cli/commands/name/publish.js
index 8452f23248..b84227a5d6 100644
--- a/src/cli/commands/name/publish.js
+++ b/src/cli/commands/name/publish.js
@@ -1,6 +1,6 @@
'use strict'
-const print = require('../../utils').print
+const { print } = require('../../utils')
module.exports = {
command: 'publish ',
@@ -11,21 +11,25 @@ module.exports = {
resolve: {
alias: 'r',
describe: 'Resolve given path before publishing. Default: true.',
- default: true
+ default: true,
+ type: 'boolean'
},
lifetime: {
alias: 't',
describe: 'Time duration that the record will be valid for. Default: 24h.',
- default: '24h'
+ default: '24h',
+ type: 'string'
},
key: {
alias: 'k',
describe: 'Name of the key to be used, as listed by "ipfs key list -l". Default: self.',
- default: 'self'
+ default: 'self',
+ type: 'string'
},
ttl: {
describe: 'Time duration this record should be cached for (caution: experimental).',
- default: ''
+ default: '',
+ type: 'string'
}
},
@@ -33,14 +37,8 @@ module.exports = {
argv.resolve((async () => {
// yargs-promise adds resolve/reject properties to argv
// resolve should use the alias as resolve will always be overwritten to a function
- let resolve = true
-
- if (argv.r === false || argv.r === 'false') {
- resolve = false
- }
-
const opts = {
- resolve,
+ resolve: argv.r,
lifetime: argv.lifetime,
key: argv.key,
ttl: argv.ttl
diff --git a/src/cli/commands/name/resolve.js b/src/cli/commands/name/resolve.js
index e8871a1d17..ca1dd219a3 100644
--- a/src/cli/commands/name/resolve.js
+++ b/src/cli/commands/name/resolve.js
@@ -18,7 +18,7 @@ module.exports = {
type: 'boolean',
alias: 'r',
describe: 'Resolve until the result is not an IPNS name. Default: false.',
- default: false
+ default: true
}
},
@@ -32,11 +32,7 @@ module.exports = {
const ipfs = await argv.getIpfs()
const result = await ipfs.name.resolve(argv.name, opts)
- if (result && result.path) {
- print(result.path)
- } else {
- print(result)
- }
+ print(result)
})())
}
}
diff --git a/src/core/components/name.js b/src/core/components/name.js
index 239d8ecc32..aee8dfba29 100644
--- a/src/core/components/name.js
+++ b/src/core/components/name.js
@@ -7,6 +7,9 @@ const parallel = require('async/parallel')
const human = require('human-to-milliseconds')
const crypto = require('libp2p-crypto')
const errcode = require('err-code')
+const mergeOptions = require('merge-options')
+const mh = require('multihashes')
+const isDomain = require('is-domain-name')
const log = debug('ipfs:name')
log.error = debug('ipfs:name:error')
@@ -35,6 +38,28 @@ const keyLookup = (ipfsNode, kname, callback) => {
})
}
+const appendRemainder = (cb, remainder) => {
+ return (err, result) => {
+ if (err) {
+ return cb(err)
+ }
+ if (remainder.length) {
+ return cb(null, result + '/' + remainder.join('/'))
+ }
+ return cb(null, result)
+ }
+}
+
+/**
+ * @typedef { import("../index") } IPFS
+ */
+
+/**
+ * IPNS - Inter-Planetary Naming System
+ *
+ * @param {IPFS} self
+ * @returns {Object}
+ */
module.exports = function name (self) {
return {
/**
@@ -125,22 +150,15 @@ module.exports = function name (self) {
options = {}
}
- options = options || {}
- const nocache = options.nocache && options.nocache.toString() === 'true'
- const recursive = options.recursive && options.recursive.toString() === 'true'
+ options = mergeOptions({
+ nocache: false,
+ recursive: true
+ }, options)
const offline = self._options.offline
- if (!self.isOnline() && !offline) {
- const errMsg = utils.OFFLINE_ERROR
-
- log.error(errMsg)
- return callback(errcode(errMsg, 'OFFLINE_ERROR'))
- }
-
// TODO: params related logic should be in the core implementation
-
- if (offline && nocache) {
+ if (offline && options.nocache) {
const error = 'cannot specify both offline and nocache'
log.error(error)
@@ -156,12 +174,28 @@ module.exports = function name (self) {
name = `/ipns/${name}`
}
- const resolveOptions = {
- nocache,
- recursive
+ const [ namespace, hash, ...remainder ] = name.slice(1).split('/')
+ try {
+ mh.fromB58String(hash)
+ } catch (err) {
+      // let's check if we have a domain, e.g. /ipns/ipfs.io, and resolve it with DNS
+ if (isDomain(hash)) {
+ return self.dns(hash, options, appendRemainder(callback, remainder))
+ }
+
+ log.error(err)
+ return callback(errcode(new Error('Invalid IPNS name.'), 'ERR_IPNS_INVALID_NAME'))
}
- self._ipns.resolve(name, resolveOptions, callback)
+    // the multihash is valid, so resolve it with IPNS
+    // IPNS resolution needs an online daemon
+ if (!self.isOnline() && !offline) {
+ const errMsg = utils.OFFLINE_ERROR
+
+ log.error(errMsg)
+ return callback(errcode(errMsg, 'OFFLINE_ERROR'))
+ }
+ self._ipns.resolve(`/${namespace}/${hash}`, options, appendRemainder(callback, remainder))
}),
pubsub: namePubsub(self)
}
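The reworked `resolve` above now distinguishes between a valid multihash (resolved through the IPNS routing) and a DNSLink domain (resolved through `ipfs.dns`), with `appendRemainder` re-attaching any trailing path. A hedged usage sketch; the names are illustrative and, per this change, the result is a plain path string rather than a `{ path }` object:

```js
// a DNSLink domain falls through to ipfs.dns() and keeps the trailing path segment
ipfs.name.resolve('/ipns/ipfs.io/docs', { recursive: true }, (err, result) => {
  if (err) throw err
  console.log(result) // e.g. '/ipfs/Qm.../docs' – a string, no { path: ... } wrapper
})

// a base58 peer ID is resolved through the IPNS routing (requires an online daemon)
ipfs.name.resolve(`/ipns/${peerId}`, { nocache: false }, (err, result) => {
  if (err) throw err
  console.log(result)
})
```

Here `ipfs` is an online node and `peerId` is an assumed base58 peer ID string.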
diff --git a/src/core/config.js b/src/core/config.js
index 2fb66bb558..4cc4eb99a2 100644
--- a/src/core/config.js
+++ b/src/core/config.js
@@ -46,9 +46,10 @@ const configSchema = s({
EXPERIMENTAL: optional(s({
pubsub: 'boolean?',
ipnsPubsub: 'boolean?',
+ ipnsDNS: 'boolean?',
sharding: 'boolean?',
dht: 'boolean?'
- })),
+ }, { dht: false, pubsub: false, ipnsDNS: false, ipnsPubsub: false, sharding: false })),
connectionManager: 'object?',
config: optional(s({
API: 'object?',
@@ -69,9 +70,11 @@ const configSchema = s({
Bootstrap: optional(s(['multiaddr-ipfs']))
})),
ipld: 'object?',
+ ipns: 'object?',
libp2p: optional(union(['function', 'object'])) // libp2p validates this
}, {
- repoOwner: true
+ repoOwner: true,
+ ipns: {}
})
const validate = (opts) => {
diff --git a/src/core/index.js b/src/core/index.js
index d457cb6149..3245a6cb9a 100644
--- a/src/core/index.js
+++ b/src/core/index.js
@@ -26,7 +26,16 @@ const defaultRepo = require('./runtime/repo-nodejs')
const preload = require('./preload')
const mfsPreload = require('./mfs-preload')
const ipldOptions = require('./runtime/ipld-nodejs')
-
+/**
+ * @typedef { import("./ipns/index") } IPNS
+ */
+
+/**
+ * The core IPFS node implementation.
+ *
+ * @class IPFS
+ * @extends {EventEmitter}
+ */
class IPFS extends EventEmitter {
constructor (options) {
super()
@@ -76,6 +85,7 @@ class IPFS extends EventEmitter {
this._ipld = new Ipld(ipldOptions(this._blockService, this._options.ipld, this.log))
this._preload = preload(this)
this._mfsPreload = mfsPreload(this)
+ /** @type {IPNS} */
this._ipns = undefined
// eslint-disable-next-line no-console
this._print = this._options.silent ? this.log : console.log
diff --git a/src/core/ipns/index.js b/src/core/ipns/index.js
index a064ece4d5..d7405ca3e7 100644
--- a/src/core/ipns/index.js
+++ b/src/core/ipns/index.js
@@ -2,7 +2,6 @@
const { createFromPrivKey } = require('peer-id')
const series = require('async/series')
-const Receptacle = require('receptacle')
const errcode = require('err-code')
const debug = require('debug')
@@ -13,7 +12,8 @@ const IpnsPublisher = require('./publisher')
const IpnsRepublisher = require('./republisher')
const IpnsResolver = require('./resolver')
const path = require('./path')
-
+const { normalizePath } = require('../utils')
+const TLRU = require('../../utils/tlru')
const defaultRecordTtl = 60 * 1000
class IPNS {
@@ -21,12 +21,19 @@ class IPNS {
this.publisher = new IpnsPublisher(routing, datastore)
this.republisher = new IpnsRepublisher(this.publisher, datastore, peerInfo, keychain, options)
this.resolver = new IpnsResolver(routing)
- this.cache = new Receptacle({ max: 1000 }) // Create an LRU cache with max 1000 items
+ this.cache = new TLRU(1000)
this.routing = routing
}
// Publish
- publish (privKey, value, lifetime, callback) {
+ publish (privKey, value, lifetime = IpnsPublisher.defaultRecordLifetime, callback) {
+ try {
+ value = normalizePath(value)
+ } catch (err) {
+ log.error(err)
+ return callback(err)
+ }
+
series([
(cb) => createFromPrivKey(privKey.bytes, cb),
(cb) => this.publisher.publishWithEOL(privKey, value, lifetime, cb)
@@ -38,12 +45,12 @@ class IPNS {
log(`IPNS value ${value} was published correctly`)
- // Add to cache
+      // Add to cache
const id = results[0].toB58String()
const ttEol = parseFloat(lifetime)
const ttl = (ttEol < defaultRecordTtl) ? ttEol : defaultRecordTtl
- this.cache.set(id, value, { ttl: ttl })
+ this.cache.set(id, value, ttl)
log(`IPNS value ${value} was cached correctly`)
@@ -77,9 +84,7 @@ class IPNS {
const result = this.cache.get(id)
if (result) {
- return callback(null, {
- path: result
- })
+ return callback(null, result)
}
}
@@ -91,18 +96,17 @@ class IPNS {
log(`IPNS record from ${name} was resolved correctly`)
- callback(null, {
- path: result
- })
+ callback(null, result)
})
}
// Initialize keyspace
// sets the ipns record for the given key to point to an empty directory
initializeKeyspace (privKey, value, callback) {
- this.publisher.publish(privKey, value, callback)
+ this.publish(privKey, value, IpnsPublisher.defaultRecordLifetime, callback)
}
}
-exports = module.exports = IPNS
-exports.path = path
+IPNS.path = path
+
+module.exports = IPNS
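One detail of the cache interaction above worth spelling out: after a successful publish the record is cached under the publisher's base58 peer ID, with a TTL capped at `defaultRecordTtl` (one minute). A short illustrative calculation, with made-up numbers:

```js
// mirrors the ttl computation in publish(); values are illustrative
const defaultRecordTtl = 60 * 1000   // 1 minute, same constant as above

const lifetime = 24 * 60 * 60 * 1000 // lifetime is expected in milliseconds here
const ttEol = parseFloat(lifetime)   // 86400000
const ttl = (ttEol < defaultRecordTtl) ? ttEol : defaultRecordTtl // capped at 60000

// this.cache.set(peerId.toB58String(), value, ttl) – resolve() serves the cached path
// until the entry expires, after which it hits the routing again
```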
diff --git a/src/core/ipns/publisher.js b/src/core/ipns/publisher.js
index 8caa9c8dba..12171e3b42 100644
--- a/src/core/ipns/publisher.js
+++ b/src/core/ipns/publisher.js
@@ -11,7 +11,7 @@ log.error = debug('ipfs:ipns:publisher:error')
const ipns = require('ipns')
-const defaultRecordTtl = 60 * 60 * 1000
+const defaultRecordLifetime = 60 * 60 * 1000
// IpnsPublisher is capable of publishing and resolving names to the IPFS routing system.
class IpnsPublisher {
@@ -46,7 +46,7 @@ class IpnsPublisher {
// Accepts a keypair, as well as a value (ipfsPath), and publishes it out to the routing system
publish (privKey, value, callback) {
- this.publishWithEOL(privKey, value, defaultRecordTtl, callback)
+ this.publishWithEOL(privKey, value, defaultRecordLifetime, callback)
}
_putRecordToRouting (record, peerId, callback) {
@@ -79,7 +79,6 @@ class IpnsPublisher {
(cb) => this._publishPublicKey(keys.routingPubKey, publicKey, peerId, cb)
], (err) => {
if (err) {
- log.error(err)
return callback(err)
}
@@ -108,13 +107,10 @@ class IpnsPublisher {
// Add record to routing (buffer key)
this._routing.put(key.toBuffer(), entryData, (err, res) => {
if (err) {
- const errMsg = `ipns record for ${key.toString()} could not be stored in the routing`
-
- log.error(errMsg)
- return callback(errcode(new Error(errMsg), 'ERR_PUTTING_TO_ROUTING'))
+ return callback(errcode(new Error(`ipns record for /ipns/${peerId.toB58String()} could not be stored in the routing`), 'ERR_PUTTING_TO_ROUTING'))
}
- log(`ipns record for ${key.toString()} was stored in the routing`)
+ log(`ipns record for /ipns/${peerId.toB58String()} was stored in the routing`)
callback(null, res)
})
}
@@ -137,13 +133,10 @@ class IpnsPublisher {
// Add public key to routing (buffer key)
this._routing.put(key.toBuffer(), publicKey.bytes, (err, res) => {
if (err) {
- const errMsg = `public key for ${key.toString()} could not be stored in the routing`
-
- log.error(errMsg)
- return callback(errcode(new Error(errMsg), 'ERR_PUTTING_TO_ROUTING'))
+ return callback(errcode(new Error(`public key for /ipns/${peerId.toB58String()} could not be stored in the routing`), 'ERR_PUTTING_TO_ROUTING'))
}
- log(`public key for ${key.toString()} was stored in the routing`)
+ log(`public key for /ipns/${peerId.toB58String()} was stored in the routing`)
callback(null, res)
})
}
@@ -269,4 +262,5 @@ class IpnsPublisher {
}
}
+IpnsPublisher.defaultRecordLifetime = defaultRecordLifetime
exports = module.exports = IpnsPublisher
diff --git a/src/core/ipns/routing/config.js b/src/core/ipns/routing/config.js
index 7faa469258..664963bf03 100644
--- a/src/core/ipns/routing/config.js
+++ b/src/core/ipns/routing/config.js
@@ -1,32 +1,44 @@
'use strict'
-const { TieredDatastore } = require('datastore-core')
const get = require('dlv')
-
+const { TieredDatastore } = require('datastore-core')
const PubsubDatastore = require('./pubsub-datastore')
const OfflineDatastore = require('./offline-datastore')
+const DnsDatastore = require('./experimental/dns-datastore')
+const MDnsDatastore = require('./experimental/mdns-datastore')
+// const WorkersDatastore = require('./experimental/workers-datastore')
+const ExperimentalTieredDatastore = require('./experimental/tiered-datastore')
+/**
+ * @typedef { import("../../index") } IPFS
+ */
+/**
+ * IPNS routing config
+ *
+ * @param {IPFS} ipfs
+ * @returns {function}
+ */
module.exports = (ipfs) => {
// Setup online routing for IPNS with a tiered routing composed by a DHT and a Pubsub router (if properly enabled)
const ipnsStores = []
+ if (ipfs._options.EXPERIMENTAL.ipnsDNS) {
+    // the workers datastore is disabled for now because something is wrong with it
+ // ipnsStores.push(new WorkersDatastore(ipfs._options.ipns))
+ ipnsStores.push(new DnsDatastore(ipfs._options.ipns))
+ ipnsStores.push(new MDnsDatastore(ipfs._options.ipns))
+ return new ExperimentalTieredDatastore(ipnsStores)
+ }
- // Add IPNS pubsub if enabled
- let pubsubDs
- if (get(ipfs._options, 'EXPERIMENTAL.ipnsPubsub', false)) {
- const pubsub = ipfs.libp2p.pubsub
- const localDatastore = ipfs._repo.datastore
- const peerId = ipfs._peerInfo.id
-
- pubsubDs = new PubsubDatastore(pubsub, localDatastore, peerId)
- ipnsStores.push(pubsubDs)
+  // Add IPNS pubsub if enabled
+ if (ipfs._options.EXPERIMENTAL.ipnsPubsub) {
+ ipnsStores.push(new PubsubDatastore(ipfs.libp2p.pubsub, ipfs._repo.datastore, ipfs._peerInfo.id))
}
- // DHT should not be added as routing if we are offline or it is disabled
- if (get(ipfs._options, 'offline') || !get(ipfs._options, 'libp2p.dht.enabled', false)) {
- const offlineDatastore = new OfflineDatastore(ipfs._repo)
- ipnsStores.push(offlineDatastore)
- } else {
+  // Use the DHT only when we are online and it is enabled, otherwise fall back to the offline datastore
+  if (get(ipfs._options, 'offline') || !get(ipfs._options, 'libp2p.config.dht.enabled', false)) {
+    ipnsStores.push(new OfflineDatastore(ipfs._repo))
+  } else {
+    ipnsStores.push(ipfs.libp2p.dht)
+  }
// Create ipns routing with a set of datastores
diff --git a/src/core/ipns/routing/experimental/dns-datastore.js b/src/core/ipns/routing/experimental/dns-datastore.js
new file mode 100644
index 0000000000..b633046643
--- /dev/null
+++ b/src/core/ipns/routing/experimental/dns-datastore.js
@@ -0,0 +1,76 @@
+/* eslint-disable no-console */
+'use strict'
+
+const ky = require('ky-universal').default
+const errcode = require('err-code')
+const debug = require('debug')
+const { dohBinary, keyToBase32 } = require('./utils')
+
+const log = debug('ipfs:ipns:dns-datastore')
+log.error = debug('ipfs:ipns:dns-datastore:error')
+
+class DNSDataStore {
+ constructor (options) {
+ this.options = options
+ }
+
+ /**
+ * Put a key value pair into the datastore
+ * @param {Buffer} key identifier of the value.
+ * @param {Buffer} value value to be stored.
+ * @returns {Promise}
+ */
+ async put (key, value) {
+ const start = Date.now()
+ if (key.toString().startsWith('/pk/')) {
+ return
+ }
+ if (!Buffer.isBuffer(key)) {
+ throw errcode(new Error('DNS datastore key must be a buffer'), 'ERR_INVALID_KEY')
+ }
+ if (!Buffer.isBuffer(value)) {
+ throw errcode(new Error(`DNS datastore value must be a buffer`), 'ERR_INVALID_VALUE')
+ }
+
+ const keyStr = keyToBase32(key)
+ const data = await ky
+ .put(
+ 'https://ipns.dev',
+ {
+ json: {
+ key: keyStr,
+ record: value.toString('base64'),
+ subdomain: true,
+ alias: this.options.alias
+ }
+ }
+ )
+ .json()
+
+ console.log(`
+ DNS Store
+ Domain: ipns.dev
+ Key: ${keyStr}
+ Subdomain: ${data.subdomain}
+ Alias: ${data.alias}
+ Time: ${(Date.now() - start)}ms
+ `)
+ }
+
+ /**
+   * Get a value from the DNS datastore, indexed by the received key (properly encoded).
+ * @param {Buffer} key identifier of the value to be obtained.
+ * @returns {Promise}
+ */
+ get (key) {
+ if (!Buffer.isBuffer(key)) {
+ throw errcode(new Error(`DNS datastore key must be a buffer`), 'ERR_INVALID_KEY')
+ }
+ // https://dns.google.com/experimental
+ // https://cloudflare-dns.com/dns-query
+ // https://mozilla.cloudflare-dns.com/dns-query
+ return dohBinary('https://cloudflare-dns.com/dns-query', 'dns.ipns.dev', key)
+ }
+}
+
+module.exports = DNSDataStore
diff --git a/src/core/ipns/routing/experimental/mdns-datastore.js b/src/core/ipns/routing/experimental/mdns-datastore.js
new file mode 100644
index 0000000000..80c78a0955
--- /dev/null
+++ b/src/core/ipns/routing/experimental/mdns-datastore.js
@@ -0,0 +1,67 @@
+/* eslint-disable no-console */
+'use strict'
+
+const ky = require('ky-universal').default
+const errcode = require('err-code')
+const debug = require('debug')
+const { dohBinary, keyToBase32 } = require('./utils')
+
+const log = debug('ipfs:ipns:mdns-datastore')
+log.error = debug('ipfs:ipns:mdns-datastore:error')
+
+// The MDNS datastore aims to mimic the same record encoding as routing when
+// storing records to the local network endpoint
+class MDNSDataStore {
+ constructor (options) {
+ this.options = options
+ }
+ /**
+ * Put a key value pair into the datastore
+ * @param {Buffer} key identifier of the value.
+ * @param {Buffer} value value to be stored.
+ * @returns {Promise}
+ */
+ async put (key, value) {
+ const start = Date.now()
+ if (key.toString().startsWith('/pk/')) {
+ return
+ }
+ if (!Buffer.isBuffer(key)) {
+ throw errcode(new Error('MDNS datastore key must be a buffer'), 'ERR_INVALID_KEY')
+ }
+ if (!Buffer.isBuffer(value)) {
+ throw errcode(new Error(`MDNS datastore value must be a buffer`), 'ERR_INVALID_VALUE')
+ }
+
+ const keyStr = keyToBase32(key)
+ await ky.put(
+ 'http://ipns.local:8000',
+ {
+ json: {
+ key: keyStr,
+ record: value.toString('base64')
+ }
+ })
+ console.log(`
+ Local Store
+ Domain: ipns.local
+ Key: ${keyStr}
+ Time: ${(Date.now() - start)}ms
+ `)
+ }
+
+ /**
+   * Get a value from the MDNS datastore, indexed by the received key (properly encoded).
+ * @param {Buffer} key identifier of the value to be obtained.
+ * @returns {Promise}
+ */
+ get (key) {
+ if (!Buffer.isBuffer(key)) {
+ throw errcode(new Error(`MDNS datastore key must be a buffer`), 'ERR_INVALID_KEY')
+ }
+
+ return dohBinary('http://ipns.local:8000/dns-query', 'ipns.local', key)
+ }
+}
+
+module.exports = MDNSDataStore
diff --git a/src/core/ipns/routing/experimental/tiered-datastore.js b/src/core/ipns/routing/experimental/tiered-datastore.js
new file mode 100644
index 0000000000..aa571ba1be
--- /dev/null
+++ b/src/core/ipns/routing/experimental/tiered-datastore.js
@@ -0,0 +1,45 @@
+'use strict'
+
+const pany = require('p-any')
+const pSettle = require('p-settle')
+const debug = require('debug')
+const Errors = require('interface-datastore').Errors
+
+const log = debug('ipfs:ipns:tiered-datastore')
+log.error = debug('ipfs:ipns:tiered-datastore:error')
+
+class TieredDatastore {
+ constructor (stores) {
+ this.stores = stores.slice()
+ }
+
+ put (key, value, callback) {
+ pSettle(this.stores.map(s => s.put(key, value)))
+ .then(results => {
+ let fulfilled = false
+ results.forEach(r => {
+ if (r.isFulfilled) {
+ fulfilled = true
+ } else {
+ log.error(r.reason)
+ }
+ })
+
+ if (fulfilled) {
+ return setImmediate(() => callback())
+ }
+ setImmediate(() => callback(Errors.dbWriteFailedError()))
+ })
+ }
+
+ get (key, callback) {
+ pany(this.stores.map(s => s.get(key)))
+ .then(r => setImmediate(() => callback(null, r)))
+ .catch(err => {
+ log.error(err)
+ setImmediate(() => callback(Errors.notFoundError()))
+ })
+ }
+}
+
+module.exports = TieredDatastore
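A hedged usage sketch of the tiered datastore above: a `put` counts as successful if at least one backing store accepts the record, while `get` races all stores and resolves with the first value to arrive. The require paths assume this folder (`src/core/ipns/routing/experimental`), and `routingKey`/`record` stand in for the Buffer key and serialized IPNS record produced elsewhere:

```js
const TieredDatastore = require('./tiered-datastore')
const DnsDatastore = require('./dns-datastore')
const MDnsDatastore = require('./mdns-datastore')

const stores = new TieredDatastore([
  new DnsDatastore({ alias: 'my-site' }), // alias is illustrative
  new MDnsDatastore({})
])

// routingKey and record are assumed Buffers (an '/ipns/...' key and a serialized record)
stores.put(routingKey, record, (err) => {
  if (err) throw err // only fails when every store rejected the write
  stores.get(routingKey, (err, value) => {
    if (err) throw err // not-found error when no store could resolve the key
    console.log('record is', value.length, 'bytes')
  })
})
```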
diff --git a/src/core/ipns/routing/experimental/utils.js b/src/core/ipns/routing/experimental/utils.js
new file mode 100644
index 0000000000..7c599f8292
--- /dev/null
+++ b/src/core/ipns/routing/experimental/utils.js
@@ -0,0 +1,61 @@
+/* eslint-disable no-console */
+'use strict'
+
+const ipns = require('ipns')
+const { Record } = require('libp2p-record')
+const dnsPacket = require('dns-packet')
+const Cid = require('cids')
+const errcode = require('err-code')
+const debug = require('debug')
+const ky = require('ky-universal').default
+
+const log = debug('ipfs:ipns:doh')
+log.error = debug('ipfs:ipns:doh:error')
+
+async function dohBinary (url, domain, key) {
+ const start = Date.now()
+ const keyStr = keyToBase32(key)
+ const buf = dnsPacket.encode({
+ type: 'query',
+ questions: [{
+ type: 'TXT',
+ name: `${keyStr}.${domain}`
+ }]
+ })
+
+ const result = await ky
+ .get(url, {
+ searchParams: {
+ dns: buf.toString('base64')
+ },
+ headers: {
+ accept: 'application/dns-message'
+ }
+ })
+ .arrayBuffer()
+
+ const data = dnsPacket.decode(Buffer.from(result))
+ if (!data || data.answers.length < 1) {
+ throw errcode(new Error('Record not found'), 'ERR_NOT_FOUND')
+ }
+ const record = new Record(key, Buffer.from(Buffer.concat(data.answers[0].data).toString(), 'base64'))
+ console.log(`Resolved ${keyStr}.${domain} in ${(Date.now() - start)}ms`)
+
+ return record.value
+}
+
+/**
+ * Libp2p Key to base32 encoded string
+ *
+ * @param {Buffer} key
+ * @returns {string}
+ */
+function keyToBase32 (key) {
+ const cid = new Cid(key.slice(ipns.namespaceLength))
+ return cid.toV1().toString()
+}
+
+module.exports = {
+ dohBinary,
+ keyToBase32
+}
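In short, `keyToBase32` strips the `/ipns/` namespace from the routing key and re-encodes the remaining multihash as a base32 CIDv1 string, which is safe to use as a DNS label; `dohBinary` then asks for that label's TXT record over DNS-over-HTTPS and decodes the IPNS record from the answer. A hedged sketch, where `routingKeyBuffer` is assumed to be the Buffer form of an `/ipns/<multihash>` routing key:

```js
const { keyToBase32, dohBinary } = require('./utils') // path assumed: this folder

// routingKeyBuffer is an assumed Buffer: '/ipns/' namespace bytes followed by a multihash
const label = keyToBase32(routingKeyBuffer) // base32 CIDv1 string, usable as a DNS label

// query `${label}.dns.ipns.dev` as a TXT record over DoH and get the record value back
dohBinary('https://cloudflare-dns.com/dns-query', 'dns.ipns.dev', routingKeyBuffer)
  .then(value => console.log(`resolved ${label}: ${value.length} bytes`))
  .catch(console.error)
```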
diff --git a/src/core/ipns/routing/experimental/workers-datastore.js b/src/core/ipns/routing/experimental/workers-datastore.js
new file mode 100644
index 0000000000..b7bc382942
--- /dev/null
+++ b/src/core/ipns/routing/experimental/workers-datastore.js
@@ -0,0 +1,81 @@
+/* eslint-disable no-console */
+'use strict'
+
+const ky = require('ky-universal').default
+const errcode = require('err-code')
+const debug = require('debug')
+const { Record } = require('libp2p-record')
+const { keyToBase32 } = require('./utils')
+
+const log = debug('ipfs:ipns:workers-datastore')
+log.error = debug('ipfs:ipns:workers-datastore:error')
+
+// The workers datastore aims to mimic the same record encoding as routing when
+// storing records to the remote workers endpoint
+class WorkersDataStore {
+ /**
+ * Put a key value pair into the datastore
+ * @param {Buffer} key identifier of the value.
+ * @param {Buffer} value value to be stored.
+ * @returns {Promise}
+ */
+ async put (key, value) {
+ const start = Date.now()
+ if (key.toString().startsWith('/pk/')) {
+ return
+ }
+ if (!Buffer.isBuffer(key)) {
+ throw errcode(new Error('Workers datastore key must be a buffer'), 'ERR_INVALID_KEY')
+ }
+ if (!Buffer.isBuffer(value)) {
+ throw errcode(new Error(`Workers datastore value must be a buffer`), 'ERR_INVALID_VALUE')
+ }
+
+ const keyStr = keyToBase32(key)
+ await ky.put(
+ 'https://workers.ipns.dev',
+ {
+ json: {
+ key: keyStr,
+ value: value.toString('base64')
+ }
+ })
+
+ console.log(`
+ Workers Store
+ Domain: workers.ipns.dev
+ Key: ${keyStr}
+ Time: ${(Date.now() - start)}ms
+ `)
+ }
+
+ /**
+   * Get a value from the workers datastore, indexed by the received key (properly encoded).
+ * @param {Buffer} key identifier of the value to be obtained.
+ * @returns {Promise}
+ */
+ async get (key) {
+ const start = Date.now()
+
+ if (!Buffer.isBuffer(key)) {
+ throw errcode(new Error(`Workers datastore key must be a buffer`), 'ERR_INVALID_KEY')
+ }
+
+ const keyStr = keyToBase32(key)
+
+ const data = await ky
+ .get('https://workers.ipns.dev', {
+ searchParams: {
+ key: keyStr
+ }
+ })
+ .text()
+
+ const record = new Record(key, Buffer.from(data, 'base64'))
+ console.log(`Resolved ${keyStr} with workers in: ${(Date.now() - start)}ms`)
+
+ return record.value
+ }
+}
+
+module.exports = WorkersDataStore
diff --git a/src/http/api/resources/name.js b/src/http/api/resources/name.js
index 7c8206444c..02b1eb7326 100644
--- a/src/http/api/resources/name.js
+++ b/src/http/api/resources/name.js
@@ -7,7 +7,7 @@ exports.resolve = {
query: Joi.object().keys({
arg: Joi.string(),
nocache: Joi.boolean().default(false),
- recursive: Joi.boolean().default(false)
+ recursive: Joi.boolean().default(true)
}).unknown()
},
async handler (request, h) {
@@ -17,7 +17,7 @@ exports.resolve = {
const res = await ipfs.name.resolve(arg, request.query)
return h.response({
- Path: res.path
+ Path: res
})
}
}
diff --git a/src/utils/tlru.js b/src/utils/tlru.js
new file mode 100644
index 0000000000..ba3b26e8c6
--- /dev/null
+++ b/src/utils/tlru.js
@@ -0,0 +1,87 @@
+'use strict'
+const hashlru = require('hashlru')
+
+/**
+ * Time-Aware Least Recently Used (TLRU) cache
+ * @see https://arxiv.org/pdf/1801.00390
+ * @todo move this to ipfs-utils or its own package
+ *
+ * @class TLRU
+ */
+class TLRU {
+ /**
+ * Creates an instance of TLRU.
+ *
+ * @param {number} maxSize
+ * @memberof TLRU
+ */
+ constructor (maxSize) {
+ this.lru = hashlru(maxSize)
+ }
+
+ /**
+   * Get the value stored for a key
+ *
+ * @param {string} key
+ * @returns {any}
+ * @memberof TLRU
+ */
+  get (key) {
+    const value = this.lru.get(key)
+    if (value) {
+      if ((value.expire) && (value.expire < Date.now())) {
+        this.lru.remove(key)
+        return undefined
+      }
+      return value.value
+    }
+  }
+
+ /**
+ * Set a key value pair
+ *
+ * @param {string} key
+ * @param {any} value
+   * @param {number} ttl - time to live, in milliseconds
+ * @memberof TLRU
+ */
+ set (key, value, ttl) {
+ this.lru.set(key, { value, expire: Date.now() + ttl })
+ }
+
+ /**
+ * Find if the cache has the key
+ *
+ * @param {string} key
+ * @returns {boolean}
+ * @memberof TLRU
+ */
+ has (key) {
+ const value = this.get(key)
+ if (value) {
+ return true
+ }
+ return false
+ }
+
+ /**
+ * Remove key
+ *
+ * @param {string} key
+ * @memberof TLRU
+ */
+ remove (key) {
+ this.lru.remove(key)
+ }
+
+ /**
+ * Clears the cache
+ *
+ * @memberof TLRU
+ */
+ clear () {
+ this.lru.clear()
+ }
+}
+
+module.exports = TLRU
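A brief usage sketch of the cache above; the keys, values and TTLs are illustrative:

```js
const TLRU = require('./tlru') // path assumed: src/utils/tlru

const cache = new TLRU(1000) // keep at most 1000 entries

cache.set('QmPeerId', '/ipfs/QmSomeHash', 60 * 1000) // entry expires after one minute
cache.get('QmPeerId')    // '/ipfs/QmSomeHash' while fresh, undefined once expired or evicted
cache.has('QmPeerId')    // true while the entry is present and unexpired
cache.remove('QmPeerId') // drop a single entry
cache.clear()            // drop everything
```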
diff --git a/test/cli/name-pubsub.js b/test/cli/name-pubsub.js
index fef64295de..cfeb5cf140 100644
--- a/test/cli/name-pubsub.js
+++ b/test/cli/name-pubsub.js
@@ -14,9 +14,6 @@ const ipfsExec = require('../utils/ipfs-exec')
const DaemonFactory = require('ipfsd-ctl')
const df = DaemonFactory.create({ type: 'js' })
-const checkAll = (bits) => string => bits.every(bit => string.includes(bit))
-const emptyDirCid = 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'
-
const spawnDaemon = (callback) => {
df.spawn({
exec: path.resolve(`${__dirname}/../../src/cli/bin.js`),
@@ -165,53 +162,6 @@ describe('name-pubsub', () => {
})
})
})
-
- describe('pubsub records', () => {
- let cidAdded
-
- before(function (done) {
- this.timeout(50 * 1000)
- ipfsA(`add ${path.resolve(`${__dirname}/../../src/init-files/init-docs/readme`)}`)
- .then((out) => {
- cidAdded = out.split(' ')[1]
- done()
- })
- })
-
- it('should publish the received record to the subscriber', function () {
- this.timeout(80 * 1000)
-
- return ipfsB(`name resolve ${nodeBId.id}`)
- .then((res) => {
- expect(res).to.exist()
- expect(res).to.satisfy(checkAll([emptyDirCid])) // Empty dir received (subscribed)
-
- return ipfsA(`name resolve ${nodeBId.id}`)
- })
- .catch((err) => {
- expect(err).to.exist() // Not available (subscribed now)
-
- return ipfsB(`name publish ${cidAdded}`)
- })
- .then((res) => {
- // published to IpfsB and published through pubsub to ipfsa
- expect(res).to.exist()
- expect(res).to.satisfy(checkAll([cidAdded, nodeBId.id]))
-
- return ipfsB(`name resolve ${nodeBId.id}`)
- })
- .then((res) => {
- expect(res).to.exist()
- expect(res).to.satisfy(checkAll([cidAdded]))
-
- return ipfsA(`name resolve ${nodeBId.id}`)
- })
- .then((res) => {
- expect(res).to.exist()
- expect(res).to.satisfy(checkAll([cidAdded])) // value propagated to node B
- })
- })
- })
})
describe('disabled', () => {
diff --git a/test/cli/name.js b/test/cli/name.js
index 1c0638503f..984d777da8 100644
--- a/test/cli/name.js
+++ b/test/cli/name.js
@@ -1,305 +1,59 @@
-/* eslint max-nested-callbacks: ["error", 6] */
/* eslint-env mocha */
'use strict'
-const chai = require('chai')
-const dirtyChai = require('dirty-chai')
-const expect = chai.expect
-chai.use(dirtyChai)
-const path = require('path')
-const hat = require('hat')
-const ipfsExec = require('../utils/ipfs-exec')
-
-const DaemonFactory = require('ipfsd-ctl')
-const df = DaemonFactory.create({ type: 'js' })
-
-const checkAll = (bits) => string => bits.every(bit => string.includes(bit))
+const sinon = require('sinon')
+const YargsPromise = require('yargs-promise')
+const clearModule = require('clear-module')
describe('name', () => {
- describe('working locally', () => {
- const passPhrase = hat()
- const pass = '--pass ' + passPhrase
- const name = 'test-key-' + hat()
-
- let ipfs
- let ipfsd
-
- let cidAdded
- let nodeId
- let keyId
-
- before(function (done) {
- this.timeout(80 * 1000)
-
- df.spawn({
- exec: path.resolve(`${__dirname}/../../src/cli/bin.js`),
- config: {
- Bootstrap: []
- },
- args: ['--pass', passPhrase, '--offline'],
- initOptions: { bits: 512 }
- }, (err, _ipfsd) => {
- expect(err).to.not.exist()
-
- ipfsd = _ipfsd
- ipfs = ipfsExec(_ipfsd.repoPath)
-
- ipfs(`${pass} key gen ${name} --type rsa --size 2048`)
- .then((out) => {
- expect(out).to.include(name)
- keyId = out.split(' ')[1]
-
- return ipfs('id')
- })
- .then((res) => {
- const id = JSON.parse(res)
- expect(id).to.have.property('id')
- nodeId = id.id
-
- return ipfs(`add ${path.resolve(`${__dirname}/../../src/init-files/init-docs/readme`)}`)
- })
- .then((out) => {
- cidAdded = out.split(' ')[1]
- done()
- })
- })
- })
-
- after(function (done) {
- if (ipfsd) {
- ipfsd.stop(() => done())
- } else {
- done()
- }
- })
-
- it('should publish correctly when the file was already added', function () {
- this.timeout(70 * 1000)
-
- return ipfs(`name publish ${cidAdded}`).then((res) => {
- expect(res).to.exist()
- expect(res).to.satisfy(checkAll([cidAdded, nodeId]))
- })
- })
-
- it('should publish and resolve an entry with the default options', function () {
- this.timeout(70 * 1000)
-
- return ipfs(`name publish ${cidAdded}`)
- .then((res) => {
- expect(res).to.exist()
-
- return ipfs('name resolve')
- })
- .then((res) => {
- expect(res).to.exist()
- expect(res).to.satisfy(checkAll([cidAdded]))
- })
- })
-
- it('should publish correctly when the file was not added but resolve is disabled', function () {
- this.timeout(70 * 1000)
-
- const notAddedCid = 'QmPFVLPmp9zv5Z5KUqLhe2EivAGccQW2r7M7jhVJGLZoZU'
-
- return ipfs(`name publish ${notAddedCid} --resolve false`).then((res) => {
- expect(res).to.exist()
- expect(res).to.satisfy(checkAll([notAddedCid, nodeId]))
- })
- })
-
- it('should not get the entry correctly if its validity time expired', function () {
- this.timeout(70 * 1000)
-
- return ipfs(`name publish ${cidAdded} --lifetime 10ns`)
- .then((res) => {
- expect(res).to.exist()
-
- setTimeout(function () {
- return ipfs('name resolve')
- .then((res) => {
- expect(res).to.not.exist()
- })
- .catch((err) => {
- expect(err).to.exist()
- })
- }, 1)
- })
- })
-
- it('should publish correctly when a new key is used', function () {
- this.timeout(70 * 1000)
-
- return ipfs(`name publish ${cidAdded} --key ${name}`).then((res) => {
- expect(res).to.exist()
- expect(res).to.satisfy(checkAll([cidAdded, keyId]))
- })
- })
-
- it('should return the immediate pointing record, unless using the recursive parameter', function () {
- this.timeout(90 * 1000)
-
- return ipfs(`name publish ${cidAdded}`)
- .then((res) => {
- expect(res).to.exist()
- expect(res).to.satisfy(checkAll([cidAdded, nodeId]))
-
- return ipfs(`name publish /ipns/${nodeId} --key ${name}`)
- })
- .then((res) => {
- expect(res).to.exist()
- expect(res).to.satisfy(checkAll([nodeId, keyId]))
-
- return ipfs(`name resolve ${keyId}`)
- })
- .then((res) => {
- expect(res).to.exist()
- expect(res).to.satisfy(checkAll([nodeId]))
- })
- })
-
- it('should go recursively until finding an ipfs hash', function () {
- this.timeout(90 * 1000)
-
- return ipfs(`name publish ${cidAdded}`)
- .then((res) => {
- expect(res).to.exist()
- expect(res).to.satisfy(checkAll([cidAdded, nodeId]))
-
- return ipfs(`name publish /ipns/${nodeId} --key ${name}`)
- })
- .then((res) => {
- expect(res).to.exist()
- expect(res).to.satisfy(checkAll([nodeId, keyId]))
-
- return ipfs(`name resolve ${keyId} --recursive`)
- })
- .then((res) => {
- expect(res).to.exist()
- expect(res).to.satisfy(checkAll([cidAdded]))
- })
- })
+ let cli
+ let cliUtils
+ beforeEach(() => {
+ cliUtils = require('../../src/cli/utils')
+ cli = new YargsPromise(require('../../src/cli/parser'))
+ })
+ afterEach(() => {
+ sinon.restore()
+ // TODO: the lines below shouldn't be necessary, cli needs refactor to simplify testability
+ // Force the next require to not use require cache
+ clearModule('../../src/cli/utils')
+ clearModule('../../src/cli/parser')
})
- describe('using dht', () => {
- const passPhrase = hat()
- const pass = '--pass ' + passPhrase
- const name = 'test-key-' + hat()
-
- let ipfs
- let ipfsd
-
- let cidAdded
- let nodeId
- let keyId
-
- before(function (done) {
- this.timeout(80 * 1000)
-
- df.spawn({
- exec: path.resolve(`${__dirname}/../../src/cli/bin.js`),
- config: {
- Bootstrap: [],
- Discovery: {
- MDNS: {
- Enabled: false
- },
- webRTCStar: {
- Enabled: false
- }
- }
- },
- args: ['--pass', passPhrase],
- initOptions: { bits: 512 }
- }, (err, _ipfsd) => {
- expect(err).to.not.exist()
-
- ipfsd = _ipfsd
- ipfs = ipfsExec(_ipfsd.repoPath)
-
- ipfs(`${pass} key gen ${name} --type rsa --size 2048`)
- .then((out) => {
- expect(out).to.include(name)
- keyId = out.split(' ')[1]
-
- return ipfs('id')
- })
- .then((res) => {
- const id = JSON.parse(res)
- expect(id).to.have.property('id')
- nodeId = id.id
-
- return ipfs(`add ${path.resolve(`${__dirname}/../../src/init-files/init-docs/readme`)}`)
- })
- .then((out) => {
- cidAdded = out.split(' ')[1]
- done()
- })
- })
- })
-
- after(function (done) {
- if (ipfsd) {
- ipfsd.stop(() => done())
- } else {
- done()
- }
- })
-
- it('should publish and resolve an entry with the default options', function () {
- this.timeout(70 * 1000)
-
- return ipfs(`name publish ${cidAdded}`)
- .then((res) => {
- expect(res).to.exist()
-
- return ipfs('name resolve')
- })
- .then((res) => {
- expect(res).to.exist()
- expect(res).to.satisfy(checkAll([cidAdded]))
- })
- })
-
- it('should not get the entry correctly if its validity time expired', function () {
- this.timeout(70 * 1000)
-
- return ipfs(`name publish ${cidAdded} --lifetime 10ns`)
- .then((res) => {
- expect(res).to.exist()
-
- setTimeout(function () {
- return ipfs('name resolve')
- .then((res) => {
- expect(res).to.not.exist()
- })
- .catch((err) => {
- expect(err).to.exist()
- })
- }, 1)
- })
- })
+ it('resolve', async () => {
+ const resolveFake = sinon.fake()
- it('should return the immediate pointing record, unless using the recursive parameter', function () {
- this.timeout(90 * 1000)
+ sinon
+ .stub(cliUtils, 'getIPFS')
+ .callsArgWith(1, null, { name: { resolve: resolveFake } })
- return ipfs(`name publish ${cidAdded}`)
- .then((res) => {
- expect(res).to.exist()
- expect(res).to.satisfy(checkAll([cidAdded, nodeId]))
+ // TODO: the lines below shouldn't be necessary, cli needs refactor to simplify testability
+ // Force the next require to not use require cache
+ clearModule('../../src/cli/commands/name/resolve.js')
- return ipfs(`name publish /ipns/${nodeId} --key ${name}`)
- })
- .then((res) => {
- expect(res).to.exist()
- expect(res).to.satisfy(checkAll([nodeId, keyId]))
+ await cli.parse(`name resolve test`)
+ sinon.assert.calledWith(resolveFake, 'test', { nocache: false, recursive: true })
+ })
- return ipfs(`name resolve ${keyId}`)
- })
- .then((res) => {
- expect(res).to.exist()
- expect(res).to.satisfy(checkAll([nodeId]))
- })
+ it('publish', async () => {
+ const publishFake = sinon.fake.returns({ name: 'name', value: 'value' })
+ const printSpy = sinon.spy(cliUtils, 'print')
+
+ sinon
+ .stub(cliUtils, 'getIPFS')
+ .callsArgWith(1, null, { name: { publish: publishFake } })
+
+ // TODO: the lines below shouldn't be necessary, cli needs refactor to simplify testability
+ // Force the next require to not use require cache
+ clearModule('../../src/cli/commands/name/publish.js')
+
+ await cli.parse(`name publish test --silent`)
+ sinon.assert.calledWith(printSpy, 'Published to name: value')
+ sinon.assert.calledWith(publishFake, 'test', {
+ resolve: true,
+ lifetime: '24h',
+ key: 'self',
+ ttl: ''
})
})
})
diff --git a/test/core/files-regular-utils.js b/test/core/files-regular-utils.js
index 3b86020d3f..380ff3f3e8 100644
--- a/test/core/files-regular-utils.js
+++ b/test/core/files-regular-utils.js
@@ -12,47 +12,38 @@ describe('files-regular/utils', () => {
describe('parseChunkerString', () => {
it('handles an empty string', () => {
const options = utils.parseChunkerString('')
- expect(options).to.have.property('chunker').to.equal('fixed')
+ expect(options.chunker).to.equal('fixed')
})
it('handles a null chunker string', () => {
const options = utils.parseChunkerString(null)
- expect(options).to.have.property('chunker').to.equal('fixed')
+ expect(options.chunker).to.equal('fixed')
})
it('parses a fixed size string', () => {
const options = utils.parseChunkerString('size-512')
- expect(options).to.have.property('chunker').to.equal('fixed')
- expect(options)
- .to.have.property('chunkerOptions')
- .to.have.property('maxChunkSize')
- .to.equal(512)
+ expect(options.chunker).to.equal('fixed')
+ expect(options.chunkerOptions.maxChunkSize).to.equal(512)
})
it('parses a rabin string without size', () => {
const options = utils.parseChunkerString('rabin')
- expect(options).to.have.property('chunker').to.equal('rabin')
- expect(options)
- .to.have.property('chunkerOptions')
- .to.have.property('avgChunkSize')
+ expect(options.chunker).to.equal('rabin')
+ expect(options.chunkerOptions.avgChunkSize).to.equal(262144)
})
it('parses a rabin string with only avg size', () => {
const options = utils.parseChunkerString('rabin-512')
- expect(options).to.have.property('chunker').to.equal('rabin')
- expect(options)
- .to.have.property('chunkerOptions')
- .to.have.property('avgChunkSize')
- .to.equal(512)
+ expect(options.chunker).to.equal('rabin')
+ expect(options.chunkerOptions.avgChunkSize).to.equal(512)
})
it('parses a rabin string with min, avg, and max', () => {
const options = utils.parseChunkerString('rabin-42-92-184')
- expect(options).to.have.property('chunker').to.equal('rabin')
- expect(options).to.have.property('chunkerOptions')
- expect(options.chunkerOptions).to.have.property('minChunkSize').to.equal(42)
- expect(options.chunkerOptions).to.have.property('avgChunkSize').to.equal(92)
- expect(options.chunkerOptions).to.have.property('maxChunkSize').to.equal(184)
+ expect(options.chunker).to.equal('rabin')
+ expect(options.chunkerOptions.minChunkSize).to.equal(42)
+ expect(options.chunkerOptions.avgChunkSize).to.equal(92)
+ expect(options.chunkerOptions.maxChunkSize).to.equal(184)
})
it('throws an error for unsupported chunker type', () => {
diff --git a/test/core/interface.spec.js b/test/core/interface.spec.js
index df572c19e2..a091dd0ea4 100644
--- a/test/core/interface.spec.js
+++ b/test/core/interface.spec.js
@@ -4,26 +4,10 @@
const tests = require('interface-ipfs-core')
const CommonFactory = require('../utils/interface-common-factory')
const isNode = require('detect-node')
-const dnsFetchStub = require('../utils/dns-fetch-stub')
describe('interface-ipfs-core tests', function () {
this.timeout(20 * 1000)
- // ipfs.dns in the browser calls out to https://ipfs.io/api/v0/dns.
- // The following code stubs self.fetch to return a static CID for calls
- // to https://ipfs.io/api/v0/dns?arg=ipfs.io.
- if (!isNode) {
- const fetch = self.fetch
-
- before(() => {
- self.fetch = dnsFetchStub(fetch)
- })
-
- after(() => {
- self.fetch = fetch
- })
- }
-
const defaultCommonFactory = CommonFactory.create()
tests.bitswap(defaultCommonFactory, { skip: !isNode })
@@ -107,29 +91,13 @@ describe('interface-ipfs-core tests', function () {
{
name: 'should resolve IPNS link recursively',
reason: 'TODO: IPNS resolve not yet implemented https://github.com/ipfs/js-ipfs/issues/1918'
- },
- {
- name: 'should recursively resolve ipfs.io',
- reason: 'TODO: ipfs.io dnslink=/ipns/website.ipfs.io & IPNS resolve not yet implemented https://github.com/ipfs/js-ipfs/issues/1918'
}
]
})
tests.name(CommonFactory.create({
spawnOptions: {
- args: ['--pass ipfs-is-awesome-software', '--offline'],
- initOptions: { bits: 512 },
- config: {
- Bootstrap: [],
- Discovery: {
- MDNS: {
- Enabled: false
- },
- webRTCStar: {
- Enabled: false
- }
- }
- }
+ args: ['--pass ipfs-is-awesome-software', '--offline']
}
}))
diff --git a/test/core/kad-dht.node.js b/test/core/kad-dht.node.js
index d07be33f20..d06392abbc 100644
--- a/test/core/kad-dht.node.js
+++ b/test/core/kad-dht.node.js
@@ -32,7 +32,7 @@ function createNode (callback) {
}, callback)
}
-describe('kad-dht is routing content and peers correctly', () => {
+describe.skip('kad-dht is routing content and peers correctly', () => {
let nodeA
let nodeB
let nodeC
diff --git a/test/core/name-pubsub.js b/test/core/name-pubsub.js
index 575fb54fad..884d0a55b7 100644
--- a/test/core/name-pubsub.js
+++ b/test/core/name-pubsub.js
@@ -18,6 +18,7 @@ const isNode = require('detect-node')
const ipns = require('ipns')
const IPFS = require('../../src')
const waitFor = require('../utils/wait-for')
+const delay = require('interface-ipfs-core/src/utils/delay')
const DaemonFactory = require('ipfsd-ctl')
const df = DaemonFactory.create({ type: 'proc' })
@@ -34,6 +35,7 @@ describe('name-pubsub', function () {
let nodeA
let nodeB
let idA
+ let idB
const createNode = (callback) => {
df.spawn({
@@ -73,6 +75,7 @@ describe('name-pubsub', function () {
expect(err).to.not.exist()
idA = ids[0]
+ idB = ids[1]
nodeA.swarm.connect(ids[1].addresses[0], done)
})
})
@@ -130,10 +133,36 @@ describe('name-pubsub', function () {
expect(err).to.not.exist()
expect(res).to.exist()
- expect(res[5]).to.exist()
- expect(res[5].path).to.equal(ipfsRef)
+ expect(res[5]).to.equal(ipfsRef)
done()
})
})
})
+
+ it('should self resolve, publish and then resolve correctly', async function () {
+ this.timeout(6000)
+ const emptyDirCid = '/ipfs/QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'
+ const [{ path }] = await nodeA.add(Buffer.from('pubsub records'))
+
+ const resolvesEmpty = await nodeB.name.resolve(idB.id)
+ expect(resolvesEmpty).to.be.eq(emptyDirCid)
+
+ try {
+ await nodeA.name.resolve(idB.id)
+ } catch (error) {
+ expect(error).to.exist()
+ }
+
+ const publish = await nodeB.name.publish(path)
+ expect(publish).to.be.eql({
+ name: idB.id,
+ value: `/ipfs/${path}`
+ })
+
+ const resolveB = await nodeB.name.resolve(idB.id)
+ expect(resolveB).to.be.eq(`/ipfs/${path}`)
+ await delay(5000)
+ const resolveA = await nodeA.name.resolve(idB.id)
+ expect(resolveA).to.be.eq(`/ipfs/${path}`)
+ })
})
diff --git a/test/core/name.js b/test/core/name.spec.js
similarity index 80%
rename from test/core/name.js
rename to test/core/name.spec.js
index 99b8257251..1191eba711 100644
--- a/test/core/name.js
+++ b/test/core/name.spec.js
@@ -9,11 +9,9 @@ const expect = chai.expect
chai.use(dirtyChai)
const sinon = require('sinon')
-const fs = require('fs')
const parallel = require('async/parallel')
const series = require('async/series')
-const isNode = require('detect-node')
const IPFS = require('../../src')
const ipnsPath = require('../../src/core/ipns/path')
const ipnsRouting = require('../../src/core/ipns/routing/config')
@@ -34,120 +32,13 @@ const publishAndResolve = (publisher, resolver, ipfsRef, publishOpts, nodeId, re
expect(err).to.not.exist()
expect(res[0]).to.exist()
expect(res[1]).to.exist()
- expect(res[1].path).to.equal(ipfsRef)
+ expect(res[1]).to.equal(ipfsRef)
callback()
})
}
describe('name', function () {
- if (!isNode) {
- return
- }
-
- describe('working locally', function () {
- let node
- let nodeId
- let ipfsd
-
- before(function (done) {
- this.timeout(50 * 1000)
- df.spawn({
- exec: IPFS,
- args: [`--pass ${hat()}`, '--offline'],
- config: { Bootstrap: [] }
- }, (err, _ipfsd) => {
- expect(err).to.not.exist()
- ipfsd = _ipfsd
- node = _ipfsd.api
-
- node.id().then((res) => {
- expect(res.id).to.exist()
-
- nodeId = res.id
- done()
- })
- })
- })
-
- after((done) => ipfsd.stop(done))
-
- it('should publish and then resolve correctly with the default options', function (done) {
- this.timeout(50 * 1000)
-
- publishAndResolve(node, node, ipfsRef, { resolve: false }, nodeId, {}, done)
- })
-
- it('should publish correctly with the lifetime option and resolve', function (done) {
- this.timeout(50 * 1000)
-
- const publishOpts = {
- resolve: false,
- lifetime: '2h'
- }
-
- publishAndResolve(node, node, ipfsRef, publishOpts, nodeId, {}, done)
- })
-
- it('should not get the entry correctly if its validity time expired', function (done) {
- this.timeout(50 * 1000)
-
- node.name.publish(ipfsRef, { resolve: false, lifetime: '1ms' }, (err, res) => {
- expect(err).to.not.exist()
- expect(res).to.exist()
-
- setTimeout(function () {
- node.name.resolve(nodeId, (err) => {
- expect(err).to.exist()
- done()
- })
- }, 2)
- })
- })
-
- it('should recursively resolve to an IPFS hash', function (done) {
- this.timeout(90 * 1000)
- const keyName = hat()
-
- node.key.gen(keyName, { type: 'rsa', size: 2048 }, function (err, key) {
- expect(err).to.not.exist()
- series([
- (cb) => node.name.publish(ipfsRef, { resolve: false }, cb),
- (cb) => node.name.publish(`/ipns/${nodeId}`, { resolve: false, key: keyName }, cb),
- (cb) => node.name.resolve(key.id, { recursive: true }, cb)
- ], (err, res) => {
- expect(err).to.not.exist()
- expect(res[2]).to.exist()
- expect(res[2].path).to.equal(ipfsRef)
- done()
- })
- })
- })
-
- it('should not recursively resolve to an IPFS hash if the option recursive is not provided', function (done) {
- this.timeout(90 * 1000)
- const keyName = hat()
-
- node.key.gen(keyName, { type: 'rsa', size: 2048 }, function (err, key) {
- expect(err).to.not.exist()
- series([
- (cb) => node.name.publish(ipfsRef, { resolve: false }, cb),
- (cb) => node.name.publish(`/ipns/${nodeId}`, { resolve: false, key: keyName }, cb),
- (cb) => node.name.resolve(key.id, cb)
- ], (err, res) => {
- expect(err).to.not.exist()
- expect(res[2]).to.exist()
- expect(res[2].path).to.equal(`/ipns/${nodeId}`)
- done()
- })
- })
- })
- })
-
describe('republisher', function () {
- if (!isNode) {
- return
- }
-
let node
let ipfsd
@@ -277,7 +168,7 @@ describe('name', function () {
], (err, res) => {
expect(err).to.not.exist()
expect(res[2]).to.exist()
- expect(res[2].path).to.equal(ipfsRef)
+ expect(res[2]).to.equal(ipfsRef)
done()
})
})
@@ -285,10 +176,6 @@ describe('name', function () {
})
describe('errors', function () {
- if (!isNode) {
- return
- }
-
let node
let nodeId
let ipfsd
@@ -460,20 +347,15 @@ describe('name', function () {
})
describe('ipns.path', function () {
- const path = 'test/fixtures/planets/solar-system.md'
const fixture = {
- path,
- content: fs.readFileSync(path)
+ path: 'test/fixtures/planets/solar-system.md',
+ content: Buffer.from('ipns.path')
}
let node
let ipfsd
let nodeId
- if (!isNode) {
- return
- }
-
before(function (done) {
this.timeout(40 * 1000)
df.spawn({
diff --git a/test/core/node.js b/test/core/node.js
index b9297cce42..35f185f1d3 100644
--- a/test/core/node.js
+++ b/test/core/node.js
@@ -2,7 +2,6 @@
require('./circuit-relay')
require('./files-regular-utils')
-require('./name')
require('./name-pubsub')
require('./key-exchange')
require('./pin')
diff --git a/test/http-api/interface.js b/test/http-api/interface.js
index 43c6ff6a12..d1607dfb71 100644
--- a/test/http-api/interface.js
+++ b/test/http-api/interface.js
@@ -96,6 +96,30 @@ describe('interface-ipfs-core over ipfs-http-client tests', () => {
]
})
+ tests.name(CommonFactory.create({
+ spawnOptions: {
+ args: ['--pass ipfs-is-awesome-software', '--offline']
+ }
+ }))
+
+ tests.namePubsub(CommonFactory.create({
+ spawnOptions: {
+ args: ['--enable-namesys-pubsub'],
+ initOptions: { bits: 1024 },
+ config: {
+ Bootstrap: [],
+ Discovery: {
+ MDNS: {
+ Enabled: false
+ },
+ webRTCStar: {
+ Enabled: false
+ }
+ }
+ }
+ }
+ }))
+
tests.object(defaultCommonFactory)
tests.pin(defaultCommonFactory)
diff --git a/test/utils/dns-fetch-stub.js b/test/utils/dns-fetch-stub.js
deleted file mode 100644
index a1e24a122c..0000000000
--- a/test/utils/dns-fetch-stub.js
+++ /dev/null
@@ -1,16 +0,0 @@
-'use strict'
-
-// Create a fetch stub with a fall through to the provided fetch implementation
-// if the URL doesn't match https://ipfs.io/api/v0/dns?arg=ipfs.io.
-module.exports = (fetch) => {
- return function () {
- if (arguments[0].startsWith('https://ipfs.io/api/v0/dns?arg=ipfs.io')) {
- return Promise.resolve({
- json: () => Promise.resolve({
- Path: '/ipfs/QmYNQJoKGNHTpPxCBPh9KkDpaExgd2duMa3aF6ytMpHdao'
- })
- })
- }
- return fetch.apply(this, arguments)
- }
-}