diff --git a/.aegir.js b/.aegir.js index 47a415a..e5e14f7 100644 --- a/.aegir.js +++ b/.aegir.js @@ -4,17 +4,7 @@ const path = require('path') const esbuild = { // this will inject all the named exports from 'node-globals.js' as globals - inject: [path.join(__dirname, 'scripts/node-globals.js')], - plugins: [ - { - name: 'node built ins', // this will make the bundler resolve node builtins to the respective browser polyfill - setup (build) { - build.onResolve({ filter: /^stream$/ }, () => { - return { path: require.resolve('readable-stream') } - }) - } - } - ] + inject: [path.join(__dirname, 'scripts/node-globals.js')] } /** @type {import('aegir').PartialOptions} */ @@ -27,6 +17,6 @@ module.exports = { } }, build: { - config: esbuild + bundlesizeMax: '37kB' } } diff --git a/migrations/migration-10/index.js b/migrations/migration-10/index.js index b91f084..b1896f2 100644 --- a/migrations/migration-10/index.js +++ b/migrations/migration-10/index.js @@ -1,7 +1,6 @@ 'use strict' const { - createStore, findLevelJs } = require('../../src/utils') const fromString = require('uint8arrays/from-string') @@ -10,6 +9,7 @@ const toString = require('uint8arrays/to-string') /** * @typedef {import('../../src/types').Migration} Migration * @typedef {import('interface-datastore').Datastore} Datastore + * @typedef {import('interface-blockstore').Blockstore} Blockstore * @typedef {import('../../src/types').MigrationProgressCallback} MigrationProgressCallback * * @typedef {{ type: 'del', key: string | Uint8Array } | { type: 'put', key: string | Uint8Array, value: Uint8Array }} Operation @@ -78,18 +78,32 @@ async function keysToStrings (name, store, onProgress = () => {}) { } /** - * - * @param {string} repoPath - * @param {any} repoOptions + * @param {any} store + * @returns {Datastore} + */ +function unwrap (store) { + if (store.child) { + return unwrap(store.child) + } + + return store +} + +/** + * @param {import('../../src/types').Backends} backends * @param {MigrationProgressCallback} onProgress * @param {*} fn */ -async function process (repoPath, repoOptions, onProgress, fn) { - const datastores = Object.keys(repoOptions.storageBackends) - .filter(key => repoOptions.storageBackends[key].name === 'LevelDatastore') - .map(name => ({ - name, - store: createStore(repoPath, name, repoOptions) +async function process (backends, onProgress, fn) { + /** + * @type {{ name: string, store: Datastore }[]} + */ + const datastores = Object.entries(backends) + .map(([key, backend]) => ({ key, backend: unwrap(backend) })) + .filter(({ key, backend }) => backend.constructor.name === 'LevelDatastore') + .map(({ key, backend }) => ({ + name: key, + store: backend })) onProgress(0, `Migrating ${datastores.length} dbs`) @@ -120,11 +134,11 @@ async function process (repoPath, repoOptions, onProgress, fn) { module.exports = { version: 10, description: 'Migrates datastore-level keys to binary', - migrate: (repoPath, repoOptions, onProgress = () => {}) => { - return process(repoPath, repoOptions, onProgress, keysToBinary) + migrate: (backends, onProgress = () => {}) => { + return process(backends, onProgress, keysToBinary) }, - revert: (repoPath, repoOptions, onProgress = () => {}) => { - return process(repoPath, repoOptions, onProgress, keysToStrings) + revert: (backends, onProgress = () => {}) => { + return process(backends, onProgress, keysToStrings) } } diff --git a/migrations/migration-8/index.js b/migrations/migration-8/index.js index 5609946..bf3c7de 100644 --- a/migrations/migration-8/index.js +++ 
b/migrations/migration-8/index.js @@ -1,60 +1,82 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const Key = require('interface-datastore').Key -const mb = require('multibase') const log = require('debug')('ipfs:repo:migrator:migration-8') -const uint8ArrayToString = require('uint8arrays/to-string') -const { createStore } = require('../../src/utils') + const length = require('it-length') +const { base32 } = require('multiformats/bases/base32') +const raw = require('multiformats/codecs/raw') +const mhd = require('multiformats/hashes/digest') /** * @typedef {import('../../src/types').Migration} Migration + * @typedef {import('interface-datastore').Datastore} Datastore + */ + +/** + * @param {*} blockstore + * @returns {Datastore} */ + function unwrap (blockstore) { + if (blockstore.child) { + return unwrap(blockstore.child) + } + + return blockstore +} /** * @param {Key} key */ function keyToMultihash (key) { - const buf = mb.decode(`b${key.toString().slice(1)}`) - - // Extract multihash from CID - let multihash = new CID(buf).multihash + try { + const buf = base32.decode(`b${key.toString().toLowerCase().slice(1)}`) - // Encode and slice off multibase codec - multihash = mb.encode('base32', multihash).slice(1) + // Extract multihash from CID + let multihash = CID.decode(buf).multihash.bytes - // Should be uppercase for interop with go - const multihashStr = uint8ArrayToString(multihash).toUpperCase() + // Encode and slice off multibase codec + // Should be uppercase for interop with go + const multihashStr = base32.encode(multihash).slice(1).toUpperCase() - return new Key(`/${multihashStr}`, false) + return new Key(`/${multihashStr}`, false) + } catch (err) { + return key + } } /** * @param {Key} key */ function keyToCid (key) { - const buf = mb.decode(`b${key.toString().slice(1)}`) + try { + const buf = base32.decode(`b${key.toString().toLowerCase().slice(1)}`) + const digest = mhd.decode(buf) - // CID to Key - const multihash = mb.encode('base32', new CID(1, 'raw', buf).bytes).slice(1) + // CID to Key + const multihash = base32.encode(CID.createV1(raw.code, digest).bytes).slice(1) - return new Key(`/${uint8ArrayToString(multihash)}`.toUpperCase(), false) + return new Key(`/${multihash.toUpperCase()}`, false) + } catch { + return key + } } /** - * @param {string} repoPath - * @param {*} repoOptions + * @param {import('../../src/types').Backends} backends * @param {(percent: number, message: string) => void} onProgress * @param {(key: Key) => Key} keyFunction */ -async function process (repoPath, repoOptions, onProgress, keyFunction) { - const blockstore = createStore(repoPath, 'blocks', repoOptions) +async function process (backends, onProgress, keyFunction) { + const blockstore = backends.blocks await blockstore.open() + const unwrapped = unwrap(blockstore) + let blockCount - blockCount = await length(blockstore.queryKeys({ + blockCount = await length(unwrapped.queryKeys({ filters: [(key) => { const newKey = keyFunction(key) @@ -65,15 +87,16 @@ async function process (repoPath, repoOptions, onProgress, keyFunction) { try { let counter = 0 - for await (const block of blockstore.query({})) { + for await (const block of unwrapped.query({})) { const newKey = keyFunction(block.key) // If the Key is base32 CIDv0 then there's nothing to do if(newKey.toString() !== block.key.toString()) { counter += 1 - log(`Migrating Block from ${block.key} to ${newKey}`) - await blockstore.delete(block.key) - await blockstore.put(newKey, block.value) + log(`Migrating 
Block from ${block.key} to ${newKey}`, await unwrapped.has(block.key)) + + await unwrapped.delete(block.key) + await unwrapped.put(newKey, block.value) onProgress((counter / blockCount) * 100, `Migrated Block from ${block.key} to ${newKey}`) } @@ -87,10 +110,10 @@ async function process (repoPath, repoOptions, onProgress, keyFunction) { module.exports = { version: 8, description: 'Transforms key names into base32 encoding and converts Block store to use bare multihashes encoded as base32', - migrate: (repoPath, repoOptions, onProgress = () => {}) => { - return process(repoPath, repoOptions, onProgress, keyToMultihash) + migrate: (backends, onProgress = () => {}) => { + return process(backends, onProgress, keyToMultihash) }, - revert: (repoPath, repoOptions, onProgress = () => {}) => { - return process(repoPath, repoOptions, onProgress, keyToCid) + revert: (backends, onProgress = () => {}) => { + return process(backends, onProgress, keyToCid) } } diff --git a/migrations/migration-9/index.js b/migrations/migration-9/index.js index aaa670d..76ce228 100644 --- a/migrations/migration-9/index.js +++ b/migrations/migration-9/index.js @@ -1,24 +1,25 @@ 'use strict' -const CID = require('cids') -const dagpb = require('ipld-dag-pb') +const { CID } = require('multiformats/cid') +const dagPb = require('@ipld/dag-pb') const cbor = require('cborg') -const multicodec = require('multicodec') -const multibase = require('multibase') const pinset = require('./pin-set') -const { createStore } = require('../../src/utils') const { cidToKey, PIN_DS_KEY, PinTypes } = require('./utils') const length = require('it-length') +const { sha256 } = require('multiformats/hashes/sha2') +const mhd = require('multiformats/hashes/digest') +const { base32 } = require('multiformats/bases/base32') /** * @typedef {import('../../src/types').Migration} Migration * @typedef {import('../../src/types').MigrationProgressCallback} MigrationProgressCallback * @typedef {import('interface-datastore').Datastore} Datastore - * @typedef {import('multicodec').CodecCode} CodecCode + * @typedef {import('interface-blockstore').Blockstore} Blockstore + * @typedef {import('multiformats/cid').CIDVersion} CIDVersion */ /** - * @param {Datastore} blockstore + * @param {Blockstore} blockstore * @param {Datastore} datastore * @param {Datastore} pinstore * @param {MigrationProgressCallback} onProgress @@ -29,9 +30,9 @@ async function pinsToDatastore (blockstore, datastore, pinstore, onProgress) { } const mh = await datastore.get(PIN_DS_KEY) - const cid = new CID(mh) - const pinRootBuf = await blockstore.get(cidToKey(cid)) - const pinRoot = dagpb.util.deserialize(pinRootBuf) + const cid = CID.decode(mh) + const pinRootBuf = await blockstore.get(cid) + const pinRoot = dagPb.decode(pinRootBuf) let counter = 0 let pinCount @@ -40,7 +41,7 @@ async function pinsToDatastore (blockstore, datastore, pinstore, onProgress) { for await (const cid of pinset.loadSet(blockstore, pinRoot, PinTypes.recursive)) { counter++ - /** @type {{ depth: number, version?: CID.CIDVersion, codec?: CodecCode }} */ + /** @type {{ depth: number, version?: CIDVersion, codec?: number }} */ const pin = { depth: Infinity } @@ -49,8 +50,8 @@ async function pinsToDatastore (blockstore, datastore, pinstore, onProgress) { pin.version = cid.version } - if (cid.codec !== 'dag-pb') { - pin.codec = multicodec.getNumber(cid.codec) + if (cid.code !== dagPb.code) { + pin.codec = cid.code } await pinstore.put(cidToKey(cid), cbor.encode(pin)) @@ -61,7 +62,7 @@ async function pinsToDatastore (blockstore, 
datastore, pinstore, onProgress) { for await (const cid of pinset.loadSet(blockstore, pinRoot, PinTypes.direct)) { counter++ - /** @type {{ depth: number, version?: CID.CIDVersion, codec?: CodecCode }} */ + /** @type {{ depth: number, version?: CIDVersion, codec?: number }} */ const pin = { depth: 0 } @@ -70,8 +71,8 @@ async function pinsToDatastore (blockstore, datastore, pinstore, onProgress) { pin.version = cid.version } - if (cid.codec !== 'dag-pb') { - pin.codec = multicodec.getNumber(cid.codec) + if (cid.code !== dagPb.code) { + pin.codec = cid.code } await pinstore.put(cidToKey(cid), cbor.encode(pin)) @@ -79,12 +80,12 @@ async function pinsToDatastore (blockstore, datastore, pinstore, onProgress) { onProgress((counter / pinCount) * 100, `Migrated direct pin ${cid}`) } - await blockstore.delete(cidToKey(cid)) + await blockstore.delete(cid) await datastore.delete(PIN_DS_KEY) } /** - * @param {Datastore} blockstore + * @param {Blockstore} blockstore * @param {Datastore} datastore * @param {Datastore} pinstore * @param {MigrationProgressCallback} onProgress @@ -98,7 +99,11 @@ async function pinsToDAG (blockstore, datastore, pinstore, onProgress) { for await (const { key, value } of pinstore.query({})) { counter++ const pin = cbor.decode(value) - const cid = new CID(pin.version || 0, pin.codec && multicodec.getName(pin.codec) || 'dag-pb', multibase.decode('b' + key.toString().split('/').pop())) + const cid = CID.create( + pin.version || 0, + pin.codec || dagPb.code, + mhd.decode(base32.decode('b' + key.toString().toLowerCase().split('/').pop())) + ) if (pin.depth === 0) { onProgress((counter / pinCount) * 100, `Reverted direct pin ${cid}`) @@ -112,28 +117,29 @@ async function pinsToDAG (blockstore, datastore, pinstore, onProgress) { } onProgress(100, 'Updating pin root') - const pinRoot = new dagpb.DAGNode(new Uint8Array(), [ - await pinset.storeSet(blockstore, PinTypes.recursive, recursivePins), - await pinset.storeSet(blockstore, PinTypes.direct, directPins) - ]) - const buf = pinRoot.serialize() - const cid = await dagpb.util.cid(buf, { - cidVersion: 0 - }) - await blockstore.put(cidToKey(cid), buf) - await datastore.put(PIN_DS_KEY, cid.multihash) + const pinRoot = { + Links: [ + await pinset.storeSet(blockstore, PinTypes.direct, directPins), + await pinset.storeSet(blockstore, PinTypes.recursive, recursivePins) + ] + } + const buf = dagPb.encode(pinRoot) + const digest = await sha256.digest(buf) + const cid = CID.createV0(digest) + + await blockstore.put(cid, buf) + await datastore.put(PIN_DS_KEY, cid.bytes) } /** - * @param {string} repoPath - * @param {*} repoOptions + * @param {import('../../src/types').Backends} backends * @param {MigrationProgressCallback} onProgress * @param {*} fn */ -async function process (repoPath, repoOptions, onProgress, fn) { - const blockstore = createStore(repoPath, 'blocks', repoOptions) - const datastore = createStore(repoPath, 'datastore', repoOptions) - const pinstore = createStore(repoPath, 'pins', repoOptions) +async function process (backends, onProgress, fn) { + const blockstore = backends.blocks + const datastore = backends.datastore + const pinstore = backends.pins await blockstore.open() await datastore.open() @@ -152,10 +158,10 @@ async function process (repoPath, repoOptions, onProgress, fn) { module.exports = { version: 9, description: 'Migrates pins to datastore', - migrate: (repoPath, repoOptions, onProgress = () => {}) => { - return process(repoPath, repoOptions, onProgress, pinsToDatastore) + migrate: (backends, onProgress = () => {}) 
=> { + return process(backends, onProgress, pinsToDatastore) }, - revert: (repoPath, repoOptions, onProgress = () => {}) => { - return process(repoPath, repoOptions, onProgress, pinsToDAG) + revert: (backends, onProgress = () => {}) => { + return process(backends, onProgress, pinsToDAG) } } diff --git a/migrations/migration-9/pin-set.js b/migrations/migration-9/pin-set.js index 10d2b39..43008b9 100644 --- a/migrations/migration-9/pin-set.js +++ b/migrations/migration-9/pin-set.js @@ -1,6 +1,6 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const { ipfs: { pin: { @@ -12,19 +12,18 @@ const { // @ts-ignore const fnv1a = require('fnv1a') const varint = require('varint') -const dagpb = require('ipld-dag-pb') -const DAGNode = require('ipld-dag-pb/src/dag-node/dagNode') -const DAGLink = require('ipld-dag-pb/src/dag-link/dagLink') -const multihash = require('multihashing-async').multihash -const { cidToKey, DEFAULT_FANOUT, MAX_ITEMS, EMPTY_KEY } = require('./utils') +const dagPb = require('@ipld/dag-pb') +const { DEFAULT_FANOUT, MAX_ITEMS, EMPTY_KEY } = require('./utils') const uint8ArrayConcat = require('uint8arrays/concat') const uint8ArrayCompare = require('uint8arrays/compare') const uint8ArrayToString = require('uint8arrays/to-string') const uint8ArrayFromString = require('uint8arrays/from-string') -const uint8ArrayEquals = require('uint8arrays/equals') +const { sha256 } = require('multiformats/hashes/sha2') /** * @typedef {import('interface-datastore').Datastore} Datastore + * @typedef {import('interface-blockstore').Blockstore} Blockstore + * @typedef {import('@ipld/dag-pb').PBNode} PBNode * * @typedef {object} Pin * @property {CID} key @@ -32,19 +31,17 @@ const uint8ArrayEquals = require('uint8arrays/equals') */ /** - * @param {Uint8Array | CID} hash - */ -function toB58String (hash) { - return new CID(hash).toBaseEncodedString() -} - -/** - * @param {DAGNode} rootNode + * @param {PBNode} rootNode */ function readHeader (rootNode) { // rootNode.data should be a buffer of the format: // < varint(headerLength) | header | itemData... 
> const rootData = rootNode.Data + + if (!rootData) { + throw new Error('No data present') + } + const hdrLength = varint.decode(rootData) const vBytes = varint.decode.bytes @@ -86,15 +83,15 @@ function hash (seed, key) { const buffer = new Uint8Array(4) const dataView = new DataView(buffer.buffer) dataView.setUint32(0, seed, true) - const encodedKey = uint8ArrayFromString(toB58String(key)) + const encodedKey = uint8ArrayFromString(key.toString()) const data = uint8ArrayConcat([buffer, encodedKey], buffer.byteLength + encodedKey.byteLength) return fnv1a(uint8ArrayToString(data)) } /** - * @param {Datastore} blockstore - * @param {DAGNode} node + * @param {Blockstore} blockstore + * @param {PBNode} node * @returns {AsyncGenerator} */ async function * walkItems (blockstore, node) { @@ -107,10 +104,10 @@ async function * walkItems (blockstore, node) { // if a fanout bin is not 'empty', dig into and walk its DAGLinks const linkHash = link.Hash - if (!uint8ArrayEquals(EMPTY_KEY, linkHash.bytes)) { + if (!EMPTY_KEY.equals(linkHash)) { // walk the links of this fanout bin - const buf = await blockstore.get(cidToKey(linkHash)) - const node = dagpb.util.deserialize(buf) + const buf = await blockstore.get(linkHash) + const node = dagPb.decode(buf) yield * walkItems(blockstore, node) } @@ -124,8 +121,8 @@ async function * walkItems (blockstore, node) { } /** - * @param {Datastore} blockstore - * @param {DAGNode} rootNode + * @param {Blockstore} blockstore + * @param {PBNode} rootNode * @param {string} name */ async function * loadSet (blockstore, rootNode, name) { @@ -135,14 +132,14 @@ async function * loadSet (blockstore, rootNode, name) { throw new Error('No link found with name ' + name) } - const buf = await blockstore.get(cidToKey(link.Hash)) - const node = dagpb.util.deserialize(buf) + const buf = await blockstore.get(link.Hash) + const node = dagPb.decode(buf) yield * walkItems(blockstore, node) } /** - * @param {Datastore} blockstore + * @param {Blockstore} blockstore * @param {Pin[]} items */ function storeItems (blockstore, items) { @@ -164,14 +161,22 @@ function storeItems (blockstore, items) { const fanoutLinks = [] for (let i = 0; i < DEFAULT_FANOUT; i++) { - fanoutLinks.push(new DAGLink('', 1, EMPTY_KEY)) + fanoutLinks.push({ + Name: '', + Tsize: 1, + Hash: EMPTY_KEY + }) } if (pins.length <= MAX_ITEMS) { const nodes = pins .map(item => { return ({ - link: new DAGLink('', 1, item.key), + link: { + Name: '', + Tsize: 1, + Hash: item.key + }, data: item.data || new Uint8Array() }) }) @@ -183,7 +188,10 @@ function storeItems (blockstore, items) { const rootLinks = fanoutLinks.concat(nodes.map(item => item.link)) const rootData = uint8ArrayConcat([headerBuf, ...nodes.map(item => item.data)]) - return new DAGNode(rootData, rootLinks) + return { + Data: rootData, + Links: rootLinks + } } else { // If the array of pins is > MAX_ITEMS, we: // - distribute the pins among `DEFAULT_FANOUT` bins @@ -212,28 +220,36 @@ function storeItems (blockstore, items) { idx++ } - return new DAGNode(headerBuf, fanoutLinks) + return { + Data: headerBuf, + Links: fanoutLinks + } } /** - * @param {DAGNode} child + * @param {PBNode} child * @param {number} binIdx */ async function storeChild (child, binIdx) { - const buf = dagpb.util.serialize(child) - const cid = await dagpb.util.cid(buf, { - cidVersion: 0, - hashAlg: multihash.names['sha2-256'] - }) - await blockstore.put(cidToKey(cid), buf) + const buf = dagPb.encode(child) + const digest = await sha256.digest(buf) + const cid = CID.createV0(digest) + + await 
blockstore.put(cid, buf) - fanoutLinks[binIdx] = new DAGLink('', child.size, cid) + let size = child.Links.reduce((acc, curr) => acc + (curr?.Tsize || 0), 0) + buf.length + + fanoutLinks[binIdx] = { + Name: '', + Tsize: size, + Hash: cid + } } } } /** - * @param {Datastore} blockstore + * @param {Blockstore} blockstore * @param {string} type * @param {CID[]} cids */ @@ -243,15 +259,19 @@ async function storeSet (blockstore, type, cids) { key: cid } })) - const buf = rootNode.serialize() - const cid = await dagpb.util.cid(buf, { - cidVersion: 0, - hashAlg: multihash.names['sha2-256'] - }) + const buf = dagPb.encode(rootNode) + const digest = await sha256.digest(buf) + const cid = CID.createV0(digest) + + await blockstore.put(cid, buf) - await blockstore.put(cidToKey(cid), buf) + let size = rootNode.Links.reduce((acc, curr) => acc + curr.Tsize, 0) + buf.length - return new DAGLink(type, rootNode.size, cid) + return { + Name: type, + Tsize: size, + Hash: cid + } } module.exports = { diff --git a/migrations/migration-9/utils.js b/migrations/migration-9/utils.js index 68dab62..a479ccc 100644 --- a/migrations/migration-9/utils.js +++ b/migrations/migration-9/utils.js @@ -1,13 +1,13 @@ 'use strict' -const multibase = require('multibase') const { Key } = require('interface-datastore') -const multihashes = require('multihashing-async').multihash +const { base32 } = require('multiformats/bases/base32') +const { CID } = require('multiformats') const PIN_DS_KEY = new Key('/local/pins') const DEFAULT_FANOUT = 256 const MAX_ITEMS = 8192 -const EMPTY_KEY = multihashes.fromB58String('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') +const EMPTY_KEY = CID.parse('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') const PinTypes = { direct: 'direct', @@ -15,10 +15,10 @@ const PinTypes = { } /** - * @param {import('cids')} cid + * @param {import('multiformats').CID} cid */ function cidToKey (cid) { - return new Key(`/${multibase.encoding('base32upper').encode(cid.multihash)}`) + return new Key(`/${base32.encode(cid.multihash.bytes).toUpperCase().substring(1)}`) } module.exports = { diff --git a/package.json b/package.json index af17783..15bf3c6 100644 --- a/package.json +++ b/package.json @@ -20,7 +20,6 @@ "types": "./dist/src/index.d.ts", "main": "src/index.js", "browser": { - "./src/repo/lock.js": "./src/repo/lock-memory.js", "datastore-fs": "datastore-level" }, "repository": { @@ -37,7 +36,7 @@ "test:node": "aegir test --target node", "test:browser": "aegir test --target browser", "build": "aegir build", - "lint": "aegir lint", + "lint": "aegir ts -p check && aegir lint", "release": "aegir release --docs", "release-minor": "aegir release --type minor --docs", "release-major": "aegir release --type major --docs", @@ -46,32 +45,31 @@ "docs": "aegir docs" }, "dependencies": { - "cborg": "^1.0.4", - "cids": "^1.0.0", - "datastore-core": "^4.0.0", + "@ipld/dag-pb": "^2.0.0", + "cborg": "^1.3.1", + "datastore-core": "^5.0.0", "debug": "^4.1.0", "fnv1a": "^1.0.1", - "interface-datastore": "^4.0.0", - "ipld-dag-pb": "^0.22.1", + "interface-blockstore": "^1.0.0", + "interface-datastore": "^5.0.0", "it-length": "^1.0.1", - "multibase": "^4.0.1", - "multicodec": "^3.0.1", - "multihashing-async": "^2.0.0", + "multiformats": "^9.0.0", "proper-lockfile": "^4.1.1", "protobufjs": "^6.10.2", "uint8arrays": "^2.0.5", "varint": "^6.0.0" }, "devDependencies": { - "@ipld/car": "^0.1.3", + "@ipld/car": "^3.0.0", "@types/debug": "^4.1.5", "@types/varint": "^6.0.0", "aegir": "^33.0.0", "assert": "^2.0.0", "aws-sdk": "^2.884.0", - 
"datastore-fs": "^4.0.0", - "datastore-level": "^5.0.0", - "datastore-s3": "^5.0.0", + "blockstore-datastore-adapter": "1.0.0", + "datastore-fs": "^5.0.0", + "datastore-level": "^6.0.0", + "datastore-s3": "^6.0.0", "events": "^3.2.0", "it-all": "^1.0.2", "just-safe-set": "^2.1.0", @@ -79,9 +77,8 @@ "level-6": "npm:level@^6.0.0", "ncp": "^2.0.0", "npm-run-all": "^4.1.5", - "readable-stream": "^3.6.0", "rimraf": "^3.0.0", - "sinon": "^10.0.0", + "sinon": "^11.1.1", "stand-in": "^4.2.0", "util": "^0.12.3" }, diff --git a/scripts/node-globals.js b/scripts/node-globals.js index 07e0479..8c4e233 100644 --- a/scripts/node-globals.js +++ b/scripts/node-globals.js @@ -1,4 +1,3 @@ // file: node-globals.js // @ts-nocheck export const { Buffer } = require('buffer') -export const process = require('process/browser') diff --git a/src/index.js b/src/index.js index 0893bf6..0e111ff 100644 --- a/src/index.js +++ b/src/index.js @@ -3,9 +3,8 @@ const defaultMigrations = require('../migrations') const repoVersion = require('./repo/version') -const repoLock = require('./repo/lock') const errors = require('./errors') - +const { wrapBackends } = require('./utils') const log = require('debug')('ipfs:repo:migrator') /** @@ -14,9 +13,6 @@ const log = require('debug')('ipfs:repo:migrator') * @typedef {import('./types').MigrationProgressCallback} MigrationProgressCallback */ -exports.getCurrentRepoVersion = repoVersion.getVersion -exports.errors = errors - /** * Returns the version of latest migration. * If no migrations are present returns 0. @@ -33,8 +29,6 @@ function getLatestMigrationVersion (migrations) { return migrations[migrations.length - 1].version } -exports.getLatestMigrationVersion = getLatestMigrationVersion - /** * Main function to execute forward migrations. * It acquire lock on the provided path before doing any migrations. @@ -42,7 +36,8 @@ exports.getLatestMigrationVersion = getLatestMigrationVersion * Signature of the progress callback is: function(migrationObject: object, currentMigrationNumber: int, totalMigrationsCount: int) * * @param {string} path - Path to initialized (!) JS-IPFS repo - * @param {object} repoOptions - Options that are passed to migrations, that can use them to correctly construct datastore. Options are same like for IPFSRepo. + * @param {import('./types').Backends} backends + * @param {import('./types').RepoOptions} repoOptions - Options that are passed to migrations, that can use them to correctly construct datastore. Options are same like for IPFSRepo. * @param {number} toVersion - Version to which the repo should be migrated. * @param {object} [options] - Options for migration * @param {boolean} [options.ignoreLock] - Won't lock the repo for applying the migrations. Use with caution. @@ -50,7 +45,7 @@ exports.getLatestMigrationVersion = getLatestMigrationVersion * @param {boolean} [options.isDryRun] - Allows to simulate the execution of the migrations without any effect. * @param {Migration[]} [options.migrations] - Array of migrations to perform. If undefined, the bundled migrations are used. Mainly for testing purpose. 
*/ -async function migrate (path, repoOptions, toVersion, { ignoreLock = false, onProgress, isDryRun = false, migrations }) { +async function migrate (path, backends, repoOptions, toVersion, { ignoreLock = false, onProgress, isDryRun = false, migrations }) { migrations = migrations || defaultMigrations if (!path) { @@ -69,7 +64,10 @@ async function migrate (path, repoOptions, toVersion, { ignoreLock = false, onPr throw new errors.InvalidValueError('Version has to be positive integer!') } - const currentVersion = await repoVersion.getVersion(path, repoOptions) + // make sure we can read pre-level@5 datastores + backends = wrapBackends(backends) + + const currentVersion = await repoVersion.getVersion(backends) if (currentVersion === toVersion) { log('Nothing to migrate.') @@ -85,7 +83,7 @@ let lock if (!isDryRun && !ignoreLock) { - lock = await repoLock.lock(currentVersion, path, repoOptions) + lock = await repoOptions.repoLock.lock(path) } try { @@ -109,13 +107,13 @@ progressCallback = (percent, message) => onProgress(migration.version, percent.toFixed(2), message) } - await migration.migrate(path, repoOptions, progressCallback) + await migration.migrate(backends, progressCallback) } } catch (e) { const lastSuccessfullyMigratedVersion = migration.version - 1 log(`An exception was raised during execution of migration. Setting the repo's version to last successfully migrated version: ${lastSuccessfullyMigratedVersion}`) - await repoVersion.setVersion(path, lastSuccessfullyMigratedVersion, repoOptions) + await repoVersion.setVersion(lastSuccessfullyMigratedVersion, backends) throw new Error(`During migration to version ${migration.version} exception was raised: ${e.stack || e.message || e}`) } @@ -124,7 +122,7 @@ } if (!isDryRun) { - await repoVersion.setVersion(path, toVersion || getLatestMigrationVersion(migrations), repoOptions) + await repoVersion.setVersion(toVersion || getLatestMigrationVersion(migrations), backends) } log('Repo successfully migrated', toVersion !== undefined ? `to version ${toVersion}!` : 'to latest version!') @@ -135,8 +133,6 @@ } } -exports.migrate = migrate -
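Taken together, the src/index.js changes move responsibility for constructing storage to the caller: the backends are passed in as ready-made stores and the lock implementation arrives via repoOptions.repoLock instead of being resolved internally. A minimal sketch of the new calling convention, assuming the in-memory stores that the test suite below also uses; the repo path and the no-op lock are purely illustrative:

const migrator = require('ipfs-repo-migrations')
const { MemoryDatastore } = require('interface-datastore')
const { MemoryBlockstore } = require('interface-blockstore')

async function main () {
  // the caller now constructs every backend itself; previously the migrator
  // instantiated them from repoOptions.storageBackends
  const backends = {
    root: new MemoryDatastore(),
    blocks: new MemoryBlockstore(),
    keys: new MemoryDatastore(),
    datastore: new MemoryDatastore(),
    pins: new MemoryDatastore()
  }

  const repoOptions = {
    // migrate() itself only touches repoLock; a real caller would supply a
    // proper file- or memory-based lock rather than this no-op stand-in
    repoLock: {
      lock: async (path) => ({ close: async () => {} }),
      locked: async (path) => false
    }
  }

  // migrate an already-initialized repo up to version 10, logging progress
  await migrator.migrate('/path/to/repo', backends, repoOptions, 10, {
    onProgress: (version, percent, message) => console.log(version, percent, message)
  })
}

main().catch(console.error)
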
/** * Main function to execute backward migration (reversion). * It acquire lock on the provided path before doing any migrations. * * Signature of the progress callback is: function(migrationObject: object, currentMigrationNumber: int, totalMigrationsCount: int) * * @param {string} path - Path to initialized (!) JS-IPFS repo - * @param {object} repoOptions - Options that are passed to migrations, that can use them to correctly construct datastore. Options are same like for IPFSRepo. + * @param {import('./types').Backends} backends + * @param {import('./types').RepoOptions} repoOptions - Options that are passed to migrations, which can use them to correctly construct their datastores. Options are the same as for IPFSRepo. * @param {number} toVersion - Version to which the repo will be reverted. * @param {object} [options] - Options for the reversion * @param {ProgressCallback} [options.onProgress] - Callback which will be called after each reverted migration to report progress @@ -152,7 +149,7 @@ exports.migrate * @param {boolean} [options.ignoreLock] - Won't lock the repo for reverting the migrations. Use with caution. * @param {Migration[]} [options.migrations] - Array of migrations to migrate. If undefined, the bundled migrations are used. Mainly for testing purpose. */ -async function revert (path, repoOptions, toVersion, { ignoreLock = false, onProgress, isDryRun = false, migrations }) { +async function revert (path, backends, repoOptions, toVersion, { ignoreLock = false, onProgress, isDryRun = false, migrations }) { migrations = migrations || defaultMigrations if (!path) { @@ -171,7 +168,10 @@ async function revert (path, repoOptions, toVersion, { ignoreLock = false, onPro throw new errors.InvalidValueError('Version has to be positive integer!') } - const currentVersion = await repoVersion.getVersion(path, repoOptions) + // make sure we can read pre-level@5 datastores + backends = wrapBackends(backends) + + const currentVersion = await repoVersion.getVersion(backends) if (currentVersion === toVersion) { log('Nothing to revert.') @@ -186,7 +186,7 @@ let lock if (!isDryRun && !ignoreLock) { - lock = await repoLock.lock(currentVersion, path, repoOptions) + lock = await repoOptions.repoLock.lock(path) } log(`Reverting from version ${currentVersion} to ${toVersion}`) @@ -214,12 +214,12 @@ progressCallback = (percent, message) => onProgress(migration.version, percent.toFixed(2), message) } - await migration.revert(path, repoOptions, progressCallback) + await migration.revert(backends, progressCallback) } } catch (e) { const lastSuccessfullyRevertedVersion = migration.version log(`An exception was raised during execution of migration. Setting the repo's version to last successfully reverted version: ${lastSuccessfullyRevertedVersion}`) - await repoVersion.setVersion(path, lastSuccessfullyRevertedVersion, repoOptions) + await repoVersion.setVersion(lastSuccessfullyRevertedVersion, backends) e.message = `During reversion to version ${migration.version} exception was raised: ${e.message}` throw e @@ -229,7 +229,7 @@ } if (!isDryRun) { - await repoVersion.setVersion(path, toVersion, repoOptions) + await repoVersion.setVersion(toVersion, backends) } log(`All migrations successfully reverted to version ${toVersion}!`) @@ -240,8 +240,6 @@ } } -exports.revert = revert - /** * Function checks if all migrations in given range are available.
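 * (i.e. that a migration exists for every version step in the range and, when reverting, that each one defines a revert function)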
* @@ -270,3 +268,11 @@ function verifyAvailableMigrations (migrations, fromVersion, toVersion, checkRev throw new errors.InvalidValueError(`The ipfs-repo-migrations package does not have all migration to migrate from version ${fromVersion} to ${toVersion}`) } } + +module.exports = { + getCurrentRepoVersion: repoVersion.getVersion, + getLatestMigrationVersion, + errors, + migrate, + revert +} diff --git a/src/repo/init.js b/src/repo/init.js index ad0b4c1..30374ee 100644 --- a/src/repo/init.js +++ b/src/repo/init.js @@ -1,21 +1,20 @@ 'use strict' const log = require('debug')('ipfs:repo:migrator:repo:init') -const { CONFIG_KEY, VERSION_KEY, createStore } = require('../utils') +const { CONFIG_KEY, VERSION_KEY } = require('../utils') const { MissingRepoOptionsError } = require('../errors') /** - * @param {string} path - * @param {any} repoOptions + * @param {import('../types').Backends} backends */ -async function isRepoInitialized (path, repoOptions) { - if (!repoOptions) { +async function isRepoInitialized (backends) { + if (!backends) { throw new MissingRepoOptionsError('Please pass repo options when trying to open a repo') } - let root + const root = backends.root + try { - root = createStore(path, 'root', repoOptions) await root.open() const versionCheck = await root.has(VERSION_KEY) const configCheck = await root.has(CONFIG_KEY) @@ -31,7 +30,9 @@ async function isRepoInitialized (path, repoOptions) { return false } finally { if (root !== undefined) { - await root.close() + try { + await root.close() + } catch {} } } } diff --git a/src/repo/lock-memory.js b/src/repo/lock-memory.js deleted file mode 100644 index 1ce232b..0000000 --- a/src/repo/lock-memory.js +++ /dev/null @@ -1,35 +0,0 @@ -'use strict' - -const debug = require('debug') -const log = debug('ipfs:repo:migrator:repo_mem_lock') -const lockFile = 'repo.lock' - -/** - * @type {Record} - */ -const LOCKS = {} - -/** - * Lock the repo in the given dir and for given repo version. - * - * @param {number} version - * @param {string} dir - */ -exports.lock = async function lock (version, dir) { // eslint-disable-line require-await - const file = dir + '/' + lockFile - log('locking %s', file) - - if (LOCKS[file] === true) { - throw Error(`There is already present lock for: ${file}`) - } - - LOCKS[file] = true - return { - close () { - if (LOCKS[file]) { - log('releasing lock %s', file) - delete LOCKS[file] - } - } - } -} diff --git a/src/repo/lock.js b/src/repo/lock.js deleted file mode 100644 index 03574e9..0000000 --- a/src/repo/lock.js +++ /dev/null @@ -1,37 +0,0 @@ -'use strict' - -const debug = require('debug') -// @ts-ignore -const { lock: properLock } = require('proper-lockfile') -const { lock: memoryLock } = require('./lock-memory') - -const log = debug('ipfs:repo:migrator:repo_fs_lock') -const lockFile = 'repo.lock' - -/** - * Lock the repo in the given dir and given version. 
- * - * @param {number} version - * @param {string} dir - * @param {object} [repoOptions] - * @param {string} [repoOptions.lock] - */ -async function lock (version, dir, repoOptions) { - if (repoOptions && repoOptions.lock === 'memory') { - return memoryLock(version, dir) - } - - const file = `${dir}/${lockFile}` - log('locking %s', file) - const release = await properLock(dir, { lockfilePath: file }) - return { - close: () => { - log('releasing lock %s', file) - return release() - } - } -} - -module.exports = { - lock -} diff --git a/src/repo/version.js b/src/repo/version.js index 07ab88f..724f58d 100644 --- a/src/repo/version.js +++ b/src/repo/version.js @@ -2,7 +2,7 @@ const repoInit = require('./init') const { MissingRepoOptionsError, NotInitializedRepoError } = require('../errors') -const { VERSION_KEY, createStore } = require('../utils') +const { VERSION_KEY } = require('../utils') const uint8ArrayFromString = require('uint8arrays/from-string') const uint8ArrayToString = require('uint8arrays/to-string') @@ -11,19 +11,14 @@ const uint8ArrayToString = require('uint8arrays/to-string') * This function needs to be cross-repo-version functional to be able to fetch any version number, * even in case of change of repo's versioning. * - * @param {string} path - * @param {Object} repoOptions - Options used to create a repo, the same as pased to ipfs-repo + * @param {import('../types').Backends} backends */ -async function getVersion (path, repoOptions) { - if (!(await repoInit.isRepoInitialized(path, repoOptions))) { - throw new NotInitializedRepoError(`Repo in path ${path} is not initialized!`) +async function getVersion (backends) { + if (!(await repoInit.isRepoInitialized(backends))) { + throw new NotInitializedRepoError('Repo is not initialized!') } - if (!repoOptions) { - throw new MissingRepoOptionsError('Please pass repo options when trying to open a repo') - } - - const store = createStore(path, 'root', repoOptions) + const store = backends.root await store.open() try { @@ -36,16 +31,15 @@ async function getVersion (path, repoOptions) { /** * Function for setting a version in cross-repo-version manner. 
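 * The version is written to the root datastore under the /version key as a UTF-8 string, so it stays readable across repo versions.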
* - * @param {string} path * @param {number} version - * @param {Object} repoOptions - Options used to create a repo, the same as pased to ipfs-repo + * @param {import('../types').Backends} backends */ -async function setVersion (path, version, repoOptions) { - if (!repoOptions) { +async function setVersion (version, backends) { + if (!backends) { throw new MissingRepoOptionsError('Please pass repo options when trying to open a repo') } - const store = createStore(path, 'root', repoOptions) + const store = backends.root await store.open() await store.put(VERSION_KEY, uint8ArrayFromString(String(version))) await store.close() diff --git a/src/types.d.ts b/src/types.d.ts index 87704d1..89c0e99 100644 --- a/src/types.d.ts +++ b/src/types.d.ts @@ -1,3 +1,5 @@ +import type { Datastore } from 'interface-datastore' +import type { Blockstore } from 'interface-blockstore' export type ProgressCallback = (version: number, progress: string, message: string) => void @@ -6,6 +8,54 @@ export type MigrationProgressCallback = (percent: number, message: string) => vo export interface Migration { version: number description: string - migrate: (repoPath: string, repoOptions: any, onProgress: MigrationProgressCallback) => Promise<void>, - revert: (repoPath: string, repoOptions: any, onProgress: MigrationProgressCallback) => Promise<void> + migrate: (backends: Backends, onProgress: MigrationProgressCallback) => Promise<void>, + revert: (backends: Backends, onProgress: MigrationProgressCallback) => Promise<void> +} + +export interface Backends { + root: Datastore + blocks: Blockstore + keys: Datastore + datastore: Datastore + pins: Datastore +} + +export interface LockCloser { + close: () => Promise<void> +} + +export interface RepoLock { + /** + * Sets the lock if one does not already exist. If a lock already exists, should throw an error. + */ + lock: (path: string) => Promise<LockCloser> + + /** + * Checks the existence of the lock. + */ + locked: (path: string) => Promise<boolean> +} + +export interface RepoOptions { + /** + * Controls automatic migrations of repository. (default: true) + */ + autoMigrate: boolean + /** + * Callback function to be notified of migration progress + */ + onMigrationProgress: (version: number, percentComplete: string, message: string) => void + + /** + * If multiple processes are accessing the same repo - e.g. via node cluster or browser UI and webworkers, + * one instance must be designated the repo owner to hold the lock on shared resources like the datastore. + * + * Set this property to true on one instance only if this is how your application is set up.
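+   *
+   * For example, in a browser app the tab or thread that owns the repo would set repoOwner: true, while webworkers sharing it would set repoOwner: false.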
+ */ + repoOwner: boolean + + /** + * A lock implementation that prevents multiple processes accessing the same repo + */ + repoLock: RepoLock } diff --git a/src/utils.js b/src/utils.js index 854605a..04cc08e 100644 --- a/src/utils.js +++ b/src/utils.js @@ -4,8 +4,6 @@ const { Key, Errors } = require('interface-datastore') -const core = require('datastore-core') -const ShardingStore = core.ShardingDatastore /** * @typedef {import('interface-datastore').Datastore} Datastore @@ -14,33 +12,6 @@ const ShardingStore = core.ShardingDatastore const CONFIG_KEY = new Key('/config') const VERSION_KEY = new Key('/version') -/** - * @param {string} name - * @param {*} options - */ -function getDatastoreAndOptions (name, options) { - if (!options || !options.storageBackends) { - throw new Error('Please pass storage backend definitions') - } - - if (!options.storageBackends[name]) { - throw new Error(`Storage backend '${name}' not defined in config`) - } - - const StorageBackend = options.storageBackends[name] - - let storageBackendOptions = {} - - if (options.storageBackendOptions !== undefined && options.storageBackendOptions[name] !== undefined) { - storageBackendOptions = options.storageBackendOptions[name] - } - - return { - StorageBackend: StorageBackend, - storageOptions: storageBackendOptions - } -} - /** * Level dbs wrap level dbs that wrap level dbs. Find a level-js * instance in the chain if one exists. @@ -112,7 +83,7 @@ async function getWithFallback (key, get, has, store) { } // Newer versions of level.js changed the key type from Uint8Array|string - // to Uint8Array so fall back to trying Uint8Arrays if we are using level.js + // to Uint8Array so fall back to trying Uint8Arrays if we are using level.js // and the string version of the key did not work const levelJs = findLevelJs(store) @@ -138,25 +109,9 @@ async function getWithFallback (key, get, has, store) { } /** - * @param {string} location - * @param {string} name - * @param {*} options - * @returns {Datastore} + * @param {Datastore} store */ -function createStore (location, name, options) { - const { StorageBackend, storageOptions } = getDatastoreAndOptions(name, options) - - if (name !== 'root') { - location = `${location}/${name}` - } - - let store = new StorageBackend(location, storageOptions) - - // @ts-ignore - if (storageOptions.sharding) { - store = new ShardingStore(store, new core.shard.NextToLast(2)) - } - +function wrapStore (store) { // necessary since level-js@5 cannot read keys from level-js@4 and earlier const originalGet = store.get.bind(store) const originalHas = store.has.bind(store) @@ -172,8 +127,21 @@ function createStore (location, name, options) { return store } +/** + * @param {import('./types').Backends} backends + */ +function wrapBackends (backends) { + return { + ...backends, + root: wrapStore(backends.root), + datastore: wrapStore(backends.datastore), + pins: wrapStore(backends.pins), + keys: wrapStore(backends.keys) + } +} + module.exports = { - createStore, + wrapBackends, hasWithFallback, getWithFallback, findLevelJs, diff --git a/test/browser.js b/test/browser.js index 6627ebf..406db3b 100644 --- a/test/browser.js +++ b/test/browser.js @@ -3,6 +3,8 @@ const DatastoreLevel = require('datastore-level') const DatastoreS3 = require('datastore-s3') +const { ShardingDatastore, shard: { NextToLast } } = require('datastore-core') +const BlockstoreDatastoreAdapter = require('blockstore-datastore-adapter') const mockS3 = require('./fixtures/mock-s3') const S3 = require('aws-sdk').S3 const s3Instance = new 
S3({ @@ -13,6 +15,10 @@ const s3Instance = new S3({ mockS3(s3Instance) const { createRepo } = require('./fixtures/repo') +/** + * @typedef {import('../src/types').Backends} Backends + */ + async function deleteDb (dir) { return new Promise((resolve) => { const req = globalThis.indexedDB.deleteDatabase(dir) @@ -39,104 +45,94 @@ async function cleanup (dir) { const CONFIGURATIONS = [{ name: 'local', cleanup, - repoOptions: { - lock: 'memory', - storageBackends: { - root: DatastoreLevel, - blocks: DatastoreLevel, - keys: DatastoreLevel, - datastore: DatastoreLevel, - pins: DatastoreLevel - }, - storageBackendOptions: { - root: { - extension: '', - prefix: '', + /** + * @param {string} prefix + * @returns {import('../src/types').Backends} + */ + createBackends: (prefix) => { + return { + root: new DatastoreLevel(prefix, { version: 2 - }, - blocks: { - sharding: false, - prefix: '', + }), + blocks: new BlockstoreDatastoreAdapter( + new DatastoreLevel(`${prefix}/blocks`, { + extension: '.data', + version: 2 + }) + ), + datastore: new DatastoreLevel(`${prefix}/datastore`, { version: 2 - }, - keys: { - sharding: false, - prefix: '', + }), + keys: new DatastoreLevel(`${prefix}/keys`, { version: 2 - }, - datastore: { - sharding: false, - prefix: '', + }), + pins: new DatastoreLevel(`${prefix}/pins`, { version: 2 - } + }) } } }, { name: 'with s3', cleanup: () => {}, - repoOptions: { - lock: 'memory', - storageBackends: { - root: DatastoreS3, - blocks: DatastoreS3, - datastore: DatastoreS3, - keys: DatastoreS3, - pins: DatastoreS3 - }, - storageBackendOptions: { - root: { - sharding: true, - extension: '', - s3: s3Instance, - createIfMissing: false - }, - blocks: { - sharding: true, - extension: '.data', - s3: s3Instance, - createIfMissing: false - }, - datastore: { - sharding: true, - s3: s3Instance, - createIfMissing: false - }, - keys: { - sharding: true, - s3: s3Instance, - createIfMissing: false - }, - pins: { - sharding: true, + createBackends: (prefix) => { + return { + root: new DatastoreS3(prefix, { s3: s3Instance, createIfMissing: false - } + }), + blocks: new BlockstoreDatastoreAdapter( + new ShardingDatastore( + new DatastoreS3(`${prefix}/blocks`, { + s3: s3Instance, + createIfMissing: false, + extension: '.data' + }), + new NextToLast(2) + ) + ), + datastore: new ShardingDatastore( + new DatastoreS3(`${prefix}/datastore`, { + s3: s3Instance, + createIfMissing: false + }), + new NextToLast(2) + ), + keys: new ShardingDatastore( + new DatastoreS3(`${prefix}/keys`, { + s3: s3Instance, + createIfMissing: false + }), + new NextToLast(2) + ), + pins: new ShardingDatastore( + new DatastoreS3(`${prefix}/pins`, { + s3: s3Instance, + createIfMissing: false + }), + new NextToLast(2) + ) } } }] -CONFIGURATIONS.forEach(({ name, repoOptions, cleanup }) => { - const setup = () => createRepo(repoOptions) +CONFIGURATIONS.forEach(({ name, createBackends, cleanup }) => { + const setup = (options) => createRepo(createBackends, options) - describe('lock.js tests', () => { - describe('mem-lock tests', () => { - require('./lock-test')(require('../src/repo/lock-memory'), setup, cleanup, repoOptions) + describe(name, () => { + describe('version tests', () => { + require('./version-test')(setup, cleanup) }) - }) - - describe('version tests', () => { - require('./version-test')(setup, cleanup, repoOptions) - }) - describe('migrations tests', () => { - require('./migrations')(setup, cleanup, repoOptions) - }) + describe('migrations tests', () => { + require('./migrations')(setup, cleanup) + }) - describe('init 
tests', () => { - require('./init-test')(setup, cleanup, repoOptions) - }) + describe('init tests', () => { + require('./init-test')(setup, cleanup) + }) - describe('integration tests', () => { - require('./integration-test')(setup, cleanup, repoOptions) + describe('integration tests', () => { + require('./integration-test')(setup, cleanup) + }) }) }) diff --git a/test/fixtures/generate-car-files.js b/test/fixtures/generate-car-files.js index f44b80e..28eb97f 100644 --- a/test/fixtures/generate-car-files.js +++ b/test/fixtures/generate-car-files.js @@ -94,9 +94,9 @@ const main = async () => { console.info(` car: loadFixture('test/fixtures/${fileName}'),`) const buf = await ipfs.libp2p.datastore.get(PIN_DS_KEY) - const cid = new CID(buf) + const cid = CID.decode(buf) - console.info(` root: new CID('${cid}'),`) + console.info(` root: CID.parse('${cid}'),`) const { writer, out } = await CarWriter.create([cid]) Readable.from(out).pipe(fs.createWriteStream(path.join(__dirname, fileName))) diff --git a/test/fixtures/repo.js b/test/fixtures/repo.js index ab09346..3b8e81f 100644 --- a/test/fixtures/repo.js +++ b/test/fixtures/repo.js @@ -1,25 +1,27 @@ 'use strict' const loadFixture = require('aegir/utils/fixtures') -const { CONFIG_KEY, VERSION_KEY, createStore } = require('../../src/utils') +const { CONFIG_KEY, VERSION_KEY } = require('../../src/utils') -async function createRepo (repoOptions, prefix) { +async function createRepo (createBackends, prefix) { const dir = `${prefix ? `${prefix}/` : ''}test-repo-for-${Date.now()}` - const store = createStore(dir, 'root', repoOptions) - await store.open() - await store.close() + const backends = createBackends(dir) - return dir + await backends.root.open() + await backends.root.close() + + return { + dir, + backends + } } -async function initRepo (dir, repoOptions) { - const store = createStore(dir, 'root', repoOptions) +async function initRepo (backends) { + const store = backends.root await store.open() await store.put(VERSION_KEY, loadFixture('test/fixtures/test-repo/version')) await store.put(CONFIG_KEY, loadFixture('test/fixtures/test-repo/config')) await store.close() - - return dir } module.exports = { diff --git a/test/index.spec.js b/test/index.spec.js index 36271cd..5b92b4b 100644 --- a/test/index.spec.js +++ b/test/index.spec.js @@ -3,10 +3,11 @@ const { expect } = require('aegir/utils/chai') const sinon = require('sinon') +const { MemoryBlockstore } = require('interface-blockstore') +const { MemoryDatastore } = require('interface-datastore') const migrator = require('../src/index') const repoVersion = require('../src/repo/version') -const repoLock = require('../src/repo/lock') const repoInit = require('../src/repo/init') const errors = require('../src/errors') @@ -41,18 +42,25 @@ function createOptions () { } } -function createRepoOptions () { - return { - - } -} - describe('index.js', () => { let getVersionStub let setVersionStub let lockStub let initStub let lockCloseStub + let repoOptions + const repoLock = { + lock: () => ({ + close: () => {} + }) + } + const backends = { + root: new MemoryDatastore(), + blocks: new MemoryBlockstore(), + datastore: new MemoryDatastore(), + keys: new MemoryDatastore(), + pins: new MemoryDatastore() + } beforeEach(() => { // Reset all stubs @@ -61,6 +69,10 @@ describe('index.js', () => { initStub.resolves(true) lockCloseStub.resolves() lockStub.resolves({ close: lockCloseStub }) + + repoOptions = { + repoLock + } }) before(() => { @@ -89,31 +101,37 @@ describe('index.js', () => { it('should error with out 
path argument', () => {
      const options = createOptions()

-      return expect(migrator.revert(undefined, undefined, undefined, options))
+      return expect(migrator.revert(undefined, undefined, undefined, undefined, options))
+        .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code)
+    })
+
+    it('should error without backends argument', () => {
+      const options = createOptions()
+
+      return expect(migrator.revert('/some/path', undefined, undefined, undefined, options))
         .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code)
     })

     it('should error without repo options argument', () => {
       const options = createOptions()

-      return expect(migrator.revert('/some/path', undefined, undefined, options))
+      return expect(migrator.revert('/some/path', backends, undefined, undefined, options))
         .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code)
     })

     it('should error without toVersion argument', () => {
       const options = createOptions()

-      return expect(migrator.revert('/some/path', {}, undefined, options))
+      return expect(migrator.revert('/some/path', backends, {}, undefined, options))
         .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code)
     })

     it('should error with invalid toVersion argument', () => {
       const invalidValues = ['eight', '-1', '1', -1]
       const options = createOptions()
-      const repoOptions = createRepoOptions()

       return Promise.all(
-        invalidValues.map((value) => expect(migrator.revert('/some/path', repoOptions, value, options))
+        invalidValues.map((value) => expect(migrator.revert('/some/path', backends, repoOptions, value, options))
           .to.eventually.be.rejectedWith(errors.InvalidValueError).with.property('code', errors.InvalidValueError.code))
       )
     })
@@ -121,9 +139,8 @@ describe('index.js', () => {
     it('should not revert if current repo version and toVersion matches', async () => {
       getVersionStub.returns(2)
       const options = createOptions()
-      const repoOptions = createRepoOptions()

-      await expect(migrator.revert('/some/path', repoOptions, 2, options))
+      await expect(migrator.revert('/some/path', backends, repoOptions, 2, options))
         .to.eventually.be.fulfilled()

       expect(lockStub.called).to.be.false()
@@ -132,9 +149,8 @@ describe('index.js', () => {
     it('should not revert if current repo version is lower then toVersion', async () => {
       getVersionStub.returns(2)
       const options = createOptions()
-      const repoOptions = createRepoOptions()

-      await expect(migrator.revert('/some/path', repoOptions, 3, options))
+      await expect(migrator.revert('/some/path', backends, repoOptions, 3, options))
         .to.eventually.be.rejectedWith(errors.InvalidValueError).with.property('code', errors.InvalidValueError.code)

       expect(lockStub.called).to.be.false()
@@ -144,26 +160,24 @@ describe('index.js', () => {
       const nonReversibleMigrationsMock = createMigrations()
       nonReversibleMigrationsMock[2].revert = undefined
       const options = { migrations: nonReversibleMigrationsMock }
-      const repoOptions = createRepoOptions()

       getVersionStub.returns(4)
       return expect(
-        migrator.revert('/some/path', repoOptions, 1, options)
+        migrator.revert('/some/path', backends, repoOptions, 1, options)
       ).to.eventually.be.rejectedWith(errors.NonReversibleMigrationError)
         .with.property('code', errors.NonReversibleMigrationError.code)
     })

     it('should revert expected migrations', async () => {
       const options = createOptions()
-      const repoOptions = createRepoOptions()
       getVersionStub.returns(3)

-      await expect(migrator.revert('/some/path', repoOptions, 1, options))
+      await expect(migrator.revert('/some/path', backends, repoOptions, 1, options))
         .to.eventually.be.fulfilled()

       expect(lockCloseStub.calledOnce).to.be.true()
       expect(lockStub.calledOnce).to.be.true()
-      expect(setVersionStub.calledOnceWith('/some/path', 1)).to.be.true()
+      expect(setVersionStub.calledOnceWith(1, backends)).to.be.true()

       // Checking migrations
       expect(options.migrations[3].revert.called).to.be.false()
@@ -174,15 +188,14 @@ describe('index.js', () => {
     it('should revert one migration as expected', async () => {
       const options = createOptions()
-      const repoOptions = createRepoOptions()
       getVersionStub.returns(2)

-      await expect(migrator.revert('/some/path', repoOptions, 1, options))
+      await expect(migrator.revert('/some/path', backends, repoOptions, 1, options))
         .to.eventually.be.fulfilled()

       expect(lockCloseStub.calledOnce).to.be.true()
       expect(lockStub.calledOnce).to.be.true()
-      expect(setVersionStub.calledOnceWith('/some/path', 1)).to.be.true()
+      expect(setVersionStub.calledOnceWith(1, backends)).to.be.true()

       // Checking migrations
       expect(options.migrations[3].revert.called).to.be.false()
@@ -201,15 +214,14 @@ describe('index.js', () => {
         }
       ]
       const options = { migrations: migrationsMock }
-      const repoOptions = createRepoOptions()
       getVersionStub.returns(2)

-      await expect(migrator.revert('/some/path', repoOptions, 1, options))
+      await expect(migrator.revert('/some/path', backends, repoOptions, 1, options))
         .to.eventually.be.fulfilled()

       expect(lockCloseStub.calledOnce).to.be.true()
       expect(lockStub.calledOnce).to.be.true()
-      expect(setVersionStub.calledOnceWith('/some/path', 1)).to.be.true()
+      expect(setVersionStub.calledOnceWith(1, backends)).to.be.true()

       // Checking migrations
       expect(migrationsMock[0].revert.calledOnce).to.be.true()
@@ -217,11 +229,10 @@ describe('index.js', () => {
     it('should not have any side-effects when in dry run', async () => {
       const options = createOptions()
-      const repoOptions = createRepoOptions()
       getVersionStub.returns(4)
       options.isDryRun = true

-      await expect(migrator.revert('/some/path', repoOptions, 2, options))
+      await expect(migrator.revert('/some/path', backends, repoOptions, 2, options))
         .to.eventually.be.fulfilled()

       expect(lockCloseStub.called).to.be.false()
@@ -233,17 +244,16 @@ describe('index.js', () => {
     it('should not lock repo when ignoreLock is used', async () => {
       const options = createOptions()
-      const repoOptions = createRepoOptions()
       options.ignoreLock = true
       getVersionStub.returns(4)

-      await expect(migrator.revert('/some/path', repoOptions, 2, options))
+      await expect(migrator.revert('/some/path', backends, repoOptions, 2, options))
         .to.eventually.be.fulfilled()

       expect(lockCloseStub.called).to.be.false()
       expect(lockStub.called).to.be.false()
-      expect(setVersionStub.calledOnceWith('/some/path', 2)).to.be.true()
+      expect(setVersionStub.calledOnceWith(2, backends)).to.be.true()

       // Checking migrations
       expect(options.migrations[3].revert.calledOnce).to.be.true()
@@ -254,15 +264,14 @@ describe('index.js', () => {
     it('should report progress when progress callback is supplied', async () => {
       const options = createOptions()
-      const repoOptions = createRepoOptions()
       options.onProgress = sinon.stub()
       getVersionStub.returns(4)

-      options.migrations[2].revert = (path, repoOptions, onProgress) => {
+      options.migrations[2].revert = (backends, onProgress) => {
         onProgress(50, 'hello')
       }

-      await expect(migrator.revert('/some/path', repoOptions, 2, options))
+      await expect(migrator.revert('/some/path', backends, repoOptions, 2, options))
         .to.eventually.be.fulfilled()

       expect(options.onProgress.getCall(0).calledWith(3, '50.00', 'hello')).to.be.true()
@@ -271,17 +280,16 @@ describe('index.js', () => {
     it('should unlock repo when error is thrown', async () => {
       getVersionStub.returns(4)
       const options = createOptions()
-      const repoOptions = createRepoOptions()
       options.migrations[2].revert = sinon.stub().rejects()

-      await expect(migrator.revert('/some/path', repoOptions, 2, options))
+      await expect(migrator.revert('/some/path', backends, repoOptions, 2, options))
         .to.eventually.be.rejected()

       expect(lockCloseStub.calledOnce).to.be.true()
       expect(lockStub.calledOnce).to.be.true()

       // The last successfully reverted migration should be set as repo's version
-      expect(setVersionStub.calledOnceWith('/some/path', 3)).to.be.true()
+      expect(setVersionStub.calledOnceWith(3, backends)).to.be.true()
     })
   })

@@ -289,31 +297,36 @@ describe('index.js', () => {
     it('should error with out path argument', () => {
       const options = createOptions()

-      return expect(migrator.migrate(undefined, undefined, undefined, options))
+      return expect(migrator.migrate(undefined, undefined, undefined, undefined, options))
+        .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code)
+    })
+
+    it('should error with out backends argument', () => {
+      const options = createOptions()
+
+      return expect(migrator.migrate('/some/path', undefined, undefined, undefined, options))
         .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code)
     })

     it('should error with out repoOptions argument', () => {
       const options = createOptions()

-      return expect(migrator.migrate('/some/path', undefined, undefined, options))
+      return expect(migrator.migrate('/some/path', backends, undefined, undefined, options))
         .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code)
     })

     it('should error with out toVersion argument', () => {
       const options = createOptions()
-      const repoOptions = createRepoOptions()

-      return expect(migrator.migrate('/some/path', repoOptions, undefined, options))
+      return expect(migrator.migrate('/some/path', backends, repoOptions, undefined, options))
         .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code)
     })

     it('should error with invalid toVersion argument', () => {
       const invalidValues = ['eight', '-1', '1', -1, {}]
-      const repoOptions = createRepoOptions()

       return Promise.all(
-        invalidValues.map((invalidValue) => expect(migrator.migrate('/some/path', repoOptions, invalidValue, createOptions()))
+        invalidValues.map((invalidValue) => expect(migrator.migrate('/some/path', backends, repoOptions, invalidValue, createOptions()))
           .to.eventually.be.rejectedWith(errors.InvalidValueError).with.property('code', errors.InvalidValueError.code))
       )
     })
@@ -333,11 +346,10 @@ describe('index.js', () => {
           }
         ]
       }
-      const repoOptions = createRepoOptions()

       getVersionStub.returns(1)

-      return expect(migrator.migrate('/some/path', repoOptions, 3, options))
+      return expect(migrator.migrate('/some/path', backends, repoOptions, 3, options))
         .to.eventually.be.rejectedWith(errors.InvalidValueError).with.property('code', errors.InvalidValueError.code)
     })
@@ -356,20 +368,18 @@ describe('index.js', () => {
           }
         ]
       }
-      const repoOptions = createRepoOptions()

       getVersionStub.returns(3)

-      return expect(migrator.migrate('/some/path', repoOptions, 5, options))
+      return expect(migrator.migrate('/some/path', backends, repoOptions, 5, options))
         .to.eventually.be.rejectedWith(errors.InvalidValueError).with.property('code', errors.InvalidValueError.code)
     })

     it('should not migrate if current repo version and toVersion matches', async () => {
       getVersionStub.returns(2)
       const options = createOptions()
-      const repoOptions = createRepoOptions()

-      await expect(migrator.migrate('/some/path', repoOptions, 2, options))
+      await expect(migrator.migrate('/some/path', backends, repoOptions, 2, options))
         .to.eventually.be.fulfilled()

       expect(lockStub.called).to.be.false()
@@ -378,9 +388,8 @@ describe('index.js', () => {
     it('should not migrate if current repo version is higher then toVersion', async () => {
       getVersionStub.returns(3)
       const options = createOptions()
-      const repoOptions = createRepoOptions()

-      await expect(migrator.migrate('/some/path', repoOptions, 2, options))
+      await expect(migrator.migrate('/some/path', backends, repoOptions, 2, options))
         .to.eventually.be.rejectedWith(errors.InvalidValueError).with.property('code', errors.InvalidValueError.code)

       expect(lockStub.called).to.be.false()
@@ -388,15 +397,14 @@ describe('index.js', () => {
     it('should migrate expected migrations', async () => {
       const options = createOptions()
-      const repoOptions = createRepoOptions()
       getVersionStub.returns(1)

-      await expect(migrator.migrate('/some/path', repoOptions, 3, options))
+      await expect(migrator.migrate('/some/path', backends, repoOptions, 3, options))
         .to.eventually.be.fulfilled()

       expect(lockCloseStub.calledOnce).to.be.true()
       expect(lockStub.calledOnce).to.be.true()
-      expect(setVersionStub.calledOnceWith('/some/path', 3)).to.be.true()
+      expect(setVersionStub.calledOnceWith(3, backends)).to.be.true()

       // Checking migrations
       expect(options.migrations[3].migrate.called).to.be.false()
@@ -408,10 +416,9 @@ describe('index.js', () => {
     it('should not have any side-effects when in dry run', async () => {
       const options = createOptions()
       options.isDryRun = true
-      const repoOptions = createRepoOptions()
       getVersionStub.returns(2)

-      await expect(migrator.migrate('/some/path', repoOptions, 4, options))
+      await expect(migrator.migrate('/some/path', backends, repoOptions, 4, options))
         .to.eventually.be.fulfilled()

       expect(lockCloseStub.called).to.be.false()
@@ -424,15 +431,14 @@ describe('index.js', () => {
     it('should not lock repo when ignoreLock is used', async () => {
       const options = createOptions()
       options.ignoreLock = true
-      const repoOptions = createRepoOptions()
       getVersionStub.returns(2)

-      await expect(migrator.migrate('/some/path', repoOptions, 4, options))
+      await expect(migrator.migrate('/some/path', backends, repoOptions, 4, options))
         .to.eventually.be.fulfilled()

       expect(lockCloseStub.called).to.be.false()
       expect(lockStub.called).to.be.false()
-      expect(setVersionStub.calledOnceWith('/some/path', 4)).to.be.true()
+      expect(setVersionStub.calledOnceWith(4, backends)).to.be.true()

       // Checking migrations
       expect(options.migrations[3].migrate.calledOnce).to.be.true()
@@ -444,14 +450,13 @@ describe('index.js', () => {
     it('should report progress when progress callback is supplied', async () => {
       const options = createOptions()
       options.onProgress = sinon.stub()
-      const repoOptions = createRepoOptions()
       getVersionStub.returns(2)

-      options.migrations[2].migrate = (path, repoOptions, onProgress) => {
+      options.migrations[2].migrate = (backends, onProgress) => {
         onProgress(50, 'hello')
       }

-      await expect(migrator.migrate('/some/path', repoOptions, 4, options))
+      await expect(migrator.migrate('/some/path', backends, repoOptions, 4, options))
         .to.eventually.be.fulfilled()

       expect(options.onProgress.getCall(0).calledWith(3, '50.00', 'hello')).to.be.true()
@@ -461,16 +466,15 @@ describe('index.js', () => {
     it('should unlock repo when error is thrown', async () => {
       getVersionStub.returns(2)
       const options = createOptions()
       options.migrations[3].migrate = sinon.stub().rejects()
-      const repoOptions = createRepoOptions()

-      await expect(migrator.migrate('/some/path', repoOptions, 4, options))
+      await expect(migrator.migrate('/some/path', backends, repoOptions, 4, options))
         .to.eventually.be.rejected()

       expect(lockCloseStub.calledOnce).to.be.true()
       expect(lockStub.calledOnce).to.be.true()

       // The last successfully migrated migration should be set as repo's version
-      expect(setVersionStub.calledOnceWith('/some/path', 3)).to.be.true()
+      expect(setVersionStub.calledOnceWith(3, backends)).to.be.true()
     })
   })
 })
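The hunks above pin down the reworked entry points: migrator.migrate(path, backends, repoOptions, toVersion, options) and migrator.revert(path, backends, repoOptions, toVersion, options), with setVersion asserted as setVersion(version, backends) and each migration invoked as migration.migrate(backends, onProgress). A minimal sketch of a migration written against that convention follows; the store name and progress messages are illustrative, not taken from this patch:

    // Sketch only: a migration under the new (backends, onProgress) shape.
    // `backends.root` is a pre-constructed datastore; migrations no longer
    // build stores from a repo path plus repoOptions.
    const exampleMigration = {
      version: 4,
      description: 'Example migration (hypothetical)',
      migrate: async (backends, onProgress = () => {}) => {
        const store = backends.root
        await store.open()
        try {
          onProgress(50, 'rewriting keys')
          // ...rewrite keys/values here...
        } finally {
          await store.close()
        }
        onProgress(100, 'done')
      },
      revert: async (backends, onProgress = () => {}) => {
        // mirror image of migrate
      }
    }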
diff --git a/test/init-test.js b/test/init-test.js
index 59d4299..aa57506 100644
--- a/test/init-test.js
+++ b/test/init-test.js
@@ -2,49 +2,50 @@
 'use strict'

 const { expect } = require('aegir/utils/chai')
-const { CONFIG_KEY, VERSION_KEY, createStore } = require('../src/utils')
+const { CONFIG_KEY, VERSION_KEY } = require('../src/utils')
 const repoInit = require('../src/repo/init')
 const uint8ArrayFromString = require('uint8arrays/from-string')

-module.exports = (setup, cleanup, repoOptions) => {
+module.exports = (setup, cleanup) => {
   let dir
+  let backends

   beforeEach(async () => {
-    dir = await setup()
+    ({ dir, backends } = await setup())
   })

   afterEach(() =>
     cleanup(dir)
   )

   it('should return true with valid initialized repo', async () => {
-    const store = createStore(dir, 'root', repoOptions)
+    const store = backends.root
     await store.open()
     await store.put(VERSION_KEY, uint8ArrayFromString('7'))
     await store.put(CONFIG_KEY, uint8ArrayFromString('config'))
     await store.close()

-    expect(await repoInit.isRepoInitialized(dir, repoOptions)).to.be.true()
+    expect(await repoInit.isRepoInitialized(backends)).to.be.true()
   })

   it('should return false with missing version key', async () => {
-    const store = createStore(dir, 'root', repoOptions)
+    const store = backends.root
     await store.open()
     await store.put(CONFIG_KEY, '')
     await store.close()

-    expect(await repoInit.isRepoInitialized(dir, repoOptions)).to.be.false()
+    expect(await repoInit.isRepoInitialized(backends)).to.be.false()
   })

   it('should return false with missing config key', async () => {
-    const store = createStore(dir, 'root', repoOptions)
+    const store = backends.root
     await store.open()
     await store.put(VERSION_KEY, '')
     await store.close()

-    expect(await repoInit.isRepoInitialized(dir, repoOptions)).to.be.false()
+    expect(await repoInit.isRepoInitialized(backends)).to.be.false()
   })

   it('should return false if the repo does not exists', async () => {
-    return expect(await repoInit.isRepoInitialized('/some/random/dirrr', repoOptions)).to.be.false()
+    return expect(await repoInit.isRepoInitialized(backends)).to.be.false()
   })
 }
diff --git a/test/integration-test.js b/test/integration-test.js
index 1c9d247..6faa9f0 100644
--- a/test/integration-test.js
+++ b/test/integration-test.js
@@ -5,26 +5,34 @@ const { expect } = require('aegir/utils/chai')

 const migrator = require('../src')
 const migrations = require('./test-migrations')
-const { VERSION_KEY, CONFIG_KEY, createStore } = require('../src/utils')
+const { VERSION_KEY, CONFIG_KEY } = require('../src/utils')
 const { initRepo } = require('./fixtures/repo')

-module.exports = (setup, cleanup, repoOptions) => {
+module.exports = (setup, cleanup) => {
   let dir
+  let backends
+  const repoOptions = {
+    repoLock: {
+      lock: () => ({
+        close: () => {}
+      })
+    }
+  }

   beforeEach(async () => {
-    dir = await setup()
-    await initRepo(dir, repoOptions)
+    ({ dir, backends } = await setup())
+    await initRepo(backends)
   })

   afterEach(() => cleanup(dir))

   it('migrate forward', async () => {
-    await migrator.migrate(dir, repoOptions, migrator.getLatestMigrationVersion(migrations), {
+    await migrator.migrate(dir, backends, repoOptions, migrator.getLatestMigrationVersion(migrations), {
       migrations: migrations,
       onProgress: () => {}
     })

-    const store = createStore(dir, 'root', repoOptions)
+    const store = backends.root
     await store.open()
     const version = await store.get(VERSION_KEY)
     expect(version.toString()).to.be.equal('2')
@@ -36,17 +44,17 @@ module.exports = (setup, cleanup, repoOptions) => {
   })

   it('revert', async () => {
-    await migrator.migrate(dir, repoOptions, migrator.getLatestMigrationVersion(migrations), {
+    await migrator.migrate(dir, backends, repoOptions, migrator.getLatestMigrationVersion(migrations), {
       migrations: migrations,
       onProgress: () => {}
     })

-    await migrator.revert(dir, repoOptions, 1, {
+    await migrator.revert(dir, backends, repoOptions, 1, {
       migrations: migrations,
       onProgress: () => {}
     })

-    const store = createStore(dir, 'root', repoOptions)
+    const store = backends.root
     await store.open()
     const version = await store.get(VERSION_KEY)
     expect(version.toString()).to.be.equal('1')
diff --git a/test/lock-test.js b/test/lock-test.js
deleted file mode 100644
index 700fadb..0000000
--- a/test/lock-test.js
+++ /dev/null
@@ -1,43 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const { expect } = require('aegir/utils/chai')
-
-// When new lock mechanism is introduced in new version don't forget to update
-// the range (from/to) of the previous version test's description
-
-module.exports = (locker, setup, cleanup) => {
-  describe('version 7 and below', () => {
-    let dir
-
-    beforeEach(async () => { dir = await setup() })
-    afterEach(() => cleanup(dir))
-
-    it('should return lock object', async () => {
-      const lock = await locker.lock(7, dir)
-
-      expect(lock).to.have.property('close')
-      expect(lock.close).to.be.a('function')
-      await lock.close()
-    })
-
-    it('should prevent acquiring multiple locks for the same dir', async () => {
-      const lock = await locker.lock(7, dir)
-      await expect(locker.lock(7, dir)).to.be.eventually.rejected()
-      await lock.close()
-    })
-
-    it('should release lock', async () => {
-      let lock
-      lock = await locker.lock(7, dir)
-
-      // It will fail because lock already exists
-      await expect(locker.lock(7, dir)).to.be.eventually.rejected()
-      await lock.close()
-
-      // Lets try to lock it one more time to validate it was released
-      lock = await locker.lock(7, dir)
-      await lock.close()
-    })
-  })
-}
diff --git a/test/migrations/index.js b/test/migrations/index.js
index 6e33442..b6049d1 100644
--- a/test/migrations/index.js
+++ b/test/migrations/index.js
@@ -1,7 +1,7 @@
 'use strict'

-module.exports = (createRepo, repoCleanup, repoOptions) => {
-  require('./migration-8-test')(createRepo, repoCleanup, repoOptions)
-  require('./migration-9-test')(createRepo, repoCleanup, repoOptions)
-  require('./migration-10-test')(createRepo, repoCleanup, repoOptions)
+module.exports = (setup, cleanup) => {
+  require('./migration-8-test')(setup, cleanup)
+  require('./migration-9-test')(setup, cleanup)
+  require('./migration-10-test')(setup, cleanup)
 }
diff --git a/test/migrations/migration-10-test.js b/test/migrations/migration-10-test.js
index 93102fc..311c5ff 100644
--- a/test/migrations/migration-10-test.js
+++ b/test/migrations/migration-10-test.js
@@ -3,8 +3,9 @@
 'use strict'

 const { expect } = require('aegir/utils/chai')
+const { CID } = require('multiformats/cid')
+const { BlockstoreAdapter } = require('interface-blockstore')

-const { createStore } = require('../../src/utils')
 const migration = require('../../migrations/migration-10')
 const Key = require('interface-datastore').Key
 const fromString = require('uint8arrays/from-string')
@@ -17,58 +18,78 @@ const keys = {
   CIQKKLBWAIBQZOIS5X7E32LQAL6236OUKZTMHPQSFIXPWXNZHQOV7JQ: fromString('derp')
 }

-async function bootstrap (dir, backend, repoOptions) {
-  const store = createStore(dir, backend, repoOptions)
+async function bootstrap (store) {
   await store.open()

   for (const name of Object.keys(keys)) {
-    await store.put(new Key(name), keys[name])
+    if (store instanceof BlockstoreAdapter) {
+      await store.put(CID.parse(`b${name.toLowerCase()}`), keys[name])
+    } else {
+      await store.put(new Key(name), keys[name])
+    }
   }

   await store.close()
 }

-async function validate (dir, backend, repoOptions) {
-  const store = createStore(dir, backend, repoOptions)
-
+async function validate (store) {
   await store.open()

   for (const name of Object.keys(keys)) {
-    const key = new Key(`/${name}`)
+    if (store instanceof BlockstoreAdapter) {
+      const key = CID.parse(`b${name.toLowerCase()}`)

-    expect(await store.has(key)).to.be.true(`Could not read key ${name}`)
-    expect(equals(await store.get(key), keys[name])).to.be.true(`Could not read value for key ${keys[name]}`)
+      expect(await store.has(key)).to.be.true(`Could not read key ${name}`)
+      expect(equals(await store.get(key), keys[name])).to.be.true(`Could not read value for key ${keys[name]}`)
+    } else {
+      const key = new Key(`/${name}`)
+
+      await expect(store.has(key)).to.eventually.be.true(`Could not read key ${name}`)
+      expect(equals(await store.get(key), keys[name])).to.be.true(`Could not read value for key ${keys[name]}`)
+    }
   }

   await store.close()
 }

-function withLevel (repoOptions, levelImpl) {
-  const stores = Object.keys(repoOptions.storageBackends)
-    .filter(key => repoOptions.storageBackends[key].name === 'LevelDatastore')
+function withLevels (backends, LevelImpl) {
+  const output = {}
+
+  Object.entries(backends)
+    .forEach(([key, value]) => {
+      output[key] = withLevel(value, LevelImpl)
+    })
+
+  return output
+}

-  const output = {
-    ...repoOptions
+function withLevel (store, LevelImpl) {
+  let parent = {
+    child: store
   }

-  stores.forEach(store => {
-    // override version of level passed to datastore options
-    output.storageBackendOptions[store] = {
-      ...output.storageBackendOptions[store],
-      db: levelImpl
+  while (parent.child) {
+    if (parent.child.constructor.name === 'LevelDatastore') {
+      parent.child.database = LevelImpl
+      delete parent.child.db
+
+      return store
     }
-  })

-  return output
+    parent = parent.child
+  }
+
+  return store
 }

-module.exports = (setup, cleanup, repoOptions) => {
+module.exports = (setup, cleanup) => {
   describe('migration 10', function () {
     this.timeout(240 * 1000)

     let dir
+    let backends

     beforeEach(async () => {
-      dir = await setup()
+      ({ dir, backends } = await setup())
     })

     afterEach(async () => {
@@ -77,48 +98,48 @@ module.exports = (setup, cleanup, repoOptions) => {

     describe('forwards', () => {
       beforeEach(async () => {
-        for (const backend of Object.keys(repoOptions.storageBackends)) {
-          await bootstrap(dir, backend, withLevel(repoOptions, Level5))
+        for (const backend of Object.values(backends)) {
+          await bootstrap(withLevel(backend, Level5))
         }
       })

       it('should migrate keys and values forward', async () => {
-        await migration.migrate(dir, withLevel(repoOptions, Level6), () => {})
+        await migration.migrate(withLevels(backends, Level6), () => {})

-        for (const backend of Object.keys(repoOptions.storageBackends)) {
-          await validate(dir, backend, withLevel(repoOptions, Level6))
+        for (const backend of Object.values(backends)) {
+          await validate(withLevel(backend, Level6))
         }
       })
     })

     describe('backwards using level@6.x.x', () => {
       beforeEach(async () => {
-        for (const backend of Object.keys(repoOptions.storageBackends)) {
-          await bootstrap(dir, backend, withLevel(repoOptions, Level6))
+        for (const backend of Object.values(backends)) {
+          await bootstrap(withLevel(backend, Level6))
         }
       })

       it('should migrate keys and values backward', async () => {
-        await migration.revert(dir, withLevel(repoOptions, Level6), () => {})
+        await migration.revert(withLevels(backends, Level6), () => {})

-        for (const backend of Object.keys(repoOptions.storageBackends)) {
-          await validate(dir, backend, withLevel(repoOptions, Level5))
+        for (const backend of Object.values(backends)) {
+          await validate(withLevel(backend, Level5))
         }
       })
     })

     describe('backwards using level@5.x.x', () => {
       beforeEach(async () => {
-        for (const backend of Object.keys(repoOptions.storageBackends)) {
-          await bootstrap(dir, backend, withLevel(repoOptions, Level6))
+        for (const backend of Object.values(backends)) {
+          await bootstrap(withLevel(backend, Level6))
         }
       })

       it('should migrate keys and values backward', async () => {
-        await migration.revert(dir, withLevel(repoOptions, Level5), () => {})
+        await migration.revert(withLevels(backends, Level5), () => {})

-        for (const backend of Object.keys(repoOptions.storageBackends)) {
-          await validate(dir, backend, withLevel(repoOptions, Level5))
+        for (const backend of Object.values(backends)) {
+          await validate(withLevel(backend, Level5))
         }
       })
     })
diff --git a/test/migrations/migration-8-test.js b/test/migrations/migration-8-test.js
index 44f4677..7dd46a2 100644
--- a/test/migrations/migration-8-test.js
+++ b/test/migrations/migration-8-test.js
@@ -4,7 +4,6 @@

 const { expect } = require('aegir/utils/chai')

-const { createStore } = require('../../src/utils')
 const migration = require('../../migrations/migration-8')
 const Key = require('interface-datastore').Key

@@ -51,39 +50,61 @@ const blocksFixtures = [
   'CIQFTFEEHEDF6KLBT32BFAGLXEZL4UWFNWM4LFTLMXQBCERZ6CMLX3Y']
 ]

-async function bootstrapBlocks (dir, encoded, repoOptions) {
-  const store = createStore(dir, 'blocks', repoOptions)
+/**
+ * @param {*} blockstore
+ * @returns {Datastore}
+ */
+function unwrap (blockstore) {
+  if (blockstore.child) {
+    return unwrap(blockstore.child)
+  }
+
+  return blockstore
+}
+
+async function bootstrapBlocks (backends, encoded) {
+  const store = backends.blocks
   await store.open()

+  const datastore = unwrap(store)
+
   for (const blocksNames of blocksFixtures) {
     const name = encoded ? blocksNames[1] : blocksNames[0]
-    await store.put(new Key(name), '')
+
+    await datastore.put(new Key(`/${name}`), '')
   }

   await store.close()
 }

-async function validateBlocks (dir, encoded, repoOptions) {
-  const store = createStore(dir, 'blocks', repoOptions)
+async function validateBlocks (backends, migrated) {
+  const store = backends.blocks
   await store.open()

+  const datastore = unwrap(store)
+
   for (const blockNames of blocksFixtures) {
-    const newName = encoded ? blockNames[1] : blockNames[0]
-    const oldName = encoded ? blockNames[0] : blockNames[1]
-    expect(await store.has(new Key(`/${oldName}`))).to.be.false(`${oldName} was not migrated to ${newName}`)
-    expect(await store.has(new Key(`/${newName}`))).to.be.true(`${newName} was not removed`)
+    const newName = migrated ? blockNames[1] : blockNames[0]
+    const oldName = migrated ? blockNames[0] : blockNames[1]
+
+    const oldKey = new Key(`/${oldName}`)
+    const newKey = new Key(`/${newName}`)
+
+    expect(await datastore.has(oldKey)).to.be.false(`${oldName} was not migrated to ${newName}`)
+    expect(await datastore.has(newKey)).to.be.true(`${newName} was not removed`)
   }

   await store.close()
 }

-module.exports = (setup, cleanup, repoOptions) => {
+module.exports = (setup, cleanup) => {
   describe('migration 8', function () {
     this.timeout(240 * 1000)

     let dir
+    let backends

     beforeEach(async () => {
-      dir = await setup()
+      ({ dir, backends } = await setup())
     })

     afterEach(async () => {
@@ -93,27 +114,27 @@ module.exports = (setup, cleanup, repoOptions) => {
     describe('empty repo', () => {
       describe('forwards', () => {
         it('should migrate pins forward', async () => {
-          await migration.migrate(dir, repoOptions, () => {})
+          await migration.migrate(backends, () => {})
         })
       })

       describe('backwards', () => {
         it('should migrate pins backward', async () => {
-          await migration.revert(dir, repoOptions, () => {})
+          await migration.revert(backends, () => {})
         })
       })
     })

     it('should migrate blocks forward', async () => {
-      await bootstrapBlocks(dir, false, repoOptions)
-      await migration.migrate(dir, repoOptions, () => {})
-      await validateBlocks(dir, true, repoOptions)
+      await bootstrapBlocks(backends, false)
+      await migration.migrate(backends, () => {})
+      await validateBlocks(backends, true)
     })

     it('should migrate blocks backward', async () => {
-      await bootstrapBlocks(dir, true, repoOptions)
-      await migration.revert(dir, repoOptions, () => {})
-      await validateBlocks(dir, false, repoOptions)
+      await bootstrapBlocks(backends, true)
+      await migration.revert(backends, () => {})
+      await validateBlocks(backends, false)
     })
   })
 }
diff --git a/test/migrations/migration-9-test.js b/test/migrations/migration-9-test.js
index 37a4d4a..8202272 100644
--- a/test/migrations/migration-9-test.js
+++ b/test/migrations/migration-9-test.js
@@ -5,16 +5,20 @@ const { expect } = require('aegir/utils/chai')
 const cbor = require('cborg')
 const migration = require('../../migrations/migration-9')
-const { cidToKey, PIN_DS_KEY } = require('../../migrations/migration-9/utils')
-const { createStore } = require('../../src/utils')
-const CID = require('cids')
+const { PIN_DS_KEY } = require('../../migrations/migration-9/utils')
+const { CID } = require('multiformats/cid')
 const { CarReader } = require('@ipld/car')
 const loadFixture = require('aegir/utils/fixtures')
-const multibase = require('multibase')
+const dagPb = require('@ipld/dag-pb')
+const mhd = require('multiformats/hashes/digest')
+const { base32 } = require('multiformats/bases/base32')

 function pinToCid (key, pin) {
-  const buf = multibase.encoding('base32upper').decode(key.toString().split('/').pop())
-  return new CID(pin.version || 0, pin.codec || 'dag-pb', buf)
+  return CID.create(
+    pin.version || 0,
+    pin.codec || dagPb.code,
+    mhd.decode(base32.decode('b' + key.toString().toLowerCase().split('/').pop()))
+  )
 }

 // the test data is generated by the file /test/fixtures/generate-car-files.js
@@ -23,12 +27,12 @@ function pinToCid (key, pin) {
 const pinsets = {
   'basic pinset': {
     car: loadFixture('test/fixtures/pinset-basic.car'),
-    root: new CID('QmeKxgcTtiE1XfvwcVf8wc65GgMmZumEtXK6YJKuvf3VYx'),
+    root: CID.parse('QmeKxgcTtiE1XfvwcVf8wc65GgMmZumEtXK6YJKuvf3VYx'),
     pins: 31
   },
   'multiple bucket pinset': {
     car: loadFixture('test/fixtures/pinset-multiple-buckets.car'),
-    root: new CID('QmPGd36dodHj1LQtVWK3LcBVkkVWvfXHEwBHnMpN6tu4BD'),
+    root: CID.parse('QmPGd36dodHj1LQtVWK3LcBVkkVWvfXHEwBHnMpN6tu4BD'),

     // we need at least 8192 pins in order to create a new bucket
     pins: 9031
@@ -81,15 +85,10 @@ async function bootstrapBlocks (blockstore, datastore, { car: carBuf, root: expe
   expect(actualRoot.toString()).to.equal(expectedRoot.toString())
   await blockstore.open()

-  const batch = blockstore.batch()
-
   for await (const { cid, bytes } of car.blocks()) {
-    // if we don't create a new Uint8Array, IDB writes the whole backing buffer into the db
-    batch.put(cidToKey(new CID(cid.toString())), new Uint8Array(bytes.buffer, bytes.byteOffset, bytes.byteLength))
+    blockstore.put(CID.parse(cid.toString()), new Uint8Array(bytes.buffer, bytes.byteOffset, bytes.byteLength))
   }

-  await batch.commit()
-
   await blockstore.close()

   await datastore.open()
@@ -101,25 +100,19 @@ async function assertPinsetRootIsPresent (datastore, pinset) {
   await datastore.open()
   const buf = await datastore.get(PIN_DS_KEY)
   await datastore.close()
-  const cid = new CID(buf)
+  const cid = CID.decode(buf)
   expect(cid.toString()).to.equal(pinset.root.toString())
 }

-module.exports = (setup, cleanup, repoOptions) => {
+module.exports = (setup, cleanup) => {
   describe('migration 9', function () {
     this.timeout(1000 * 1000)

     let dir
-    let datastore
-    let blockstore
-    let pinstore
+    let backends

     beforeEach(async () => {
-      dir = await setup()
-
-      blockstore = createStore(dir, 'blocks', repoOptions)
-      datastore = createStore(dir, 'datastore', repoOptions)
-      pinstore = createStore(dir, 'pins', repoOptions)
+      ({ dir, backends } = await setup())
     })

     afterEach(async () => {
@@ -129,13 +122,13 @@ module.exports = (setup, cleanup, repoOptions) => {
     describe('empty repo', () => {
       describe('forwards', () => {
         it('should migrate pins forward', async () => {
-          await migration.migrate(dir, repoOptions, () => {})
+          await migration.migrate(backends, () => {})
         })
       })

       describe('backwards', () => {
         it('should migrate pins backward', async () => {
-          await migration.revert(dir, repoOptions, () => {})
+          await migration.revert(backends, () => {})
         })
       })
     })
@@ -147,18 +140,18 @@ module.exports = (setup, cleanup, repoOptions) => {
       describe(title, () => {
         describe('forwards', () => {
           beforeEach(async () => {
-            await bootstrapBlocks(blockstore, datastore, pinset)
-            await assertPinsetRootIsPresent(datastore, pinset)
+            await bootstrapBlocks(backends.blocks, backends.datastore, pinset)
+            await assertPinsetRootIsPresent(backends.datastore, pinset)
           })

           it('should migrate pins forward', async () => {
-            await migration.migrate(dir, repoOptions, () => {})
+            await migration.migrate(backends, () => {})

-            await pinstore.open()
+            await backends.pins.open()

             let migratedDirect = 0
             let migratedNonDagPBRecursive = 0

-            for await (const { key, value } of pinstore.query({})) {
+            for await (const { key, value } of backends.pins.query({})) {
               pinned[key] = value

               const pin = cbor.decode(value)
@@ -177,33 +170,33 @@ module.exports = (setup, cleanup, repoOptions) => {
               }
             }

-            await pinstore.close()
+            await backends.pins.close()

             expect(migratedDirect).to.equal(directPins.length + nonDagPbDirectPins.length)
             expect(migratedNonDagPBRecursive).to.equal(nonDagPbRecursivePins.length)
             expect(Object.keys(pinned)).to.have.lengthOf(pinset.pins)

-            await datastore.open()
-            await expect(datastore.has(PIN_DS_KEY)).to.eventually.be.false()
-            await datastore.close()
+            await backends.datastore.open()
+            await expect(backends.datastore.has(PIN_DS_KEY)).to.eventually.be.false()
+            await backends.datastore.close()
           })
         })

         describe('backwards', () => {
           beforeEach(async () => {
-            await pinstore.open()
+            await backends.pins.open()

             for (const key of Object.keys(pinned)) {
-              await pinstore.put(key, pinned[key])
+              await backends.pins.put(key, pinned[key])
             }

-            await pinstore.close()
+            await backends.pins.close()
           })

           it('should migrate pins backward', async () => {
-            await migration.revert(dir, repoOptions, () => {})
+            await migration.revert(backends, () => {})

-            await assertPinsetRootIsPresent(datastore, pinset)
+            await assertPinsetRootIsPresent(backends.datastore, pinset)
           })
         })
       })
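Every suite above now destructures ({ dir, backends } = await setup()) instead of receiving a directory plus repoOptions. The setup wired up in test/node.js below is createRepo(createBackends, os.tmpdir()); a sketch of what that helper presumably does, assuming only the destructured return shape (the temp-dir naming is illustrative):

    // Hypothetical shape of createRepo: make a fresh prefix, build the
    // backends for it, and return both so the test can clean the dir up.
    const path = require('path')
    const createRepo = async (createBackends, baseDir) => {
      const dir = path.join(baseDir, `test-repo-${Date.now()}-${Math.random()}`)
      const backends = createBackends(dir)
      return { dir, backends }
    }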
diff --git a/test/node.js b/test/node.js
index bec82db..f43a535 100644
--- a/test/node.js
+++ b/test/node.js
@@ -6,6 +6,8 @@ const rimraf = require('rimraf')
 const DatastoreFS = require('datastore-fs')
 const DatastoreLevel = require('datastore-level')
 const DatastoreS3 = require('datastore-s3')
+const { ShardingDatastore, shard: { NextToLast } } = require('datastore-core')
+const BlockstoreDatastoreAdapter = require('blockstore-datastore-adapter')
 const mockS3 = require('./fixtures/mock-s3')
 const S3 = require('aws-sdk').S3
 const s3Instance = new S3({
@@ -23,123 +25,94 @@ function cleanup (dir) {

 const CONFIGURATIONS = [{
   name: 'with sharding',
   cleanup,
-  repoOptions: {
-    storageBackends: {
-      root: DatastoreFS,
-      blocks: DatastoreFS,
-      datastore: DatastoreLevel,
-      keys: DatastoreLevel,
-      pins: DatastoreLevel
-    },
-    storageBackendOptions: {
-      root: {
-        extension: ''
-      },
-      blocks: {
-        sharding: true,
-        extension: '.data'
-      },
-      datastore: {},
-      keys: {},
-      pins: {}
+  /**
+   * @param {string} prefix
+   * @returns {import('../src/types').Backends}
+   */
+  createBackends: (prefix) => {
+    return {
+      root: new DatastoreFS(prefix),
+      blocks: new BlockstoreDatastoreAdapter(
+        new ShardingDatastore(
+          new DatastoreFS(`${prefix}/blocks`, {
+            extension: '.data'
+          }),
+          new NextToLast(2))
+      ),
+      datastore: new DatastoreLevel(`${prefix}/datastore`),
+      keys: new DatastoreLevel(`${prefix}/keys`),
+      pins: new DatastoreLevel(`${prefix}/pins`)
     }
   }
 }, {
   name: 'without sharding',
   cleanup,
-  repoOptions: {
-    storageBackends: {
-      root: DatastoreFS,
-      blocks: DatastoreFS,
-      datastore: DatastoreLevel,
-      keys: DatastoreLevel,
-      pins: DatastoreLevel
-    },
-    storageBackendOptions: {
-      root: {
-        extension: ''
-      },
-      blocks: {
-        sharding: false,
-        extension: '.data'
-      },
-      datastore: {},
-      keys: {},
-      pins: {}
+  createBackends: (prefix) => {
+    return {
+      root: new DatastoreFS(prefix),
+      blocks: new BlockstoreDatastoreAdapter(
+        new DatastoreFS(`${prefix}/blocks`, {
+          extension: '.data'
+        })
+      ),
+      datastore: new DatastoreLevel(`${prefix}/datastore`),
+      keys: new DatastoreLevel(`${prefix}/keys`),
+      pins: new DatastoreLevel(`${prefix}/pins`)
     }
   }
 }, {
   name: 'with s3',
   cleanup: () => {},
-  repoOptions: {
-    lock: 'memory',
-    storageBackends: {
-      root: DatastoreS3,
-      blocks: DatastoreS3,
-      datastore: DatastoreS3,
-      keys: DatastoreS3,
-      pins: DatastoreS3
-    },
-    storageBackendOptions: {
-      root: {
-        sharding: true,
-        extension: '',
+  createBackends: (prefix) => {
+    return {
+      root: new DatastoreS3(prefix, {
         s3: s3Instance,
         createIfMissing: false
-      },
-      blocks: {
-        sharding: true,
-        extension: '.data',
+      }),
+      blocks: new BlockstoreDatastoreAdapter(
+        new ShardingDatastore(
+          new DatastoreS3(`${prefix}/blocks`, {
+            s3: s3Instance,
+            createIfMissing: false,
+            extension: '.data'
+          }),
+          new NextToLast(2)
+        )
+      ),
+      datastore: new ShardingDatastore(new DatastoreS3(`${prefix}/datastore`, {
         s3: s3Instance,
         createIfMissing: false
-      },
-      datastore: {
-        sharding: true,
+      }), new NextToLast(2)),
+      keys: new ShardingDatastore(new DatastoreS3(`${prefix}/keys`, {
         s3: s3Instance,
         createIfMissing: false
-      },
-      keys: {
-        sharding: true,
+      }), new NextToLast(2)),
+      pins: new ShardingDatastore(new DatastoreS3(`${prefix}/pins`, {
         s3: s3Instance,
         createIfMissing: false
-      },
-      pins: {
-        sharding: true,
-        s3: s3Instance,
-        createIfMissing: false
-      }
+      }), new NextToLast(2))
     }
   }
 }]

-CONFIGURATIONS.forEach(({ name, repoOptions, cleanup }) => {
-  const setup = () => createRepo(repoOptions, os.tmpdir())
+CONFIGURATIONS.forEach(({ name, createBackends, cleanup }) => {
+  const setup = () => createRepo(createBackends, os.tmpdir())

   describe(name, () => {
-    if (repoOptions.lock === 'memory') {
-      describe('mem-lock tests', () => {
-        require('./lock-test')(require('../src/repo/lock-memory'), setup, cleanup, repoOptions)
-      })
-    } else {
-      describe('fs-lock tests', () => {
-        require('./lock-test')(require('../src/repo/lock'), setup, cleanup, repoOptions)
-      })
-    }
-
     describe('version tests', () => {
-      require('./version-test')(setup, cleanup, repoOptions)
+      require('./version-test')(setup, cleanup)
     })

     describe('migrations tests', () => {
-      require('./migrations')(setup, cleanup, repoOptions)
+      require('./migrations')(setup, cleanup)
     })

     describe('init tests', () => {
-      require('./init-test')(setup, cleanup, repoOptions)
+      require('./init-test')(setup, cleanup)
     })

     describe('integration tests', () => {
-      require('./integration-test')(setup, cleanup, repoOptions)
+      require('./integration-test')(setup, cleanup)
     })
   })
 })
diff --git a/test/test-migrations/migration-2/index.js b/test/test-migrations/migration-2/index.js
index b7d7a2a..b8f1415 100644
--- a/test/test-migrations/migration-2/index.js
+++ b/test/test-migrations/migration-2/index.js
@@ -3,7 +3,6 @@
 const Key = require('interface-datastore').Key
 const _set = require('just-safe-set')
 const uint8ArrayFromString = require('uint8arrays/from-string')
-const { createStore } = require('../../../src/utils')

 const CONFIG_KEY = new Key('config')
 const NEW_API_ADDRESS = '/ip6/::/tcp/5001'
@@ -53,8 +52,8 @@ function removeNewApiAddress (config) {
   return config
 }

-async function migrate (repoPath, repoOptions, onProgress) {
-  const store = createStore(repoPath, 'root', repoOptions)
+async function migrate (backends, onProgress) {
+  const store = backends.root
   await store.open()

   try {
@@ -76,8 +75,8 @@ async function migrate (repoPath, repoOptions, onProgress) {
   onProgress(100, 'done!')
 }

-async function revert (repoPath, repoOptions, onProgress) {
-  const store = createStore(repoPath, 'root', repoOptions)
+async function revert (backends, onProgress) {
+  const store = backends.root
   await store.open()

   try {
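The test migration above and the version helpers below share the same signature change: the repo path and repoOptions arguments disappear in favour of the backends object, with setVersion taking the numeric version first. A sketch of the resulting call shape, as exercised by the version-test hunks that follow:

    // version helpers now operate on the backends directly
    await version.setVersion(7, backends)        // was: version.setVersion(dir, 7, repoOptions)
    const v = await version.getVersion(backends) // => 7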
diff --git a/test/version-test.js b/test/version-test.js
index 76e351c..d5fd6cd 100644
--- a/test/version-test.js
+++ b/test/version-test.js
@@ -2,7 +2,7 @@
 'use strict'

 const { expect } = require('aegir/utils/chai')
-const { VERSION_KEY, CONFIG_KEY, createStore } = require('../src/utils')
+const { VERSION_KEY, CONFIG_KEY } = require('../src/utils')
 const version = require('../src/repo/version')
 const uint8ArrayFromString = require('uint8arrays/from-string')
 const errors = require('../src/errors')
@@ -10,44 +10,45 @@ const errors = require('../src/errors')
 // When new versioning mechanism is introduced in new version don't forget to update
 // the range (from/to) of the previous version test's description

-module.exports = (setup, cleanup, repoOptions) => {
+module.exports = (setup, cleanup) => {
   let dir
+  let backends

   beforeEach(async () => {
-    dir = await setup()
+    ({ dir, backends } = await setup())
   })

   afterEach(() => cleanup(dir))

   it('getVersion should fail without any version in repo', async () => {
-    await expect(version.getVersion(dir, repoOptions)).to.be.eventually.rejectedWith(errors.NotInitializedRepoError)
+    await expect(version.getVersion(backends)).to.be.eventually.rejectedWith(errors.NotInitializedRepoError)
       .with.property('code', errors.NotInitializedRepoError.code)
   })

   describe('version 7 and below', () => {
     it('should get version number', async () => {
       // Create version file
-      const store = createStore(dir, 'root', repoOptions)
+      const store = backends.root
       await store.open()
       await store.put(CONFIG_KEY, uint8ArrayFromString('some dummy config'))
       await store.put(VERSION_KEY, uint8ArrayFromString('7'))
       await store.close()

-      expect(await version.getVersion(dir, repoOptions)).to.be.equal(7)
+      expect(await version.getVersion(backends)).to.be.equal(7)
     })

     it('should set version number', async () => {
-      await expect(version.getVersion(dir, repoOptions)).to.be.eventually.rejectedWith(errors.NotInitializedRepoError).with.property('code', errors.NotInitializedRepoError.code)
+      await expect(version.getVersion(backends)).to.be.eventually.rejectedWith(errors.NotInitializedRepoError).with.property('code', errors.NotInitializedRepoError.code)

       // Create version file
-      const store = createStore(dir, 'root', repoOptions)
+      const store = backends.root
       await store.open()
       await store.put(CONFIG_KEY, uint8ArrayFromString('some dummy config'))
       await store.put(VERSION_KEY, uint8ArrayFromString('5'))
       await store.close()

-      await version.setVersion(dir, 7, repoOptions)
-      expect(await version.getVersion(dir, repoOptions)).to.be.equal(7)
+      await version.setVersion(7, backends)
+      expect(await version.getVersion(backends)).to.be.equal(7)
     })
   })
 }
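A recurring pattern in this patch: migration-8-test.js defines a recursive unwrap that follows .child links to reach the raw datastore inside wrappers such as ShardingDatastore and BlockstoreDatastoreAdapter, and migration-10-test.js's withLevel walks the same chain. Were that ever hoisted into a shared helper, an iterative equivalent could look like this (sketch, not part of the patch):

    // Walk .child links until the innermost store is reached.
    function unwrap (store) {
      while (store.child) {
        store = store.child
      }
      return store
    }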