This repository was archived by the owner on Feb 12, 2024. It is now read-only.

Refactor refs-local to not use ipfs.add #2980

Closed
wants to merge 1 commit into from
22 changes: 11 additions & 11 deletions packages/interface-ipfs-core/src/refs-local.js
@@ -2,8 +2,11 @@
 'use strict'

 const { fixtures } = require('./utils')
+const createFile = require('./utils/create-file')
 const { getDescribe, getIt, expect } = require('./utils/mocha')
 const all = require('it-all')
+const UnixFS = require('ipfs-unixfs')
+const { DAGLink, DAGNode } = require('ipld-dag-pb')

 /** @typedef { import("ipfsd-ctl/src/factory") } Factory */
 /**
@@ -26,17 +29,14 @@ module.exports = (common, options) => {
     after(() => common.clean())

     it('should get local refs', async function () {
-      const content = (name) => ({
-        path: `test-folder/${name}`,
-        content: fixtures.directory.files[name]
-      })
-
-      const dirs = [
-        content('pp.txt'),
-        content('holmes.txt')
-      ]
-
-      await all(ipfs.add(dirs))
+      const pp = await createFile(ipfs, fixtures.directory.files['pp.txt'])
+      const holmes = await createFile(ipfs, fixtures.directory.files['holmes.txt'])
+      const directory = new UnixFS({ type: 'directory' })
+      const serialized = new DAGNode(directory.marshal(), [
+        new DAGLink('pp.txt', pp.cumulativeSize, pp.cid),
+        new DAGLink('holmes.txt', holmes.cumulativeSize, holmes.cid)
+      ]).serialize()
+      await ipfs.block.put(serialized)

       const refs = await all(ipfs.refs.local())

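The remainder of the test body is collapsed in the diff above. For orientation, here is a minimal sketch of the kind of assertion that typically follows, assuming each entry yielded by ipfs.refs.local() exposes a ref string property; this is illustrative only, not the PR's actual code:

      // Illustrative continuation (not from this PR): check that the blocks we
      // just put show up in the local refs listing
      const localRefs = refs.map(r => r.ref)
      expect(localRefs).to.include(pp.cid.toString())
      expect(localRefs).to.include(holmes.cid.toString())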
45 changes: 45 additions & 0 deletions packages/interface-ipfs-core/src/utils/create-file.js
@@ -0,0 +1,45 @@
'use strict'

const UnixFS = require('ipfs-unixfs')
const { DAGLink, DAGNode } = require('ipld-dag-pb')

// This function creates blocks from lower-level primitives
// to simulate an `ipfs add` without explicitly using `ipfs add`
// for conformance testing clients that haven't implemented UnixFS yet
module.exports = async (ipfs, data, chunkSize = 262144) => {
  const chunks = []

  // Split the data into UnixFS file chunks and store each one as its own block
  for (let i = 0; i < data.length; i += chunkSize) {
    const unixfs = new UnixFS({
      type: 'file',
      data: data.slice(i, i + chunkSize)
    })
    const dagNode = new DAGNode(unixfs.marshal())
    const block = await ipfs.block.put(dagNode.serialize())

    chunks.push({
      unixfs,
      size: block.data.length,
      cid: block.cid
    })
  }

  // A single chunk is the file root itself; no wrapper node needed
  if (chunks.length === 1) {
    return {
      cid: chunks[0].cid,
      cumulativeSize: chunks[0].size
    }
  }

  // Otherwise wrap the chunks in a root node that links to each of them
  const unixfs = new UnixFS({
    type: 'file',
    blockSizes: chunks.map(chunk => chunk.unixfs.fileSize())
  })
  const dagNode = new DAGNode(unixfs.marshal(), chunks.map(chunk => new DAGLink('', chunk.size, chunk.cid)))
  const block = await ipfs.block.put(dagNode.serialize())

  return {
    cid: block.cid,
    cumulativeSize: chunks.reduce((acc, curr) => acc + curr.size, 0) + block.data.length
  }
}
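For reference, a hypothetical usage sketch of the helper above (not part of the PR). It assumes an online js-ipfs instance named ipfs and runs inside an async function. A buffer larger than the default chunk size produces several leaf blocks plus a root node, and the returned object carries the root CID and the total serialized size:

const createFile = require('./utils/create-file')

// Three full 256 KiB chunks: expect three leaf blocks plus one root node
const data = Buffer.alloc(3 * 262144, 1)
const { cid, cumulativeSize } = await createFile(ipfs, data)

console.log(cid.toString()) // CID of the root DAGNode linking the three leaves
console.log(cumulativeSize) // serialized bytes of the three leaves plus the root block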