From 812937662063b360be2b9e6473dcc2b0e7f80508 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Thu, 18 Mar 2021 14:09:24 +0000 Subject: [PATCH 01/10] chore: tighten up input types

We normalise content to `AsyncIterable<Uint8Array>`, so there's no point accepting more than that; accepting a wider union also leads to type ballooning around passing input to the importer.

--- packages/ipfs-unixfs-importer/src/types.d.ts | 10 +++++----- packages/ipfs-unixfs/src/index.js | 18 +++++++++++++----- 2 files changed, 18 insertions(+), 10 deletions(-)

diff --git a/packages/ipfs-unixfs-importer/src/types.d.ts b/packages/ipfs-unixfs-importer/src/types.d.ts index 19edd024..4cfda048 100644 --- a/packages/ipfs-unixfs-importer/src/types.d.ts +++ b/packages/ipfs-unixfs-importer/src/types.d.ts @@ -1,4 +1,4 @@ -import { UnixFS, Mtime, MtimeLike } from 'ipfs-unixfs' +import { UnixFS, Mtime } from 'ipfs-unixfs' import CID, { CIDVersion } from 'cids' import { HashName } from 'multihashes' import Block from 'ipld-block' @@ -6,21 +6,21 @@ import { CodecName } from 'multicodec' interface ImportCandidate { path?: string - content?: AsyncIterable<Uint8Array> | Iterable<Uint8Array> | Uint8Array | ArrayLike<number> | string - mtime?: MtimeLike + content?: AsyncIterable<Uint8Array> + mtime?: Mtime mode?: number } interface File { content: AsyncIterable<Uint8Array> path?: string - mtime?: MtimeLike + mtime?: Mtime mode?: number } interface Directory { path?: string - mtime?: MtimeLike + mtime?: Mtime mode?: number }

diff --git a/packages/ipfs-unixfs/src/index.js b/packages/ipfs-unixfs/src/index.js index 10bd47c9..89305761 100644 --- a/packages/ipfs-unixfs/src/index.js +++ b/packages/ipfs-unixfs/src/index.js @@ -8,11 +8,11 @@ const errcode = require('err-code') /** * @typedef {object} Mtime * @property {number} secs - * @property {number | null} [nsecs] + * @property {number} [nsecs] */ /** - * @typedef {null | undefined | { secs: number, nsecs?: number} | { Seconds: number, FractionalNanoseconds?: number} | Mtime | [number, number] | Date} MtimeLike + * @typedef {null | undefined | { secs: number, nsecs?: number} | { Seconds: number, FractionalNanoseconds?: number} | Mtime | [number, number] | Date} MtimeLike */ const types = [ @@ -40,11 +40,19 @@ function parseMode (mode) { return undefined } - if (typeof mode === 'string') { - mode = parseInt(mode, 8) + if (typeof mode === 'number') { + return mode & 0xFFF } - return mode & 0xFFF + mode = mode.toString() + + if (mode.substring(0, 1) === '0') { + // octal string + return parseInt(mode, 8) & 0xFFF + } + + // decimal string + return parseInt(mode, 10) & 0xFFF } /**

From 55293d010926db31d52491c277b5af13a5c38dd5 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Thu, 18 Mar 2021 14:12:06 +0000 Subject: [PATCH 02/10] chore: upgrade aegir and fix extends path

--- packages/ipfs-unixfs-exporter/package.json | 2 +- packages/ipfs-unixfs-exporter/tsconfig.json | 2 +- packages/ipfs-unixfs-importer/package.json | 2 +- packages/ipfs-unixfs-importer/tsconfig.json | 2 +- packages/ipfs-unixfs/package.json | 2 +- packages/ipfs-unixfs/tsconfig.json | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/packages/ipfs-unixfs-exporter/package.json b/packages/ipfs-unixfs-exporter/package.json index ffa97dbc..159e3c67 100644 --- a/packages/ipfs-unixfs-exporter/package.json +++ b/packages/ipfs-unixfs-exporter/package.json @@ -36,7 +36,7 @@ "@types/mocha": "^8.2.1", "@types/sinon": "^9.0.10", "abort-controller": "^3.0.0", - "aegir": "^32.0.0", + "aegir": "^32.1.0", "copy": "^0.3.2", "crypto-browserify": "^3.12.0", "detect-node": "^2.0.4", diff --git 
a/packages/ipfs-unixfs-exporter/tsconfig.json b/packages/ipfs-unixfs-exporter/tsconfig.json index 3c4a52f9..c6eb8f25 100644 --- a/packages/ipfs-unixfs-exporter/tsconfig.json +++ b/packages/ipfs-unixfs-exporter/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../node_modules/aegir/src/config/tsconfig.aegir.json", + "extends": "aegir/src/config/tsconfig.aegir.json", "compilerOptions": { "outDir": "dist", "importsNotUsedAsValues": "preserve" diff --git a/packages/ipfs-unixfs-importer/package.json b/packages/ipfs-unixfs-importer/package.json index b388b58d..c45ff4cf 100644 --- a/packages/ipfs-unixfs-importer/package.json +++ b/packages/ipfs-unixfs-importer/package.json @@ -34,7 +34,7 @@ "homepage": "https://github.com/ipfs/js-ipfs-unixfs#readme", "devDependencies": { "@types/mocha": "^8.2.1", - "aegir": "^32.0.0", + "aegir": "^32.1.0", "assert": "^2.0.0", "copy": "^0.3.2", "crypto-browserify": "^3.12.0", diff --git a/packages/ipfs-unixfs-importer/tsconfig.json b/packages/ipfs-unixfs-importer/tsconfig.json index 3c4a52f9..c6eb8f25 100644 --- a/packages/ipfs-unixfs-importer/tsconfig.json +++ b/packages/ipfs-unixfs-importer/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../node_modules/aegir/src/config/tsconfig.aegir.json", + "extends": "aegir/src/config/tsconfig.aegir.json", "compilerOptions": { "outDir": "dist", "importsNotUsedAsValues": "preserve" diff --git a/packages/ipfs-unixfs/package.json b/packages/ipfs-unixfs/package.json index cec244a1..4ce5ba3a 100644 --- a/packages/ipfs-unixfs/package.json +++ b/packages/ipfs-unixfs/package.json @@ -37,7 +37,7 @@ "homepage": "https://github.com/ipfs/js-ipfs-unixfs#readme", "devDependencies": { "@types/mocha": "^8.2.1", - "aegir": "^32.0.0", + "aegir": "^32.1.0", "copy": "^0.3.2", "mkdirp": "^1.0.4", "npm-run-all": "^4.1.5", diff --git a/packages/ipfs-unixfs/tsconfig.json b/packages/ipfs-unixfs/tsconfig.json index f121edfe..f06b4656 100644 --- a/packages/ipfs-unixfs/tsconfig.json +++ b/packages/ipfs-unixfs/tsconfig.json @@ -1,5 +1,5 @@ { - "extends": "../../node_modules/aegir/src/config/tsconfig.aegir.json", + "extends": "aegir/src/config/tsconfig.aegir.json", "compilerOptions": { "outDir": "dist" }, From 3117af98a3e4747e938079c7216e1c1f41b6a6b4 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 19 Mar 2021 12:51:46 +0000 Subject: [PATCH 03/10] chore: move all types to types.d.ts files --- .../test/exporter-sharded.spec.js | 7 +- .../test/exporter-subtree.spec.js | 9 +- .../test/exporter.spec.js | 21 +-- .../test/helpers/as-async-iterable.js | 14 ++ .../test/import-export-dir-sharding.spec.js | 15 +- .../test/import-export-nested-dir.spec.js | 9 +- .../test/import-export.spec.js | 3 +- .../test/importer.spec.js | 136 ++++++++---------- .../src/dag-builder/index.js | 67 +++++---- packages/ipfs-unixfs-importer/src/index.js | 23 ++- packages/ipfs-unixfs-importer/src/types.d.ts | 2 +- .../test/builder-only-hash.spec.js | 3 +- .../ipfs-unixfs-importer/test/builder.spec.js | 9 +- .../test/helpers/as-async-iterable.js | 6 +- .../finite-pseudorandom-byte-stream.js | 2 +- .../test/helpers/random-byte-stream.js | 2 +- packages/ipfs-unixfs/src/index.js | 60 ++++---- packages/ipfs-unixfs/src/types.d.ts | 7 + .../ipfs-unixfs/test/unixfs-format.spec.js | 62 ++++++++ 19 files changed, 273 insertions(+), 184 deletions(-) create mode 100644 packages/ipfs-unixfs-exporter/test/helpers/as-async-iterable.js create mode 100644 packages/ipfs-unixfs/src/types.d.ts diff --git a/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.js 
b/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.js index 876591fa..ddec0d16 100644 --- a/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.js +++ b/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.js @@ -20,6 +20,7 @@ const { } = require('ipld-dag-pb') const blockApi = require('./helpers/block') const uint8ArrayConcat = require('uint8arrays/concat') +const asAsyncIterable = require('./helpers/as-async-iterable') /** * @typedef {import('cids')} CID @@ -49,14 +50,14 @@ describe('exporter sharded', function () { const createShardWithFileNames = (numFiles, fileName) => { const files = new Array(numFiles).fill(0).map((_, index) => ({ path: fileName(index), - content: Uint8Array.from([0, 1, 2, 3, 4, index]) + content: asAsyncIterable(Uint8Array.from([0, 1, 2, 3, 4, index])) })) return createShardWithFiles(files) } /** - * @param {{ path: string, content: Uint8Array }[] } files + * @param {{ path: string, content: AsyncIterable<Uint8Array> }[] } files */ const createShardWithFiles = async (files) => { const result = await last(importer(files, block, { @@ -88,7 +89,7 @@ describe('exporter sharded', function () { const imported = await all(importer(Object.keys(files).map(path => ({ path, - content: files[path].content + content: asAsyncIterable(files[path].content) })), block, { wrapWithDirectory: true, shardSplitThreshold: SHARD_SPLIT_THRESHOLD

diff --git a/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.js b/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.js index 99633ec9..854ab0d7 100644 --- a/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.js +++ b/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.js @@ -12,6 +12,7 @@ const last = require('it-last') const blockApi = require('./helpers/block') const randomBytes = require('it-buffer-stream') const uint8ArrayConcat = require('uint8arrays/concat') +const asAsyncIterable = require('./helpers/as-async-iterable') const ONE_MEG = Math.pow(1024, 2) @@ -36,7 +37,7 @@ describe('exporter subtree', () => { content: randomBytes(ONE_MEG) }, { path: './level-1/200Bytes.txt', - content + content: asAsyncIterable(content) }], block)) if (!imported) { @@ -64,7 +65,7 @@ describe('exporter subtree', () => { content: randomBytes(ONE_MEG) }, { path: './level-1/200Bytes.txt', - content + content: asAsyncIterable(content) }, { path: './level-1/level-2' }], block)) @@ -121,12 +122,12 @@ describe('exporter subtree', () => { content: randomBytes(ONE_MEG) }, { path: './level-1/200Bytes.txt', - content + content: asAsyncIterable(content) }, { path: './level-1/level-2' }, { path: './level-1/level-2/200Bytes.txt', - content + content: asAsyncIterable(content) }], block)) if (!imported) {

diff --git a/packages/ipfs-unixfs-exporter/test/exporter.spec.js b/packages/ipfs-unixfs-exporter/test/exporter.spec.js index 47cca47d..dcc3c355 100644 --- a/packages/ipfs-unixfs-exporter/test/exporter.spec.js +++ b/packages/ipfs-unixfs-exporter/test/exporter.spec.js @@ -25,6 +25,7 @@ const blockApi = require('./helpers/block') const uint8ArrayFromString = require('uint8arrays/from-string') const uint8ArrayToString = require('uint8arrays/to-string') const uint8ArrayConcat = require('uint8arrays/concat') +const asAsyncIterable = require('./helpers/as-async-iterable') const ONE_MEG = Math.pow(1024, 2) @@ -79,7 +80,7 @@ describe('exporter', () => { async function addTestFile ({ file, strategy = 'balanced', path = '/foo', maxChunkSize, rawLeaves }) { const result = await all(importer([{ path, - content: file + content: asAsyncIterable(file) }], 
block, { strategy, rawLeaves, @@ -195,7 +196,7 @@ describe('exporter', () => { const files = await all(importer([{ path: filePath, - content: smallFile + content: asAsyncIterable(smallFile) }], block)) const path = `/ipfs/${files[1].cid.toBaseEncodedString()}/${fileName}` @@ -211,7 +212,7 @@ describe('exporter', () => { const files = await all(importer([{ path: filePath, - content: smallFile + content: asAsyncIterable(smallFile) }], block)) const path = `/ipfs/${files[1].cid.toBaseEncodedString()}/${fileName}` @@ -613,7 +614,7 @@ describe('exporter', () => { it('exports a large file > 1mb imported with raw leaves', async () => { const imported = await first(importer([{ path: '1.2MiB.txt', - content: bigFile + content: asAsyncIterable(bigFile) }], block, { rawLeaves: true })) @@ -890,7 +891,7 @@ describe('exporter', () => { const imported = await first(importer([{ path: '200Bytes.txt', - content: bigFile + content: asAsyncIterable(bigFile) }], block, { rawLeaves: true })) @@ -915,7 +916,7 @@ describe('exporter', () => { it('exports a raw leaf', async () => { const imported = await first(importer([{ path: '200Bytes.txt', - content: smallFile + content: asAsyncIterable(smallFile) }], block, { rawLeaves: true })) @@ -1022,7 +1023,7 @@ describe('exporter', () => { it('exports a node with depth', async () => { const imported = await all(importer([{ path: '/foo/bar/baz.txt', - content: uint8ArrayFromString('hello world') + content: asAsyncIterable(uint8ArrayFromString('hello world')) }], block)) const exported = await exporter(imported[0].cid, ipld) @@ -1033,13 +1034,13 @@ describe('exporter', () => { it('exports a node recursively with depth', async () => { const dir = await last(importer([{ path: '/foo/bar/baz.txt', - content: uint8ArrayFromString('hello world') + content: asAsyncIterable(uint8ArrayFromString('hello world')) }, { path: '/foo/qux.txt', - content: uint8ArrayFromString('hello world') + content: asAsyncIterable(uint8ArrayFromString('hello world')) }, { path: '/foo/bar/quux.txt', - content: uint8ArrayFromString('hello world') + content: asAsyncIterable(uint8ArrayFromString('hello world')) }], block)) if (!dir) { diff --git a/packages/ipfs-unixfs-exporter/test/helpers/as-async-iterable.js b/packages/ipfs-unixfs-exporter/test/helpers/as-async-iterable.js new file mode 100644 index 00000000..7aba5df2 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/test/helpers/as-async-iterable.js @@ -0,0 +1,14 @@ +'use strict' + +/** + * @param {Uint8Array | Uint8Array[]} arr + */ +async function * asAsyncIterable (arr) { + if (!Array.isArray(arr)) { + arr = [arr] + } + + yield * arr +} + +module.exports = asAsyncIterable diff --git a/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.js b/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.js index 64be0cdc..acffcec5 100644 --- a/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.js +++ b/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.js @@ -15,6 +15,7 @@ const blockApi = require('./helpers/block') const uint8ArrayFromString = require('uint8arrays/from-string') const uint8ArrayToString = require('uint8arrays/to-string') const uint8ArrayConcat = require('uint8arrays/concat') +const asAsyncIterable = require('./helpers/as-async-iterable') /** * @typedef {import('../src').UnixFSEntry} UnixFSEntry @@ -37,7 +38,7 @@ describe('builder: directory sharding', () => { const content = uint8ArrayFromString('i have the best bytes') const nodes = await all(importer([{ path: 'a/b', - content + 
content: asAsyncIterable(content) }], block, { shardSplitThreshold: Infinity // never shard })) @@ -73,7 +74,7 @@ describe('builder: directory sharding', () => { it('yields a sharded dir', async () => { const nodes = await all(importer([{ path: 'a/b', - content: uint8ArrayFromString('i have the best bytes') + content: asAsyncIterable(uint8ArrayFromString('i have the best bytes')) }], block, { shardSplitThreshold: 0 // always shard })) @@ -95,7 +96,7 @@ describe('builder: directory sharding', () => { const content = 'i have the best bytes' const nodes = await all(importer([{ path: 'a/b', - content: uint8ArrayFromString(content) + content: asAsyncIterable(uint8ArrayFromString(content)) }], block, { shardSplitThreshold: Infinity // never shard })) @@ -132,7 +133,7 @@ describe('builder: directory sharding', () => { const content = 'i have the best bytes' const nodes = await all(importer([{ path: 'a/b', - content: uint8ArrayFromString(content) + content: asAsyncIterable(uint8ArrayFromString(content)) }], block, { shardSplitThreshold: 0 // always shard })) @@ -177,7 +178,7 @@ describe('builder: directory sharding', () => { for (let i = 0; i < maxDirs; i++) { yield { path: 'big/' + i.toString().padStart(4, '0'), - content: uint8ArrayFromString(i.toString()) + content: asAsyncIterable(uint8ArrayFromString(i.toString())) } } } @@ -196,7 +197,7 @@ describe('builder: directory sharding', () => { for (let i = 0; i < maxDirs; i++) { yield { path: 'big/' + i.toString().padStart(4, '0'), - content: uint8ArrayFromString(i.toString()) + content: asAsyncIterable(uint8ArrayFromString(i.toString())) } } } @@ -249,7 +250,7 @@ describe('builder: directory sharding', () => { yield { path: dir.concat(i.toString().padStart(4, '0')).join('/'), - content: uint8ArrayFromString(i.toString()) + content: asAsyncIterable(uint8ArrayFromString(i.toString())) } pending-- diff --git a/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.js b/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.js index 8aa1221f..f7158c9c 100644 --- a/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.js +++ b/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.js @@ -13,6 +13,7 @@ const blockApi = require('./helpers/block') const uint8ArrayFromString = require('uint8arrays/from-string') const uint8ArrayToString = require('uint8arrays/to-string') const uint8ArrayConcat = require('uint8arrays/concat') +const asAsyncIterable = require('./helpers/as-async-iterable') describe('import and export: directory', () => { const rootHash = 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK' @@ -31,16 +32,16 @@ describe('import and export: directory', () => { const source = [{ path: 'a/b/c/d/e', - content: uint8ArrayFromString('banana') + content: asAsyncIterable(uint8ArrayFromString('banana')) }, { path: 'a/b/c/d/f', - content: uint8ArrayFromString('strawberry') + content: asAsyncIterable(uint8ArrayFromString('strawberry')) }, { path: 'a/b/g', - content: uint8ArrayFromString('ice') + content: asAsyncIterable(uint8ArrayFromString('ice')) }, { path: 'a/b/h', - content: uint8ArrayFromString('cream') + content: asAsyncIterable(uint8ArrayFromString('cream')) }] const files = await all(importer(source, block)) diff --git a/packages/ipfs-unixfs-exporter/test/import-export.spec.js b/packages/ipfs-unixfs-exporter/test/import-export.spec.js index 90d3b2e3..245f830a 100644 --- a/packages/ipfs-unixfs-exporter/test/import-export.spec.js +++ b/packages/ipfs-unixfs-exporter/test/import-export.spec.js @@ -13,6 +13,7 @@ 
const loadFixture = require('aegir/utils/fixtures') const isNode = require('detect-node') const bigFile = loadFixture((isNode ? __dirname : 'test') + '/fixtures/1.2MiB.txt') const blockApi = require('./helpers/block') +const asAsyncIterable = require('./helpers/as-async-iterable') const { importer } = require('ipfs-unixfs-importer') const { exporter } = require('../src') @@ -42,7 +43,7 @@ describe('import and export', function () { it('imports and exports', async () => { const path = `${strategy}-big.dat` - const values = [{ path: path, content: bigFile }] + const values = [{ path: path, content: asAsyncIterable(bigFile) }] // @ts-ignore for await (const file of importer(values, block, importerOptions)) { diff --git a/packages/ipfs-unixfs-exporter/test/importer.spec.js b/packages/ipfs-unixfs-exporter/test/importer.spec.js index 0f7b2a77..e4646fbe 100644 --- a/packages/ipfs-unixfs-exporter/test/importer.spec.js +++ b/packages/ipfs-unixfs-exporter/test/importer.spec.js @@ -25,9 +25,10 @@ const first = require('it-first') const blockApi = require('./helpers/block') const uint8ArrayConcat = require('uint8arrays/concat') const uint8ArrayFromString = require('uint8arrays/from-string') -const uint8ArrayToString = require('uint8arrays/to-string') +const asAsyncIterable = require('./helpers/as-async-iterable') const last = require('it-last') const CID = require('cids') +const { parseMtime } = require('ipfs-unixfs') /** * @typedef {import('ipld')} IPLD @@ -207,7 +208,7 @@ const strategyOverrides = { const checkLeafNodeTypes = async (block, ipld, options, expected) => { const file = await first(importer([{ path: 'foo', - content: new Uint8Array(262144 + 5).fill(1) + content: asAsyncIterable(new Uint8Array(262144 + 5).fill(1)) }], block, options)) if (!file) { @@ -240,7 +241,7 @@ const checkLeafNodeTypes = async (block, ipld, options, expected) => { const checkNodeLinks = async (block, ipld, options, expected) => { for await (const file of importer([{ path: 'foo', - content: new Uint8Array(100).fill(1) + content: asAsyncIterable(new Uint8Array(100).fill(1)) }], block, options)) { const node = await ipld.get(file.cid) const meta = UnixFS.unmarshal(node.Data) @@ -415,7 +416,7 @@ strategies.forEach((strategy) => { it('doesn\'t yield anything on empty file', async () => { const files = await all(importer([{ path: 'emptyfile', - content: new Uint8Array(0) + content: asAsyncIterable(new Uint8Array(0)) }], block, options)) expect(files.length).to.eql(1) @@ -427,36 +428,20 @@ strategies.forEach((strategy) => { it('supports more than one root', async () => { const files = await all(importer([{ path: '200Bytes.txt', - content: smallFile + content: asAsyncIterable(smallFile) }, { path: '200Bytes.txt', - content: bigFile + content: asAsyncIterable(bigFile) }], block, options)) expect(files).to.have.lengthOf(2) }) - it('accepts strings as content', async () => { - const content = 'I am a string' - const res = await all(importer([{ - path: '200Bytes.txt', - content - }], block, options)) - - const file = await exporter(res[0].cid, ipld) - - if (file.type !== 'file') { - throw new Error('Unexpected type') - } - - expect(uint8ArrayToString(uint8ArrayConcat(await all(file.content())))).to.equal(content) - }) - it('small file with an escaped slash in the title', async () => { const filePath = `small-\\/file-${Math.random()}.txt` const files = await all(importer([{ path: filePath, - content: smallFile + content: asAsyncIterable(smallFile) }], block, options)) expect(files.length).to.equal(1) @@ -467,7 +452,7 @@ 
strategies.forEach((strategy) => { const filePath = `small-[v]-file-${Math.random()}.txt` const files = await all(importer([{ path: filePath, - content: smallFile + content: asAsyncIterable(smallFile) }], block, options)) expect(files.length).to.equal(1) @@ -477,7 +462,7 @@ strategies.forEach((strategy) => { it('small file as buffer (smaller than a chunk)', async () => { const files = await all(importer([{ path: '200Bytes.txt', - content: smallFile + content: asAsyncIterable(smallFile) }], block, options)) expectFiles(files, [ @@ -488,7 +473,7 @@ strategies.forEach((strategy) => { it('small file as array (smaller than a chunk)', async () => { const files = await all(importer([{ path: '200Bytes.txt', - content: Uint8Array.from(smallFile) + content: asAsyncIterable(Uint8Array.from(smallFile)) }], block, options)) expectFiles(files, [ @@ -496,21 +481,10 @@ strategies.forEach((strategy) => { ]) }) - it('small file as string (smaller than a chunk)', async () => { - const files = await all(importer([{ - path: 'small.txt', - content: 'this is a file\n' - }], block, options)) - - expectFiles(files, [ - 'small.txt' - ]) - }) - it('small file (smaller than a chunk) with raw leaves', async () => { const files = await all(importer([{ path: '200Bytes.txt', - content: smallFile + content: asAsyncIterable(smallFile) }], block, { ...options, rawLeaves: true @@ -524,7 +498,7 @@ strategies.forEach((strategy) => { it('small file (smaller than a chunk) with raw leaves and mode', async () => { const files = await all(importer([{ path: '200Bytes.txt', - content: smallFile, + content: asAsyncIterable(smallFile), mode: 0o123 }], block, { ...options, @@ -539,7 +513,7 @@ strategies.forEach((strategy) => { it('small file (smaller than a chunk) with raw leaves and mtime', async () => { const files = await all(importer([{ path: '200Bytes.txt', - content: smallFile, + content: asAsyncIterable(smallFile), mtime: { secs: 10, nsecs: 0 @@ -557,7 +531,7 @@ strategies.forEach((strategy) => { it('small file (smaller than a chunk) with raw leaves and metadata', async () => { const files = await all(importer([{ path: '200Bytes.txt', - content: smallFile, + content: asAsyncIterable(smallFile), mode: 0o123, mtime: { secs: 10, @@ -576,7 +550,7 @@ strategies.forEach((strategy) => { it('small file (smaller than a chunk) inside a dir', async () => { const files = await all(importer([{ path: 'foo/bar/200Bytes.txt', - content: smallFile + content: asAsyncIterable(smallFile) }], block, options)) expectFiles(files, [ @@ -591,7 +565,7 @@ strategies.forEach((strategy) => { const files = await all(importer([{ path: '1.2MiB.txt', - content: bigFile + content: asAsyncIterable(bigFile) }], block, options)) expectFiles(files, [ @@ -604,7 +578,7 @@ strategies.forEach((strategy) => { const files = await all(importer([{ path: 'foo-big/1.2MiB.txt', - content: bigFile + content: asAsyncIterable(bigFile) }], block, options)) expectFiles(files, [ @@ -626,10 +600,10 @@ strategies.forEach((strategy) => { it('directory with files', async () => { const files = await all(importer([{ path: 'pim/200Bytes.txt', - content: smallFile + content: asAsyncIterable(smallFile) }, { path: 'pim/1.2MiB.txt', - content: bigFile + content: asAsyncIterable(bigFile) }], block, options)) expectFiles(files, [ @@ -642,13 +616,13 @@ strategies.forEach((strategy) => { it('nested directory (2 levels deep)', async () => { const files = await all(importer([{ path: 'pam/pum/200Bytes.txt', - content: smallFile + content: asAsyncIterable(smallFile) }, { path: 'pam/pum/1.2MiB.txt', - 
content: bigFile + content: asAsyncIterable(bigFile) }, { path: 'pam/1.2MiB.txt', - content: bigFile + content: asAsyncIterable(bigFile) }], block, options)) const result = stringifyMh(files) @@ -688,7 +662,7 @@ strategies.forEach((strategy) => { const content = String(Math.random() + Date.now()) const files = await all(importer([{ path: content + '.txt', - content: uint8ArrayFromString(content) + content: asAsyncIterable(uint8ArrayFromString(content)) }], block, { onlyHash: true })) @@ -716,7 +690,7 @@ strategies.forEach((strategy) => { await all(importer([{ path, - content: bigFile + content: asAsyncIterable(bigFile) }], block, options)) expect(options.progress.called).to.equal(true) @@ -757,7 +731,10 @@ strategies.forEach((strategy) => { shardSplitThreshold: 3 } - const files = await all(importer(inputFiles, block, options)) + const files = await all(importer(inputFiles.map(file => ({ + ...file, + content: asAsyncIterable(file.content) + })), block, options)) const file = files[0] expect(file).to.exist() @@ -826,7 +803,7 @@ strategies.forEach((strategy) => { for await (const file of importer([{ path: '1.2MiB.txt', - content: bigFile + content: asAsyncIterable(bigFile) }], block, options)) { for await (const { cid } of collectLeafCids(file.cid, ipld)) { expect(cid).to.have.property('codec', 'raw') @@ -845,8 +822,8 @@ strategies.forEach((strategy) => { for await (const file of importer([{ path: '1.2MiB.txt', - content: bigFile, - mtime: now + content: asAsyncIterable(bigFile), + mtime: parseMtime(now) }], block, options)) { const node = await exporter(file.cid, ipld) @@ -861,7 +838,7 @@ strategies.forEach((strategy) => { const entries = await all(importer([{ path: '/foo', - mtime: now + mtime: parseMtime(now) }], block)) const node = await exporter(entries[0].cid, ipld) @@ -876,11 +853,11 @@ strategies.forEach((strategy) => { const entries = await all(importer([{ path: '/foo', - mtime: now, + mtime: parseMtime(now), mode: perms }, { path: '/foo/bar.txt', - content: bigFile + content: asAsyncIterable(bigFile) }], block)) const nodes = await all(recursive(entries[entries.length - 1].cid, ipld)) @@ -902,11 +879,11 @@ strategies.forEach((strategy) => { const entries = await all(importer([{ path: '/foo/bar', - mtime: now, + mtime: parseMtime(now), mode: perms }, { path: '/foo/bar/baz.txt', - content: bigFile + content: asAsyncIterable(bigFile) }], block)) const nodes = await all(recursive(entries[entries.length - 1].cid, ipld)) @@ -928,16 +905,16 @@ strategies.forEach((strategy) => { const entries = await all(importer([{ path: '/foo/bar/qux.txt', - content: bigFile + content: asAsyncIterable(bigFile) }, { path: '/foo/bar', - mtime: now, + mtime: parseMtime(now), mode: perms }, { path: '/foo/quux' }, { path: '/foo/bar/baz.txt', - content: bigFile + content: asAsyncIterable(bigFile) }], block)) const nodes = await all(recursive(entries[entries.length - 1].cid, ipld)) @@ -958,13 +935,13 @@ strategies.forEach((strategy) => { const entries = await all(importer([{ path: '/foo', - mtime: now + mtime: parseMtime(now) }, { path: '/foo/bar.txt', - content: bigFile + content: asAsyncIterable(bigFile) }, { path: '/foo/baz.txt', - content: bigFile + content: asAsyncIterable(bigFile) }, { path: '/foo/qux' }], block, { @@ -991,7 +968,7 @@ strategies.forEach((strategy) => { for await (const file of importer([{ path: '1.2MiB.txt', - content: bigFile, + content: asAsyncIterable(bigFile), mode }], block, options)) { const node = await exporter(file.cid, ipld) @@ -1022,11 +999,11 @@ strategies.forEach((strategy) 
=> { const entries = await all(importer([{ path: '/foo/file1.txt', - content: bigFile, + content: asAsyncIterable(bigFile), mode: mode1 }, { path: '/foo/file2.txt', - content: bigFile, + content: asAsyncIterable(bigFile), mode: mode2 }], block)) @@ -1044,11 +1021,11 @@ strategies.forEach((strategy) => { const entries = await all(importer([{ path: '/foo/file1.txt', - content: bigFile, + content: asAsyncIterable(bigFile), mode: mode }, { path: '/foo/bar/baz/file2.txt', - content: bigFile + content: asAsyncIterable(bigFile) }], block)) const node1 = await exporter(entries[0].cid, ipld) @@ -1063,7 +1040,7 @@ strategies.forEach((strategy) => { const entries = await all(importer([{ path: '/foo/file1.txt', - content: bigFile + content: asAsyncIterable(bigFile) }], block)) const node1 = await exporter(entries[0].cid, ipld) @@ -1091,6 +1068,7 @@ describe('configuration', () => { const cid = new CID('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') const unixfs = new UnixFS({ type: 'directory' }) + // @ts-expect-error custom dag builder expects weird data const entries = await all(importer([{ path: 'path', content: 'content' }], block, { @@ -1126,10 +1104,10 @@ describe('configuration', () => { let chunked = false const entries = await all(importer([{ path: 'path', - content: uint8ArrayFromString('content') + content: asAsyncIterable(uint8ArrayFromString('content')) }], block, { - /** @type {import('ipfs-unixfs-importer/src/types').ChunkValidator} */ - chunkValidator: async function * (source, opts) { // eslint-disable-line require-await + /** @type {import('ipfs-unixfs-importer').ChunkValidator} */ + chunkValidator: async function * (source) { // eslint-disable-line require-await validated = true for await (const str of source) { @@ -1140,8 +1118,8 @@ describe('configuration', () => { } } }, - /** @type {import('ipfs-unixfs-importer/src/types').Chunker} */ - chunker: async function * (source, opts) { // eslint-disable-line require-await + /** @type {import('ipfs-unixfs-importer').Chunker} */ + chunker: async function * (source) { // eslint-disable-line require-await chunked = true yield * source } @@ -1159,7 +1137,7 @@ describe('configuration', () => { const buf = uint8ArrayFromString('content') const result = await last(importer([{ - content: buf + content: asAsyncIterable(buf) }], block, { cidVersion: 0, rawLeaves: false @@ -1172,7 +1150,7 @@ describe('configuration', () => { const { cid: cidV0 } = result const result2 = await last(importer([{ - content: buf + content: asAsyncIterable(buf) }], block, { cidVersion: 1, rawLeaves: false

diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/index.js b/packages/ipfs-unixfs-importer/src/dag-builder/index.js index 9a082725..5bb45d62 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/index.js +++ b/packages/ipfs-unixfs-importer/src/dag-builder/index.js @@ -13,21 +13,43 @@ const errCode = require('err-code') */ /** - * @param {any} item - * @returns {item is ArrayLike<number>} + * @param {any} thing + * @returns {thing is Iterable<Uint8Array>} */ -function isArrayLike (item) { - return ( - Array.isArray(item) || - (Boolean(item) && - typeof item === 'object' && - typeof (item.length) === 'number' && - (item.length === 0 || - (item.length > 0 && - (item.length - 1) in item) - ) - ) - ) +function isIterable (thing) { + return Symbol.iterator in thing +} + +/** + * @param {any} thing + * @returns {thing is AsyncIterable<Uint8Array>} + */ +function isAsyncIterable (thing) { + return Symbol.asyncIterator in thing +} + +/** + * @param {Uint8Array | AsyncIterable<Uint8Array> | Iterable<Uint8Array>} content + * @returns {AsyncIterable<Uint8Array>} + */ +function contentAsAsyncIterable (content) { + try { + if (content instanceof Uint8Array) { + return (async function * () { + yield content + }()) + } else if (isIterable(content)) { + return (async function * () { + yield * content + }()) + } else if (isAsyncIterable(content)) { + return content + } + } catch { + throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT') + } + + throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT') } /** @@ -47,21 +69,6 @@ async function * dagBuilder (source, block, options) { } if (entry.content) { - const source = entry.content - - /** @type {AsyncIterable<string | Uint8Array | ArrayLike<number>>} */ - const content = (async function * () { - // wrap in iterator if it is a, string, Uint8Array or array-like - if (typeof source === 'string' || isArrayLike(source)) { - yield source - // @ts-ignore - } else if (source[Symbol.asyncIterator] || source[Symbol.iterator]) { - yield * source - } else { - throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT') - } - }()) - /** * @type {Chunker} */ @@ -91,7 +98,7 @@ async function * dagBuilder (source, block, options) { path: entry.path, mtime: entry.mtime, mode: entry.mode, - content: chunker(chunkValidator(content, options), options) + content: chunker(chunkValidator(contentAsAsyncIterable(entry.content), options), options) } yield () => fileBuilder(file, block, options)

diff --git a/packages/ipfs-unixfs-importer/src/index.js b/packages/ipfs-unixfs-importer/src/index.js index 3418f69b..8297c672 100644 --- a/packages/ipfs-unixfs-importer/src/index.js +++ b/packages/ipfs-unixfs-importer/src/index.js @@ -11,10 +11,18 @@ const defaultOptions = require('./options') * @typedef {import('./types').Directory} Directory * @typedef {import('./types').File} File * @typedef {import('./types').ImportResult} ImportResult + * + * @typedef {import('./types').Chunker} Chunker + * @typedef {import('./types').DAGBuilder} DAGBuilder + * @typedef {import('./types').TreeBuilder} TreeBuilder + * @typedef {import('./types').BufferImporter} BufferImporter + * @typedef {import('./types').ChunkValidator} ChunkValidator + * @typedef {import('./types').Reducer} Reducer + * @typedef {import('./types').ProgressHandler} ProgressHandler */ /** - * @param {AsyncIterable<ImportCandidate> | Iterable<ImportCandidate>} source + * @param {AsyncIterable<ImportCandidate> | Iterable<ImportCandidate> | ImportCandidate} source * @param {BlockAPI} block * @param {UserImporterOptions} options */ @@ -37,7 +45,18 @@ async function * importer (source, block, options = {}) { treeBuilder = require('./tree-builder') } - for await (const entry of treeBuilder(parallelBatch(dagBuilder(source, block, opts), opts.fileImportConcurrency), block, opts)) { + /** @type {AsyncIterable<ImportCandidate> | Iterable<ImportCandidate>} */ + let candidates + + if (Symbol.asyncIterator in source || Symbol.iterator in source) { + // @ts-ignore + candidates = source + } else { + // @ts-ignore + candidates = [source] + } + + for await (const entry of treeBuilder(parallelBatch(dagBuilder(candidates, block, opts), opts.fileImportConcurrency), block, opts)) { yield { cid: entry.cid, path: entry.path,

diff --git a/packages/ipfs-unixfs-importer/src/types.d.ts b/packages/ipfs-unixfs-importer/src/types.d.ts index 4cfda048..eacedca3 100644 --- a/packages/ipfs-unixfs-importer/src/types.d.ts +++ b/packages/ipfs-unixfs-importer/src/types.d.ts @@ -42,7 +42,7 @@ type Chunker = (source: AsyncIterable<Uint8Array>, options: ImporterOptions) => type DAGBuilder = (source: AsyncIterable<ImportCandidate> | Iterable<ImportCandidate>, block: BlockAPI, options: ImporterOptions) => AsyncIterable<() => Promise<InProgressImportResult>> type TreeBuilder = (source: AsyncIterable<InProgressImportResult>, block: BlockAPI, options: ImporterOptions) => AsyncIterable<ImportResult> type BufferImporter = (file: File, block: BlockAPI, options: ImporterOptions) => AsyncIterable<() => Promise<InProgressImportResult>> -type ChunkValidator = (source: AsyncIterable<Uint8Array | string | ArrayLike<number>>, options: ImporterOptions) => AsyncIterable<Uint8Array> +type ChunkValidator = (source: AsyncIterable<Uint8Array>, options: ImporterOptions) => AsyncIterable<Uint8Array> type UnixFSV1DagBuilder<T> = (item: T, block: BlockAPI, options: ImporterOptions) => Promise<InProgressImportResult> type Reducer = (leaves: InProgressImportResult[]) => Promise<InProgressImportResult>

diff --git a/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.js b/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.js index 5652ffbd..42e22930 100644 --- a/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.js +++ b/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.js @@ -9,6 +9,7 @@ const builder = require('../src/dag-builder') const all = require('it-all') const blockApi = require('./helpers/block') const defaultOptions = require('../src/options') +const asAsyncIterable = require('./helpers/as-async-iterable') describe('builder: onlyHash', () => { /** @type {IPLD} */ @@ -24,7 +25,7 @@ describe('builder: onlyHash', () => { it('will only chunk and hash if passed an "onlyHash" option', async () => { const nodes = await all(builder([{ path: 'foo.txt', - content: Uint8Array.from([0, 1, 2, 3, 4]) + content: asAsyncIterable(Uint8Array.from([0, 1, 2, 3, 4])) }], block, { ...defaultOptions({}), onlyHash: true

diff --git a/packages/ipfs-unixfs-importer/test/builder.spec.js b/packages/ipfs-unixfs-importer/test/builder.spec.js index 042ba6a5..49a92d87 100644 --- a/packages/ipfs-unixfs-importer/test/builder.spec.js +++ b/packages/ipfs-unixfs-importer/test/builder.spec.js @@ -13,6 +13,7 @@ const first = require('it-first') const blockApi = require('./helpers/block') const uint8ArrayFromString = require('uint8arrays/from-string') const defaultOptions = require('../src/options') +const asAsyncIterable = require('./helpers/as-async-iterable') describe('builder', () => { /** @type {import('ipld')} */ @@ -30,10 +31,10 @@ describe('builder', () => { it('allows multihash hash algorithm to be specified', async () => { for (let i = 0; i < testMultihashes.length; i++) { const hashAlg = testMultihashes[i] - const content = String(Math.random() + Date.now()) + const content = uint8ArrayFromString(String(Math.random() + Date.now())) const inputFile = { path: content + '.txt', - content: uint8ArrayFromString(content) + content: asAsyncIterable(content) } const result = await first(builder([inputFile], block, { @@ -57,7 +58,7 @@ describe('builder', () => { const node = await ipld.get(imported.cid) const fetchedContent = UnixFS.unmarshal(node.Data).data - expect(fetchedContent).to.deep.equal(inputFile.content) + expect(fetchedContent).to.deep.equal(content) } }) @@ -70,7 +71,7 @@ describe('builder', () => { const inputFile = { path: content + '.txt', // Bigger than maxChunkSize - content: new Uint8Array(262144 + 5).fill(1) + content: asAsyncIterable(new Uint8Array(262144 + 5).fill(1)) } const result = await first(builder([inputFile], block, { diff --git a/packages/ipfs-unixfs-importer/test/helpers/as-async-iterable.js b/packages/ipfs-unixfs-importer/test/helpers/as-async-iterable.js index 28a8d3ae..7aba5df2 100644 --- a/packages/ipfs-unixfs-importer/test/helpers/as-async-iterable.js +++ b/packages/ipfs-unixfs-importer/test/helpers/as-async-iterable.js @@ -1,9 +1,13 @@ 'use strict' /** - * @param {Uint8Array[]} arr + * @param {Uint8Array | Uint8Array[]} 
arr */ async function * asAsyncIterable (arr) { + if (!Array.isArray(arr)) { + arr = [arr] + } + yield * arr } diff --git a/packages/ipfs-unixfs-importer/test/helpers/finite-pseudorandom-byte-stream.js b/packages/ipfs-unixfs-importer/test/helpers/finite-pseudorandom-byte-stream.js index e6593605..f1e1ee2d 100644 --- a/packages/ipfs-unixfs-importer/test/helpers/finite-pseudorandom-byte-stream.js +++ b/packages/ipfs-unixfs-importer/test/helpers/finite-pseudorandom-byte-stream.js @@ -6,7 +6,7 @@ const REPEATABLE_CHUNK_SIZE = 300000 * @param {number} maxSize * @param {number} seed */ -module.exports = function * (maxSize, seed) { +module.exports = async function * (maxSize, seed) { const chunks = Math.ceil(maxSize / REPEATABLE_CHUNK_SIZE) let emitted = 0 const buf = new Uint8Array(REPEATABLE_CHUNK_SIZE) diff --git a/packages/ipfs-unixfs-importer/test/helpers/random-byte-stream.js b/packages/ipfs-unixfs-importer/test/helpers/random-byte-stream.js index ff171e37..e147d2ca 100644 --- a/packages/ipfs-unixfs-importer/test/helpers/random-byte-stream.js +++ b/packages/ipfs-unixfs-importer/test/helpers/random-byte-stream.js @@ -3,7 +3,7 @@ /** * @param {number} seed */ -function * randomByteStream (seed) { +async function * randomByteStream (seed) { while (true) { const r = Math.floor(random(seed) * 256) seed = r diff --git a/packages/ipfs-unixfs/src/index.js b/packages/ipfs-unixfs/src/index.js index 89305761..974d6cca 100644 --- a/packages/ipfs-unixfs/src/index.js +++ b/packages/ipfs-unixfs/src/index.js @@ -6,13 +6,8 @@ const { const errcode = require('err-code') /** - * @typedef {object} Mtime - * @property {number} secs - * @property {number} [nsecs] - */ - -/** - * @typedef {null | undefined | { secs: number, nsecs?: number} | { Seconds: number, FractionalNanoseconds?: number} | Mtime | [number, number] | Date} MtimeLike + * @typedef {import('./types').Mtime} Mtime + * @typedef {import('./types').MtimeLike} MtimeLike */ const types = [ @@ -56,46 +51,43 @@ function parseMode (mode) { } /** - * @param {MtimeLike} mtime - * @returns {Mtime | undefined} + * @param {any} input */ -function parseMtime (mtime) { - if (mtime == null) { +function parseMtime (input) { + if (input == null) { return undefined } + /** @type {Mtime | undefined} */ + let mtime + // { secs, nsecs } - if (Object.prototype.hasOwnProperty.call(mtime, 'secs')) { + if (input.secs != null) { mtime = { - // @ts-ignore - secs: mtime.secs, - // @ts-ignore - nsecs: mtime.nsecs + secs: input.secs, + nsecs: input.nsecs } } // UnixFS TimeSpec - if (Object.prototype.hasOwnProperty.call(mtime, 'Seconds')) { - // @ts-ignore + if (input.Seconds != null) { mtime = { - // @ts-ignore - secs: mtime.Seconds, - // @ts-ignore - nsecs: mtime.FractionalNanoseconds + secs: input.Seconds, + nsecs: input.FractionalNanoseconds } } // process.hrtime() - if (Array.isArray(mtime)) { + if (Array.isArray(input)) { mtime = { - secs: mtime[0], - nsecs: mtime[1] + secs: input[0], + nsecs: input[1] } } // Javascript Date - if (mtime instanceof Date) { - const ms = mtime.getTime() + if (input instanceof Date) { + const ms = input.getTime() const secs = Math.floor(ms / 1000) mtime = { @@ -108,13 +100,13 @@ function parseMtime (mtime) { TODO: https://github.com/ipfs/aegir/issues/487 // process.hrtime.bigint() - if (typeof mtime === 'bigint') { - const secs = mtime / BigInt(1e9) - const nsecs = mtime - (secs * BigInt(1e9)) + if (input instanceof BigInt) { + const secs = input / BigInt(1e9) + const nsecs = input - (secs * BigInt(1e9)) mtime = { - secs: parseInt(secs), 
- nsecs: parseInt(nsecs) + secs: parseInt(secs.toString()), + nsecs: parseInt(nsecs.toString()) } } */ @@ -123,12 +115,10 @@ function parseMtime (mtime) { return undefined } - // @ts-ignore - if (mtime.nsecs < 0 || mtime.nsecs > 999999999) { + if (mtime != null && mtime.nsecs != null && (mtime.nsecs < 0 || mtime.nsecs > 999999999)) { throw errcode(new Error('mtime-nsecs must be within the range [0,999999999]'), 'ERR_INVALID_MTIME_NSECS') } - // @ts-ignore return mtime } diff --git a/packages/ipfs-unixfs/src/types.d.ts b/packages/ipfs-unixfs/src/types.d.ts new file mode 100644 index 00000000..cedc5057 --- /dev/null +++ b/packages/ipfs-unixfs/src/types.d.ts @@ -0,0 +1,7 @@ + +export interface Mtime { + secs: number + nsecs?: number +} + +export type MtimeLike = Mtime | { Seconds: number, FractionalNanoseconds?: number } | [number, number] | Date diff --git a/packages/ipfs-unixfs/test/unixfs-format.spec.js b/packages/ipfs-unixfs/test/unixfs-format.spec.js index deefcfd6..6e3d4bb6 100644 --- a/packages/ipfs-unixfs/test/unixfs-format.spec.js +++ b/packages/ipfs-unixfs/test/unixfs-format.spec.js @@ -211,6 +211,68 @@ describe('unixfs-format', () => { expect(unmarshaled).to.have.deep.property('mtime', mtime) }) + it.skip('sets mtime to 0 as BigInt', () => { + const mtime = { + secs: 0, + nsecs: 0 + } + const data = new UnixFS({ + type: 'file' + // TODO: https://github.com/ipfs/aegir/issues/487 + // mtime: BigInt(0) + }) + + const marshaled = data.marshal() + const unmarshaled = UnixFS.unmarshal(marshaled) + expect(unmarshaled).to.have.deep.property('mtime', mtime) + }) + + it.skip('sets mtime to 0 as BigInt literal', () => { + const mtime = { + secs: 0, + nsecs: 0 + } + const data = new UnixFS({ + type: 'file' + // TODO: https://github.com/ipfs/aegir/issues/487 + // mtime: 0n + }) + + const marshaled = data.marshal() + const unmarshaled = UnixFS.unmarshal(marshaled) + expect(unmarshaled).to.have.deep.property('mtime', mtime) + }) + + it('sets mtime to 0 as Date', () => { + const mtime = { + secs: 0, + nsecs: 0 + } + const data = new UnixFS({ + type: 'file', + mtime: new Date(0) + }) + + const marshaled = data.marshal() + const unmarshaled = UnixFS.unmarshal(marshaled) + expect(unmarshaled).to.have.deep.property('mtime', mtime) + }) + + it('sets mtime to 0 as hrtime', () => { + const mtime = { + secs: 0, + nsecs: 0 + } + const data = new UnixFS({ + type: 'file', + mtime: [0, 0] + }) + + const marshaled = data.marshal() + const unmarshaled = UnixFS.unmarshal(marshaled) + expect(unmarshaled).to.have.deep.property('mtime', mtime) + }) + it('survives undefined mtime', () => { const entry = new UnixFS({ type: 'file', From 0487c0d06adad5a6f321809e11f57be55eee43f9 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 19 Mar 2021 13:36:13 +0000 Subject: [PATCH 04/10] chore: change require engine --- package.json | 1 + packages/ipfs-unixfs-exporter/package.json | 4 ++-- packages/ipfs-unixfs-importer/package.json | 2 +- packages/ipfs-unixfs/package.json | 4 ++-- 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/package.json b/package.json index 798be93e..a6b00429 100644 --- a/package.json +++ b/package.json @@ -51,6 +51,7 @@ "packages/*" ], "engines": { + "node": ">=14.0.0", "npm": ">=7.0.0" } } diff --git a/packages/ipfs-unixfs-exporter/package.json b/packages/ipfs-unixfs-exporter/package.json index 159e3c67..9ea152f8 100644 --- a/packages/ipfs-unixfs-exporter/package.json +++ b/packages/ipfs-unixfs-exporter/package.json @@ -28,8 +28,8 @@ "url": 
"https://github.com/ipfs/js-ipfs-unixfs/issues" }, "engines": { - "node": ">=10.0.0", - "npm": ">=4.0.0" + "node": ">=14.0.0", + "npm": ">=7.0.0" }, "homepage": "https://github.com/ipfs/js-ipfs-unixfs#readme", "devDependencies": { diff --git a/packages/ipfs-unixfs-importer/package.json b/packages/ipfs-unixfs-importer/package.json index c45ff4cf..301b4fc9 100644 --- a/packages/ipfs-unixfs-importer/package.json +++ b/packages/ipfs-unixfs-importer/package.json @@ -28,7 +28,7 @@ "url": "https://github.com/ipfs/js-ipfs-unixfs/issues" }, "engines": { - "node": ">=15.0.0", + "node": ">=14.0.0", "npm": ">=7.0.0" }, "homepage": "https://github.com/ipfs/js-ipfs-unixfs#readme", diff --git a/packages/ipfs-unixfs/package.json b/packages/ipfs-unixfs/package.json index 4ce5ba3a..aea732a6 100644 --- a/packages/ipfs-unixfs/package.json +++ b/packages/ipfs-unixfs/package.json @@ -31,8 +31,8 @@ "url": "https://github.com/ipfs/js-ipfs-unixfs/issues" }, "engines": { - "node": ">=10.0.0", - "npm": ">=4.0.0" + "node": ">=14.0.0", + "npm": ">=7.0.0" }, "homepage": "https://github.com/ipfs/js-ipfs-unixfs#readme", "devDependencies": { From 8c21aa329997e00a07783ca11ec162fb62c6b05e Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 19 Mar 2021 13:39:07 +0000 Subject: [PATCH 05/10] chore: try removing prepare from root package.json --- package.json | 1 - 1 file changed, 1 deletion(-) diff --git a/package.json b/package.json index a6b00429..d805210d 100644 --- a/package.json +++ b/package.json @@ -6,7 +6,6 @@ "reset": "lerna run clean && rimraf packages/*/node_modules node_modules", "test": "lerna run test", "coverage": "lerna run coverage", - "prepare": "lerna run prepare", "build": "lerna run build", "clean": "lerna run clean", "lint": "lerna run lint", From ac8266fd322bb9b7ce244b15cb952a979a357f89 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 19 Mar 2021 13:51:24 +0000 Subject: [PATCH 06/10] chore: not sure why this is necessary --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index d805210d..ae918625 100644 --- a/package.json +++ b/package.json @@ -3,7 +3,7 @@ "version": "1.0.0", "description": "JS implementation of the IPFS UnixFS", "scripts": { - "reset": "lerna run clean && rimraf packages/*/node_modules node_modules", + "reset": "lerna run clean && rimraf packages/*/node_modules node_modules packages/*/package-lock.json package-lock.json", "test": "lerna run test", "coverage": "lerna run coverage", "build": "lerna run build", From b95aa4ad6077f502c2de76de462b5e8502dce4c9 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 19 Mar 2021 13:51:54 +0000 Subject: [PATCH 07/10] chore: restore prepare --- package.json | 1 + 1 file changed, 1 insertion(+) diff --git a/package.json b/package.json index ae918625..b32abee4 100644 --- a/package.json +++ b/package.json @@ -6,6 +6,7 @@ "reset": "lerna run clean && rimraf packages/*/node_modules node_modules packages/*/package-lock.json package-lock.json", "test": "lerna run test", "coverage": "lerna run coverage", + "prepare": "lerna run prepare", "build": "lerna run build", "clean": "lerna run clean", "lint": "lerna run lint", From b641e3a485ed9d9d6431b5c1ca6d521abf413be7 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 19 Mar 2021 13:57:42 +0000 Subject: [PATCH 08/10] chore: move expect error --- packages/ipfs-unixfs-exporter/test/importer.spec.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/packages/ipfs-unixfs-exporter/test/importer.spec.js b/packages/ipfs-unixfs-exporter/test/importer.spec.js index e4646fbe..79a16a2b 100644 --- a/packages/ipfs-unixfs-exporter/test/importer.spec.js +++ b/packages/ipfs-unixfs-exporter/test/importer.spec.js @@ -379,9 +379,9 @@ strategies.forEach((strategy) => { it('fails on bad content', async () => { try { - // @ts-expect-error bad content await all(importer([{ path: '200Bytes.txt', + // @ts-expect-error bad content content: 7 }], block, options)) throw new Error('No error was thrown') @@ -392,9 +392,9 @@ strategies.forEach((strategy) => { it('fails on an iterator that yields bad content', async () => { try { - // @ts-expect-error bad content await all(importer([{ path: '200Bytes.txt', + // @ts-expect-error bad content content: { [Symbol.iterator]: function * () { yield 7 From 148fdb223f6f602302e5975c98b3c953a2ac0eb9 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 19 Mar 2021 14:02:29 +0000 Subject: [PATCH 09/10] chore: move expect error --- packages/ipfs-unixfs-exporter/test/importer.spec.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/ipfs-unixfs-exporter/test/importer.spec.js b/packages/ipfs-unixfs-exporter/test/importer.spec.js index 79a16a2b..e4646fbe 100644 --- a/packages/ipfs-unixfs-exporter/test/importer.spec.js +++ b/packages/ipfs-unixfs-exporter/test/importer.spec.js @@ -379,9 +379,9 @@ strategies.forEach((strategy) => { it('fails on bad content', async () => { try { + // @ts-expect-error bad content await all(importer([{ path: '200Bytes.txt', - // @ts-expect-error bad content content: 7 }], block, options)) throw new Error('No error was thrown') @@ -392,9 +392,9 @@ strategies.forEach((strategy) => { it('fails on an iterator that yields bad content', async () => { try { + // @ts-expect-error bad content await all(importer([{ path: '200Bytes.txt', - // @ts-expect-error bad content content: { [Symbol.iterator]: function * () { yield 7 From 154ca564b19581958f9988021b9782f0c07d05c0 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 19 Mar 2021 14:04:22 +0000 Subject: [PATCH 10/10] chore: move expect error --- packages/ipfs-unixfs-exporter/test/importer.spec.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/ipfs-unixfs-exporter/test/importer.spec.js b/packages/ipfs-unixfs-exporter/test/importer.spec.js index e4646fbe..7f975db2 100644 --- a/packages/ipfs-unixfs-exporter/test/importer.spec.js +++ b/packages/ipfs-unixfs-exporter/test/importer.spec.js @@ -392,10 +392,10 @@ strategies.forEach((strategy) => { it('fails on an iterator that yields bad content', async () => { try { - // @ts-expect-error bad content await all(importer([{ path: '200Bytes.txt', content: { + // @ts-expect-error bad content [Symbol.iterator]: function * () { yield 7 }
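
Taken together, the series tightens the importer's public contract: `content` must now be an `AsyncIterable<Uint8Array>` (callers wrap strings and buffers themselves), `mtime` is a pre-parsed `Mtime` produced by the newly exported `parseMtime`, and a single `ImportCandidate` may be passed in place of an iterable of candidates. The following is a minimal sketch of what calling code looks like after these patches land; the `block` argument is assumed to be whatever BlockAPI-compatible store the caller already uses (as in the test helpers above), and `addFile` is a hypothetical helper name, not part of the library:

```js
'use strict'

const { importer } = require('ipfs-unixfs-importer')
const { parseMtime } = require('ipfs-unixfs')

// Content must be an AsyncIterable<Uint8Array> now, so wrap a
// buffer in a trivial async generator instead of passing it raw
async function * asAsyncIterable (buf) {
  yield buf
}

/**
 * @param {*} block - assumed: a BlockAPI-compatible block store
 */
async function addFile (block) {
  const candidate = {
    path: 'hello.txt',
    content: asAsyncIterable(new TextEncoder().encode('hello world')),
    // MtimeLike values (Date, hrtime pair, { secs, nsecs }) are parsed up front
    mtime: parseMtime(new Date())
  }

  // importer() yields one entry per imported file or directory
  for await (const entry of importer([candidate], block, { cidVersion: 1, rawLeaves: true })) {
    console.log(entry.path, entry.cid.toString())
  }
}

module.exports = addFile
```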