diff --git a/packages/ipfs-core-utils/package.json b/packages/ipfs-core-utils/package.json index 3c791413bd..c69bbe62d1 100644 --- a/packages/ipfs-core-utils/package.json +++ b/packages/ipfs-core-utils/package.json @@ -11,6 +11,9 @@ "src", "dist" ], + "browser": { + "./src/files/blob.js": "./src/files/blob.browser.js" + }, "repository": { "type": "git", "url": "git+https://github.com/ipfs/js-ipfs.git" @@ -31,7 +34,8 @@ "buffer": "^5.6.0", "cids": "^0.8.3", "err-code": "^2.0.0", - "ipfs-utils": "^2.2.2" + "ipfs-utils": "^2.2.2", + "web-file-polyfill": "^1.0.0" }, "devDependencies": { "aegir": "^23.0.0", @@ -41,4 +45,4 @@ "dirty-chai": "^2.0.1", "it-all": "^1.0.1" } -} +} \ No newline at end of file diff --git a/packages/ipfs-core-utils/src/files/blob.browser.js b/packages/ipfs-core-utils/src/files/blob.browser.js new file mode 100644 index 0000000000..ae4b4adf5a --- /dev/null +++ b/packages/ipfs-core-utils/src/files/blob.browser.js @@ -0,0 +1,25 @@ +// @ts-check +'use strict' +/* eslint-env browser */ + +exports.Blob = Blob +exports.File = File + +/** + * Universal blob reading function + * @param {Blob} blob + * @returns {AsyncIterable} + */ +const readBlob = async function * (blob) { + const { body } = new Response(blob) + const reader = body.getReader() + while (true) { + const next = await reader.read() + if (next.done) { + return + } else { + yield next.value + } + } +} +exports.readBlob = readBlob diff --git a/packages/ipfs-core-utils/src/files/blob.js b/packages/ipfs-core-utils/src/files/blob.js new file mode 100644 index 0000000000..edfbf07458 --- /dev/null +++ b/packages/ipfs-core-utils/src/files/blob.js @@ -0,0 +1,11 @@ +// @ts-check +'use strict' + +// Electron in renderer process has native `Blob` but it would not pick up +// browser override. Therefor we do the runtime check and pick browser verison +// if native Blob is available and node polyfill otherwise. 
+if (typeof Blob === 'function') { + module.exports = require('./blob.browser') +} else { + module.exports = require('./blob.node') +} diff --git a/packages/ipfs-core-utils/src/files/blob.node.js b/packages/ipfs-core-utils/src/files/blob.node.js new file mode 100644 index 0000000000..8365fc90fb --- /dev/null +++ b/packages/ipfs-core-utils/src/files/blob.node.js @@ -0,0 +1,18 @@ +// @ts-check +'use strict' + +const { Blob, File } = require('web-file-polyfill') + +/** + * Universal blob reading function + * @param {InstanceType} blob + * @returns {AsyncIterable} + */ +// eslint-disable-next-line require-await +const readBlob = async function * BlobParts (blob) { + // @ts-ignore - https://github.com/microsoft/TypeScript/issues/29867 + yield * blob.stream() +} +exports.readBlob = readBlob +exports.Blob = Blob +exports.File = File diff --git a/packages/ipfs-core-utils/src/files/normalise-input.js b/packages/ipfs-core-utils/src/files/normalise-input.js index c3f73168c5..79bbd5485f 100644 --- a/packages/ipfs-core-utils/src/files/normalise-input.js +++ b/packages/ipfs-core-utils/src/files/normalise-input.js @@ -1,258 +1,398 @@ +// @ts-check 'use strict' const errCode = require('err-code') -const { Buffer } = require('buffer') -const globalThis = require('ipfs-utils/src/globalthis') +const { File, Blob, readBlob } = require('./blob') -/* - * Transform one of: +/** + * @template T + * @typedef {Iterable|AsyncIterable|ReadableStream} Multiple + */ + +/** + * @typedef {ExtendedFile | FileStream | Directory} NormalizedAddInput + * @typedef {SingleFileInput | MultiFileInput} Input + * @typedef {Blob|Bytes|string|FileObject|Iterable|Multiple} SingleFileInput + * @typedef {Multiple|Multiple|Multiple} MultiFileInput + * + * @typedef {Object} FileObject + * @property {string} [path] + * @property {string} [type] - MIME type of the file. 
+ * @property {FileContent} [content] + * @property {Mode} [mode] + * @property {UnixFSTime} [mtime] + * @typedef {Blob|Bytes|string|Iterable|Multiple} FileContent * - * ``` - * Bytes (Buffer|ArrayBuffer|TypedArray) [single file] - * Bloby (Blob|File) [single file] - * String [single file] - * { path, content: Bytes } [single file] - * { path, content: Bloby } [single file] - * { path, content: String } [single file] - * { path, content: Iterable } [single file] - * { path, content: Iterable } [single file] - * { path, content: AsyncIterable } [single file] - * Iterable [single file] - * Iterable [single file] - * Iterable [multiple files] - * Iterable [multiple files] - * Iterable<{ path, content: Bytes }> [multiple files] - * Iterable<{ path, content: Bloby }> [multiple files] - * Iterable<{ path, content: String }> [multiple files] - * Iterable<{ path, content: Iterable }> [multiple files] - * Iterable<{ path, content: Iterable }> [multiple files] - * Iterable<{ path, content: AsyncIterable }> [multiple files] - * AsyncIterable [single file] - * AsyncIterable [multiple files] - * AsyncIterable [multiple files] - * AsyncIterable<{ path, content: Bytes }> [multiple files] - * AsyncIterable<{ path, content: Bloby }> [multiple files] - * AsyncIterable<{ path, content: String }> [multiple files] - * AsyncIterable<{ path, content: Iterable }> [multiple files] - * AsyncIterable<{ path, content: Iterable }> [multiple files] - * AsyncIterable<{ path, content: AsyncIterable }> [multiple files] - * ``` - * Into: + * @typedef {ArrayBuffer|ArrayBufferView} Bytes * - * ``` - * AsyncIterable<{ path, content: AsyncIterable }> - * ``` + *@typedef {string|number|InstanceType} Mode + * @typedef {Date|UnixFSTime|UnixFSTimeSpec|HRTime} MTime + * @typedef {Object} UnixFSTime + * @property {number} secs + * @property {number} [nsecs] + * + * @typedef {Object} UnixFSTimeSpec + * @property {number} Seconds + * @property {number} [FractionalNanoseconds] + * + * @typedef {[number, 
number]} HRTime - Node process.hrtime + */ + +/** + * Normalizes input into async iterable of extended File or custom FileStream + * objects. * - * @param input Object - * @return AsyncInterable<{ path, content: AsyncIterable }> + * @param {Input} input + * @return {AsyncIterable} */ -module.exports = function normaliseInput (input) { +module.exports = async function * normaliseInput (input) { // must give us something - if (input === null || input === undefined) { + if (input == null) { throw errCode(new Error(`Unexpected input: ${input}`), 'ERR_UNEXPECTED_INPUT') } - // String - if (typeof input === 'string' || input instanceof String) { - return (async function * () { // eslint-disable-line require-await - yield toFileObject(input) - })() - } - - // Buffer|ArrayBuffer|TypedArray - // Blob|File - if (isBytes(input) || isBloby(input)) { - return (async function * () { // eslint-disable-line require-await - yield toFileObject(input) - })() - } - - // Iterable - if (input[Symbol.iterator]) { - return (async function * () { // eslint-disable-line require-await - const iterator = input[Symbol.iterator]() - const first = iterator.next() - if (first.done) return iterator - - // Iterable - // Iterable - if (Number.isInteger(first.value) || isBytes(first.value)) { - yield toFileObject((function * () { - yield first.value - yield * iterator - })()) - return - } - - // Iterable - // Iterable - // Iterable<{ path, content }> - if (isFileObject(first.value) || isBloby(first.value) || typeof first.value === 'string') { - yield toFileObject(first.value) - for (const obj of iterator) { - yield toFileObject(obj) - } - return - } + // If input is a one of the following types + // - string + // - ArrayBuffer + // - ArrayBufferView + // - Blob + // - FileObject + // It is turned into collection of one file (with that content) + const file = asFile(input) + if (file != null) { + yield file + return + } - throw errCode(new Error('Unexpected input: ' + typeof input), 
'ERR_UNEXPECTED_INPUT') - })() + // If input is sync iterable we expect it to be a homogenous collection & + // need to probe it's first item to tell if input to be interpreted as single + // file with multiple chunks or multiple files. + // NOTE: We had to ensure that input was not string or arraybuffer view + // because those are also iterables. + /** @type {null|Iterable<*>} */ + const iterable = asIterable(input) + if (iterable != null) { + yield * normaliseIterableInput(iterable) + + // Return here since we have have exhasted an input iterator. + return } - // window.ReadableStream - if (typeof input.getReader === 'function') { - return (async function * () { - for await (const obj of browserStreamToIt(input)) { - yield toFileObject(obj) - } - })() - } - - // AsyncIterable - if (input[Symbol.asyncIterator]) { - return (async function * () { - const iterator = input[Symbol.asyncIterator]() - const first = await iterator.next() - if (first.done) return iterator - - // AsyncIterable - if (isBytes(first.value)) { - yield toFileObject((async function * () { // eslint-disable-line require-await - yield first.value - yield * iterator - })()) - return - } + // If we got here than we are dealing with async input, which can be either + // readable stream or an async iterable (casting former to later) + const stream = asReadableStream(input) + const asyncIterable = stream + ? iterateReadableStream(stream) + : asAsyncIterable(input) + + // Async iterable (which we assume to be homogenous) may represent single file + // with multilpe chunks or multiple files, to decide we probe it's first item. + if (asyncIterable != null) { + // Create peekable to be able to probe head without consuming it. + const peekable = AsyncPeekable.from(asyncIterable) + const { done, value } = await peekable.peek() + // If done input was empty so we return early. 
+ if (done) { + return + } - // AsyncIterable - // AsyncIterable - // AsyncIterable<{ path, content }> - if (isFileObject(first.value) || isBloby(first.value) || typeof first.value === 'string') { - yield toFileObject(first.value) - for await (const obj of iterator) { - yield toFileObject(obj) + // If first item is array buffer or one of it's views input represents a + // single file with multiple chunks. + if (ArrayBuffer.isView(value) || value instanceof ArrayBuffer) { + yield new FileStream(peekable, '') + // Otherwise we interpret input as async collection of multiple files. + // In that case itemss of input can be either `string`, `Blob` or + // `FileObject`, so we normalize each to a file. If item is anything else + // we throw an exception. + } else { + for await (const content of peekable) { + // Note: If content here is `ArrayBuffer` or a view this will turn it + // into a file, but that can only occur if async iterable contained + // variadic chunks which is not supported. + const file = asFile(content) + if (file) { + yield file + } else { + throw errCode(new Error('Unexpected input: ' + typeof input), 'ERR_UNEXPECTED_INPUT') } - return } + } - throw errCode(new Error('Unexpected input: ' + typeof input), 'ERR_UNEXPECTED_INPUT') - })() - } - - // { path, content: ? } - // Note: Detected _after_ AsyncIterable because Node.js streams have a - // `path` property that passes this check. 
- if (isFileObject(input)) { - return (async function * () { // eslint-disable-line require-await - yield toFileObject(input) - })() + return } throw errCode(new Error('Unexpected input: ' + typeof input), 'ERR_UNEXPECTED_INPUT') } -function toFileObject (input) { - const obj = { - path: input.path || '', - mode: input.mode, - mtime: input.mtime - } - - if (input.content) { - obj.content = toAsyncIterable(input.content) - } else if (!input.path) { // Not already a file object with path or content prop - obj.content = toAsyncIterable(input) +/** + * + * @param {Iterable|Iterable} iterable + * @returns {Iterable} + * @typedef {Iterable|Iterable|Iterable} IterableFileContent + * @typedef {Iterable|Iterable|Iterable} IterableFiles + */ +const normaliseIterableInput = function * (iterable) { + // In order to peek at first without loosing capablitiy to iterate, we + // create peekable which allows us to do that. + const peekable = Peekable.from(iterable) + // First try to interpret it a single file content chunks. + const bytes = asIterableBytes(peekable) + if (bytes != null) { + yield new ExtendedFile(bytes, '') + // If first item is a `Blob`, `string`, or a `FileObject` we treat this + // input as collection of files. We iterate and normalize each each value + // into a file. 
+ } else { + for (const content of peekable) { + const file = asFile(content) + if (file) { + yield file + } else { + throw errCode(new Error('Unexpected input: ' + typeof content), 'ERR_UNEXPECTED_INPUT') + } + } } - return obj + // Otherwise eslint complains about lack of return + return undefined } -function toAsyncIterable (input) { - // Bytes | String - if (isBytes(input) || typeof input === 'string') { - return (async function * () { // eslint-disable-line require-await - yield toBuffer(input) - })() - } - - // Bloby - if (isBloby(input)) { - return blobToAsyncGenerator(input) +/** + * Utility function takes any input and returns a `File|FileStream|Directoriy` + * (containing that input) if input was one of the following types (or `null` + * otherwise): + * - `ArrayBuffer` + * - `ArrayBufferView` + * - `string` + * - `Blob` + * - `FileObject` + * It will return `File` instance when content is of known size (not a stream) + * other it returns a `FileStream`. If input is `FileObject` with no `content` + * returns `Directory`. + * @param {any} input + * @param {string} [name] - optional name for the file + * @returns {null|ExtendedFile|FileStream|Directory} + */ +const asFile = (input, name) => { + const file = asFileFromBlobPart(input, name) + if (file) { + return file + } else { + // If input is a `FileObject` + const fileObject = asFileObject(input) + if (fileObject) { + return fileFromFileObject(fileObject) + } else { + return null + } } +} - // Browser stream - if (typeof input.getReader === 'function') { - return browserStreamToIt(input) +/** + * Utility function takes any input and returns a `File` (containing it) + * if `input` is of `BlobPart` type, otherwise returns `null`. If optional + * `name` is passed it will be used as a file name. 
+ * @param {any} content + * @param {string} [name] + * @param {Object} [options] + * @param {string} [options.path] + * @param {Mode} [options.mode] + * @param {MTime} [options.mtime] + * @returns {ExtendedFile|null} + */ +const asFileFromBlobPart = (content, name, options = {}) => { + if ( + typeof content === 'string' || + ArrayBuffer.isView(content) || + content instanceof ArrayBuffer + ) { + return new ExtendedFile([content], name || '', options) + } else if (content instanceof File) { + // Preserver file name if new name is not provided + return new ExtendedFile([content], name == null ? content.name : '', { + type: content.type, + ...options + }) + } else if (content instanceof Blob) { + // Preserve a mime type. + return new ExtendedFile([content], name || '', { + type: content.type, + ...options + }) + } else if (content instanceof String) { + return new ExtendedFile([content.toString()], name || '', options) + } else { + return null } +} - // Iterator - if (input[Symbol.iterator]) { - return (async function * () { // eslint-disable-line require-await - const iterator = input[Symbol.iterator]() - const first = iterator.next() - if (first.done) return iterator +/** + * Utility function takes a `FileObject` and returns a web `File` (with extended) + * attributes if content is of known size or a `FileStream` if content is an + * async stream or `Directory` if it has no content. + * @param {FileObject} fileObject + * @returns {null|ExtendedFile|FileStream|Directory} + */ +const fileFromFileObject = (fileObject) => { + const { path, mtime, mode, content, type } = fileObject + // `lastModified` is set to `undefined` as we do not want to preserve + // it in case `file.content` was instanceo of a `File`. + const ext = { mtime, mode, path, type, lastModified: undefined } + const name = path == null ? 
'' : basename(path) + const file = asFileFromBlobPart(content, name, ext) + if (file) { + return file + } else { + // If content is empty it is a diretory + if (content == null) { + return new Directory(name, ext) + } - // Iterable - if (Number.isInteger(first.value)) { - yield toBuffer(Array.from((function * () { - yield first.value - yield * iterator - })())) - return + // First try to interpret it a single file content chunks. + const iterable = asIterable(content) + if (iterable != null) { + const peekable = Peekable.from(iterable) + // File object content can only contain iterable of numbers or array + // buffers (or it's views). If so we create an object otherwise + // throw an exception. + const bytes = asIterableBytes(peekable) + if (bytes != null) { + return new ExtendedFile(bytes, name, ext) + } else { + throw errCode(new Error('Unexpected FileObject content: ' + typeof content), 'ERR_UNEXPECTED_INPUT') } + } - // Iterable - if (isBytes(first.value)) { - yield toBuffer(first.value) - for (const chunk of iterator) { - yield toBuffer(chunk) - } - return - } + // If we got here than we are dealing with async input, which can be either + // readable stream or an async iterable (casting former to later) + const stream = asReadableStream(content) + const asyncIterable = stream + ? 
iterateReadableStream(stream) + : asAsyncIterable(content) + if (asyncIterable != null) { + return new FileStream(asyncIterable, name, ext) + } - throw errCode(new Error('Unexpected input: ' + typeof input), 'ERR_UNEXPECTED_INPUT') - })() + throw errCode(new Error(`Unexpected FileObject content: ${content}`), 'ERR_UNEXPECTED_INPUT') } +} - // AsyncIterable - if (input[Symbol.asyncIterator]) { - return (async function * () { - for await (const chunk of input) { - yield toBuffer(chunk) - } - })() +/** + * @param {Peekable} content + * @returns {ArrayBufferView[]|ArrayBuffer[]|null} + */ +const asIterableBytes = (content) => { + const { done, value } = content.peek() + // If it is done input was empty collection so we return early. + if (done) { + return [] } - throw errCode(new Error(`Unexpected input: ${input}`), 'ERR_UNEXPECTED_INPUT') + // If first item is an integer we treat input as a byte array and result + // will be collection of one file contaning those bytes. + if (Number.isInteger(value)) { + const bytes = new Uint8Array(content) + return [bytes] + + // If first item is array buffer or it's view, it is interpreted as chunks + // of one file. In that case we collect all chunks and normalize input into + // collection with a single file containing those chunks. + // Note: Since this is a synchronous iterator all chunks are already in + // memory so by by collecting them into a single file we are not allocate + // new memory (unless iterator is generating content, but that is exotic + // enough use case that we prefer to go with File over FileStream). + } else if (ArrayBuffer.isView(value) || value instanceof ArrayBuffer) { + return [...content].map(normalizeArrayBufferView) + } else { + return null + } } -function toBuffer (chunk) { - return isBytes(chunk) ? 
chunk : Buffer.from(chunk) +/** + * @param {*} input + * @returns {Uint8Array} + */ +const normalizeArrayBufferView = (input) => { + if (input instanceof Uint8Array) { + return input + } else if (ArrayBuffer.isView(input)) { + return new Uint8Array(input.buffer, input.byteOffset, input.byteLength) + } else if (input instanceof ArrayBuffer) { + return new Uint8Array(input) + } else { + throw errCode(new Error(`Unexpected input: ${input}`), 'ERR_UNEXPECTED_INPUT') + } } -function isBytes (obj) { - return Buffer.isBuffer(obj) || ArrayBuffer.isView(obj) || obj instanceof ArrayBuffer +/** + * Pattern matches given `input` as `ReadableStream` and return back either + * matched input or `null`. + * + * @param {any} input + * @returns {ReadableStream|null} + */ +const asReadableStream = input => { + if (input && typeof input.getReader === 'function') { + return input + } else { + return null + } } -function isBloby (obj) { - return typeof globalThis.Blob !== 'undefined' && obj instanceof globalThis.Blob +/** + * Pattern matches given `input` as `AsyncIterable` and returns back either + * matched `AsyncIterable` or `null`. + * @template I + * @param {AsyncIterable|Input} input + * @returns {AsyncIterable|null} + */ +const asAsyncIterable = input => { + /** @type {*} */ + const object = input + if (object && typeof object[Symbol.asyncIterator] === 'function') { + return object + } else { + return null + } } -// An object with a path or content property -function isFileObject (obj) { - return typeof obj === 'object' && (obj.path || obj.content) +/** + * Pattern matches given input as `Iterable` and returns back either matched + * iterable or `null`. 
+ * @template I + * @param {Iterable|Input} input + * @returns {Iterable|null} + */ +const asIterable = input => { + /** @type {*} */ + const object = input + if (object && typeof object[Symbol.iterator] === 'function') { + return object + } else { + return null + } } -function blobToAsyncGenerator (blob) { - if (typeof blob.stream === 'function') { - // firefox < 69 does not support blob.stream() - return browserStreamToIt(blob.stream()) +/** + * Pattern matches given input as "FileObject" and returns back eithr matched + * input or `null`. + * @param {*} input + * @returns {FileObject|null} + */ +const asFileObject = input => { + if (typeof input === 'object' && input && (input.path || input.content)) { + return input + } else { + return null } - - return readBlob(blob) } +/** + * @template T + * @param {ReadableStream} stream + * @returns {AsyncIterable} + */ -async function * browserStreamToIt (stream) { +const iterateReadableStream = async function * (stream) { const reader = stream.getReader() while (true) { @@ -266,33 +406,208 @@ async function * browserStreamToIt (stream) { } } -async function * readBlob (blob, options) { - options = options || {} +/** + * @template T + */ +class Peekable { + /** + * @template T + * @template {Iterable} I + * @param {I} iterable + * @returns {Peekable} + */ + static from (iterable) { + return new Peekable(iterable) + } - const reader = new globalThis.FileReader() - const chunkSize = options.chunkSize || 1024 * 1024 - let offset = options.offset || 0 + /** + * @private + * @param {Iterable} iterable + */ + constructor (iterable) { + const iterator = iterable[Symbol.iterator]() + /** @private */ + this.first = iterator.next() + /** @private */ + this.rest = iterator + } - const getNextChunk = () => new Promise((resolve, reject) => { - reader.onloadend = e => { - const data = e.target.result - resolve(data.byteLength === 0 ? 
null : data) - } - reader.onerror = reject + peek () { + return this.first + } - const end = offset + chunkSize - const slice = blob.slice(offset, end) - reader.readAsArrayBuffer(slice) - offset = end - }) + next () { + const { first, rest } = this + this.first = rest.next() + return first + } - while (true) { - const data = await getNextChunk() + [Symbol.iterator] () { + return this + } - if (data == null) { - return + [Symbol.asyncIterator] () { + return this + } +} + +/** + * @template T + */ +class AsyncPeekable { + /** + * @template T + * @template {AsyncIterable} I + * @param {I} iterable + * @returns {AsyncPeekable} + */ + static from (iterable) { + return new AsyncPeekable(iterable) + } + + /** + * @private + * @param {AsyncIterable} iterable + */ + constructor (iterable) { + const iterator = iterable[Symbol.asyncIterator]() + /** @private */ + this.first = iterator.next() + /** @private */ + this.rest = iterator + } + + peek () { + return this.first + } + + next () { + const { first, rest } = this + this.first = rest.next() + return first + } + + [Symbol.asyncIterator] () { + return this + } +} + +/** + * @param {string} path + * @returns {string} + */ +const basename = (path) => + path.split(/\\|\//).pop() + +class ExtendedFile extends File { + /** + * @param {BlobPart[]} init + * @param {string} name - A USVString representing the file name or the path + * to the file. + * @param {Object} [options] + * @param {string} [options.type] - A DOMString representing the MIME type + * of the content that will be put into the file. Defaults to a value of "". + * @param {number} [options.lastModified] - A number representing the number + * of milliseconds between the Unix time epoch and when the file was last + * modified. Defaults to a value of Date.now(). 
+ * @param {string} [options.path] + * @param {Mode} [options.mode] + * @param {MTime} [options.mtime] + */ + constructor (init, name, options = {}) { + super(init, name, options) + const { path, mode, mtime, lastModified } = options + this.path = path || name + this.mode = mode + // If `mtime` isn't provided but `lastModified` is, derive `mtime` from it. + // If neither is provided keep `mtime` undefined. This way if input was a + // File it's `lastModified` is used otherwise `mtime` is not set. + this.mtime = mtime || (lastModified && new Date(lastModified)) + + /** @type {'file'} */ + this.kind = 'file' + } + + /** + * @returns {AsyncIterable} + */ + get content () { + return readBlob(this) + } +} +// It appears that in electron native `File` has read-only `path` property, +// overriding it the property so that constructor can set a `path`. +Object.defineProperty(ExtendedFile.prototype, 'path', { writable: true }) +module.exports.ExtendedFile = ExtendedFile + +class FileStream { + /** + * @param {AsyncIterable} source + * @param {string} name + * @param {Object} [options] + * @param {string} [options.type] + * @param {number} [options.lastModified] + * @param {string} [options.path] + * @param {MTime} [options.mtime] + * @param {Mode} [options.mode] + */ + constructor (source, name, options = {}) { + this.source = source + this.name = name + this.type = options.type || '' + this.lastModified = options.lastModified || Date.now() + this.path = options.path || '' + this.mtime = options.mtime || (options.lastModified && new Date(options.lastModified)) + this.mode = options.mode + + /** @type {'file-stream'} */ + this.kind = 'file-stream' + } + + get size () { + throw Error('File size is unknown') + } + + async * [Symbol.asyncIterator] () { + for await (const chunk of this.source) { + if (ArrayBuffer.isView(chunk)) { + yield chunk + } else if (chunk instanceof ArrayBuffer) { + yield new Uint8Array(chunk) + } else { + throw errCode(new Error(`Unexpected file content: 
${chunk}`), 'ERR_UNEXPECTED_INPUT') + } } + } - yield Buffer.from(data) + get content () { + return this + } +} +module.exports.FileStream = FileStream + +class Directory { + /** + * @param {string} name + * @param {Object} [options] + * @param {string} [options.type] + * @param {number} [options.lastModified] + * @param {string} [options.path] + * @param {MTime} [options.mtime] + * @param {Mode} [options.mode] + */ + constructor (name, options = {}) { + this.name = name + this.type = options.type || '' + this.lastModified = options.lastModified || Date.now() + this.path = options.path || '' + this.mtime = options.mtime + this.mode = options.mode + + /** @type {'directory'} */ + this.kind = 'directory' + /** @type {void} */ + this.content = undefined } } +module.exports.Directory = Directory diff --git a/packages/ipfs-core-utils/test/files/normalise-input.spec.js b/packages/ipfs-core-utils/test/files/normalise-input.spec.js index e8491bd768..4094014c54 100644 --- a/packages/ipfs-core-utils/test/files/normalise-input.spec.js +++ b/packages/ipfs-core-utils/test/files/normalise-input.spec.js @@ -3,6 +3,8 @@ /* eslint-env mocha */ const { expect } = require('../utils/chai') const normalise = require('../../src/files/normalise-input') +const { Blob, File } = require('../../src/files/blob') +const TextEncoder = require('ipfs-utils/src/text-encoder') const { supportsFileReader } = require('ipfs-utils/src/supports') const { Buffer } = require('buffer') const all = require('it-all') @@ -41,17 +43,75 @@ async function testContent (input) { await verifyNormalisation(result) } -function iterableOf (thing) { - return [thing] +function * iterableOf (...things) { + yield * things } -function asyncIterableOf (thing) { - return (async function * () { // eslint-disable-line require-await - yield thing - }()) +// eslint-disable-next-line require-await +async function * asyncIterableOf (...things) { + yield * things +} + +const encodeText = (text) => new TextEncoder().encode(text) + 
+const readInput = async (input) => { + const output = [] + for (const file of await all(normalise(input))) { + const content = file.content && concatUint8Array(await all(file.content)) + output.push({ file, content }) + } + return output +} + +const concatUint8Array = (chunks) => { + const bytes = [] + for (const chunk of chunks) { + bytes.push(...chunk) + } + return new Uint8Array(bytes) } describe('normalise-input', function () { + /** + * @param {string} name + * @param {*} input + * @param {ExpectOutput[]} expected + * @returns {void} + * + * @typedef {Object} ExpectOutput + * @property {Function} instanceOf + * @property {string} [path="/"] + * @property {*} [mtime] + * @property {*} [mode] + * @property {Uint8Array[]} [content] + */ + function testInput (name, input, expected) { + it(name, async () => { + const output = await readInput(input) + expect(output.length).to.equal(expected.length, `normilaize to ${expected.length} files`) + let index = 0 + for (const { file, content } of output) { + const inn = expected[index] + expect(file).to.be.an.instanceOf(inn.instanceOf) + expect(file.type).to.be.equal(inn.type || '', 'has expected type') + expect(file.path).to.be.equal(inn.path || '', 'has expected path') + expect(file.mtime).to.be.deep.equal(inn.mtime, 'has expected mtime') + expect(file.mode).to.be.deep.equal(inn.mode, 'has expected mode') + + expect(content).to.deep.equal(inn.content, 'has expected content') + + index += 1 + } + }) + } + + function testInvalid (name, input, reason = /Unexpected/) { + it(`${name} is invalid input`, () => { + const result = readInput(input) + expect(result).eventually.to.be.rejectedWith(reason) + }) + } + function testInputType (content, name, isBytes) { it(name, async function () { await testContent(content()) @@ -139,4 +199,1458 @@ describe('normalise-input', function () { describe('TypedArray', () => { testInputType(TYPEDARRAY, 'TypedArray', true) }) + + describe('keeps blobs when possible', () => { + const lastModified 
= 1594672000418 + const mtime = new Date(lastModified) + + describe('string [single file]', () => { + testInput('string -> [File]', 'hello', [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + } + ]) + + testInput('string -> [File]', 'hello', [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + } + ]) + }) + + describe('Bytes [single file]', () => { + testInput('Buffer -> [File]', Buffer.from('from buffer'), [ + { + path: '', + instanceOf: File, + content: encodeText('from buffer') + } + ]) + + testInput('Uint8Array -> [File]', new Uint8Array([1, 2, 3, 4]), [ + { + path: '', + instanceOf: File, + content: new Uint8Array([1, 2, 3, 4]) + } + ]) + + testInput('Uint32Array -> [File]', new Uint32Array([1, 2, 3]), [ + { + path: '', + instanceOf: File, + content: new Uint8Array([1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0]) + } + ]) + + testInput('ArrayBuffer -> [File]', encodeText('ArrayBuffer').buffer, [ + { + path: '', + instanceOf: File, + content: encodeText('ArrayBuffer') + } + ]) + }) + + describe('Bloby [single file]', () => { + testInput('Blob -> [File]', new Blob(['blob']), [ + { + path: '', + instanceOf: File, + content: encodeText('blob') + } + ]) + + testInput( + 'Blob(content, { type: "text/plain" }) -> [File]', + new Blob(['blob'], { type: 'text/plain' }), + [ + { + path: '', + instanceOf: File, + content: encodeText('blob'), + type: 'text/plain' + } + ] + ) + + testInput( + 'File -> [File]', + new File(['DOM file'], 'bla', { lastModified }), + [ + { + path: 'bla', + instanceOf: File, + content: encodeText('DOM file') + } + ] + ) + + testInput( + 'File(content, name, { type: "text/plain" }) -> [File]', + new File(['DOM file'], 'file2', { + type: 'text/plain', + lastModified + }), + [ + { + path: 'file2', + instanceOf: File, + content: encodeText('DOM file'), + type: 'text/plain' + } + ] + ) + }) + + describe('FileObject [single file]', () => { + describe('no content is treated as an empty directory', () => { + 
testInput('{path:"foo/bar"} -> [Directory]', { path: 'foo/bar' }, [ + { + path: 'foo/bar', + instanceOf: normalise.Directory, + content: undefined + } + ]) + + testInput( + '{path:"foo/bar", mtime, mode} -> [Directory]', + { + path: 'foo/bar', + mtime, + mode: 420 + }, + [ + { + path: 'foo/bar', + instanceOf: normalise.Directory, + mtime, + mode: 420, + content: undefined + } + ] + ) + }) + + describe('invalid FileObject', () => { + testInvalid('{}', {}) + testInvalid( + '{content:null}', + { + content: null + }, + /Unexpected input/ + ) + testInvalid('{ name: "file" }', { name: 'file' }) + testInvalid('{ mtime, mode }', { mtime, mode: 420 }) + }) + + describe('Bytes content', () => { + testInput( + '{content:Buffer} -> [File]', + { + content: Buffer.from('node buffer') + }, + [ + { + path: '', + instanceOf: File, + content: encodeText('node buffer') + } + ] + ) + + testInput( + '{content:Buffer, path} -> [File]', + { + content: Buffer.from('node buffer'), + path: 'node/buffer' + }, + [ + { + path: 'node/buffer', + instanceOf: File, + content: encodeText('node buffer') + } + ] + ) + + testInput( + '{content:Buffer, path, mode} -> [File]', + { + content: Buffer.from('node buffer'), + path: 'node/buffer', + mode: 420 + }, + [ + { + path: 'node/buffer', + instanceOf: File, + mode: 420, + content: encodeText('node buffer') + } + ] + ) + + testInput( + '{content:Buffer, path, mode, mtime} -> [File]', + { + content: Buffer.from('node buffer'), + path: 'node/buffer', + mode: 420, + mtime + }, + [ + { + path: 'node/buffer', + instanceOf: File, + mode: 420, + mtime, + content: encodeText('node buffer') + } + ] + ) + + testInput( + '{content:ArrayBuffer} -> [File]', + { + content: encodeText('ArrayBuffer').buffer + }, + [ + { + path: '', + instanceOf: File, + content: encodeText('ArrayBuffer') + } + ] + ) + + testInput( + '{content:Uint8Array, path} -> [File]', + { + content: encodeText('Uint8Array').buffer, + path: 'web/Uint8Array' + }, + [ + { + path: 'web/Uint8Array', + 
instanceOf: File, + content: encodeText('Uint8Array') + } + ] + ) + }) + + describe('Bloby content', () => { + testInput( + '{content:Blob, path} -> [File]', + { + content: new Blob(['blob']), + path: 'web/blob' + }, + [ + { + path: 'web/blob', + instanceOf: File, + content: encodeText('blob') + } + ] + ) + + testInput( + '{content:Blob, path} -> [File]', + { + content: new Blob(['blob'], { type: 'text/plain' }), + path: 'web/blob' + }, + [ + { + path: 'web/blob', + instanceOf: File, + type: '', + content: encodeText('blob') + } + ] + ) + + testInput( + '{content:File} -> [File]', + { + content: new File(['file'], 'foo') + }, + [ + { + path: '', + instanceOf: File, + content: encodeText('file') + } + ] + ) + }) + + describe('string content', () => { + testInput( + '{content:"text"} -> [File]', + { + content: 'text' + }, + [ + { + path: '', + instanceOf: File, + content: encodeText('text') + } + ] + ) + + testInput( + '{content:"text", path } -> [File]', + { + content: 'text', + path: 'text-file', + type: 'text/plain' + }, + [ + { + path: 'text-file', + type: 'text/plain', + instanceOf: File, + content: encodeText('text') + } + ] + ) + }) + + describe('Iterable content', () => { + testInput( + '{content:[] -> [File]', + { + content: [] + }, + [ + { + path: '', + instanceOf: File, + content: new Uint8Array([]) + } + ] + ) + + testInvalid( + '{content:["hello"]}', + { + content: ['hello'] + }, + /Unexpected FileObject content/ + ) + + testInvalid('{content:[Uint8Array, string]}', { + content: [encodeText('hello '), 'text'] + }) + + testInput( + '{content:[104, 101, 108, 108, 111 ] -> [File]', + { + content: [104, 101, 108, 108, 111] + }, + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + } + ] + ) + + testInput( + '{content:Uint8Array[]} -> [File]', + { + content: [encodeText('hello '), encodeText('world')] + }, + [ + { + path: '', + instanceOf: File, + content: encodeText('hello world') + } + ] + ) + + testInvalid('{content:[Uint8Array, Blob]}', 
{ + content: [encodeText('hello '), new Blob(['test'])] + }) + + testInvalid('{content:[Uint8Array, File]}', { + content: [encodeText('hello '), new File(['File'], 'file')] + }) + + testInvalid('{content:[Uint8Array, number]}', { + content: [encodeText('hello '), 3] + }) + + testInput( + '{content:Iterable} -> [File]', + { + content: iterableOf(104, 101, 108, 108, 111) + }, + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + } + ] + ) + + testInput( + '{content: [0, "test"]} -> [File]', + { + content: [0, 'test'] + }, + [ + { + path: '', + instanceOf: File, + content: new Uint8Array([0, 'test']) + } + ] + ) + + testInput( + '{content:Iterable} -> [File]', + { + content: iterableOf(encodeText('hello '), encodeText('world!')) + }, + [ + { + path: '', + instanceOf: File, + content: encodeText('hello world!') + } + ] + ) + + testInput( + '{content:Iterable} -> [File]', + { + content: iterableOf( + encodeText('hello ').buffer, + encodeText('world!') + ) + }, + [ + { + path: '', + instanceOf: File, + content: encodeText('hello world!') + } + ] + ) + + testInvalid('{content: Iterable}', { + content: iterableOf(new Blob(['hello'])) + }) + + testInvalid('{content: Iterable}', { + content: iterableOf(new File(['hello'], '')) + }) + }) + + describe('AsyncIterable content', () => { + testInvalid('{content: AsyncIterable}', { + content: asyncIterableOf(104, 101, 108, 108, 111) + }) + + testInvalid('{content: AsyncIterable}', { + content: asyncIterableOf('hello') + }) + + testInvalid('{content: AsyncIterable}', { + content: asyncIterableOf(encodeText('hello'), 'test') + }) + + testInvalid('{content: AsyncIterable}', { + content: asyncIterableOf(encodeText('hello'), 1) + }) + + testInvalid('{content: AsyncIterable}', { + content: asyncIterableOf(encodeText('hello'), new Blob()) + }) + + testInvalid('{content: AsyncIterable}', { + content: asyncIterableOf(encodeText('hello'), new File([], '')) + }) + + testInvalid('{content: AsyncIterable>}', { + content: 
asyncIterableOf(encodeText('hello'), [1, 2]) + }) + + testInput( + '{content: AsyncIterable} -> [File]Stream', + { + content: asyncIterableOf(encodeText('hello'), encodeText(' world')) + }, + [ + { + path: '', + instanceOf: normalise.FileStream, + content: encodeText('hello world') + } + ] + ) + + testInput( + '{content: AsyncIterable} -> [File]Stream', + { + content: asyncIterableOf( + encodeText('hello').buffer, + encodeText(' world') + ) + }, + [ + { + path: '', + instanceOf: normalise.FileStream, + content: encodeText('hello world') + } + ] + ) + }) + }) + + describe('Iterable [single file]', () => { + testInput( + '[] -> [File]', + [], + [ + { + path: '', + instanceOf: File, + content: new Uint8Array([]) + } + ] + ) + + testInput( + 'number[] -> [File]', + { + content: [104, 101, 108, 108, 111] + }, + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + } + ] + ) + + testInput( + 'Iterable -> [File]', + { + content: iterableOf(104, 101, 108, 108, 111) + }, + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + } + ] + ) + + testInput( + '[0, "test"] -> [File]', + [0, 'test'], + [ + { + path: '', + instanceOf: File, + content: new Uint8Array([0, 'test']) + } + ] + ) + + testInput('Iterable<0|*> -> [File]', iterableOf(0, 'test'), [ + { + path: '', + instanceOf: File, + content: new Uint8Array([0, 'test']) + } + ]) + }) + + describe('Iterable [single file]', () => { + testInput( + '[Bytes] -> [File]', + [encodeText('hello')], + [{ path: '', instanceOf: File, content: encodeText('hello') }] + ) + + testInput( + '[Bytes, Bytes] -> [File]', + [encodeText('hello '), encodeText('text')], + [{ path: '', instanceOf: File, content: encodeText('hello text') }] + ) + + testInvalid('[Bytes, string]', [encodeText('hello'), 'world']) + + testInvalid('[Uint8Array, Blob]', [ + encodeText('hello '), + new Blob(['test']) + ]) + + testInvalid('[Uint8Array, File]', [ + encodeText('hello '), + new File(['File'], 'file') + ]) + + 
testInvalid('[Uint8Array, number]', [encodeText('hello '), 3]) + + testInput( + 'Iterable -> [File]', + iterableOf(encodeText('hello ').buffer, encodeText('world!')), + [ + { + path: '', + instanceOf: File, + content: encodeText('hello world!') + } + ] + ) + }) + + describe('Iterable [multiple files]', () => { + testInput( + '["hello"] -> [File]', + ['hello'], + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + } + ] + ) + + testInput( + '["hello", "world"] -> [File, File]', + ['hello', 'world'], + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + }, + { + path: '', + instanceOf: File, + content: encodeText('world') + } + ] + ) + + testInvalid( + '[string, null]', + ['hello', null] + ) + + testInvalid( + '[string, number[]]', + ['hello', [1, 2, 3]] + ) + }) + + describe('Iterable [multiple files]', () => { + testInput( + '[Blob] -> [File]', + iterableOf(new Blob(['hello'])), + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + } + ] + ) + + testInput( + '[File] -> [File]', + iterableOf(new File(['hello'], 'foo', { lastModified })), + [ + { + path: 'foo', + instanceOf: File, + content: encodeText('hello') + } + ] + ) + + testInput( + '[Blob, File] -> [File, File]', + iterableOf(new Blob(['hello']), new File(['world'], 'w', { lastModified })), + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + }, + { + path: 'w', + instanceOf: File, + content: encodeText('world') + } + ] + ) + + testInvalid( + '[Blob, null]', + iterableOf(new Blob(['hello']), null) + ) + + testInvalid( + '[string, number[]]', + iterableOf(new Blob(['hello']), [1, 2, 3]) + ) + }) + + describe('Iterable [multiple files]', () => { + testInput( + '[{ content: string }] -> [File]', + iterableOf({ content: 'hello' }), + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + } + ] + ) + + testInput( + '[{path:string}, {content:string}] -> [Directory, File]', + iterableOf({ path: 'dir' }, { content: 'hello' 
}), + [ + { + path: 'dir', + instanceOf: normalise.Directory + }, + { + path: '', + instanceOf: File, + content: encodeText('hello') + } + ] + ) + + testInput( + '[{path, content:Blob, mtime}, {path, mtime}] -> [File, Directory]', + iterableOf( + { + content: new Blob(['file']), + type: 'text/plain', + path: 'file', + mtime + }, + { + path: 'dir', + mtime + } + ), + [ + { + path: 'file', + instanceOf: File, + mtime, + type: 'text/plain', + content: encodeText('file') + }, + { + path: 'dir', + instanceOf: normalise.Directory, + mtime + } + ] + ) + + testInvalid( + '[{content:string}, {content:null}]', + iterableOf({ content: 'hello' }, { content: null }) + ) + + testInvalid( + '[{content:string}, number[]]', + iterableOf({ content: 'hello' }, [1, 2, 3]) + ) + + testInvalid('[{content: Iterable}]', + iterableOf({ + content: iterableOf(new File(['hello'], '')) + }) + ) + }) + + describe('Iterable [multiple files]', () => { + testInput( + '["hello", "world"] -> [File, File]', + ['hello', 'world'], + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + }, + { + path: '', + instanceOf: File, + content: encodeText('world') + } + ] + ) + + testInput( + '[string, Bytes] -> [File, File]', + ['hello', encodeText('world')], + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + }, + { + path: '', + instanceOf: File, + content: encodeText('world') + } + ] + ) + + testInput( + '[string, Blob] -> [File, File]', + ['hello', new Blob(['world'])], + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + }, + { + path: '', + instanceOf: File, + content: encodeText('world') + } + ] + ) + + testInput( + '[string, File] -> [File, File]', + ['hello', new File(['world'], 'w', { lastModified })], + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + }, + { + path: 'w', + instanceOf: File, + content: encodeText('world') + } + ] + ) + + testInput( + '[string, {path:"foo"}] -> [File, Directory]', + ['hello', { path: 'foo' 
}], + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + }, + { + path: 'foo', + instanceOf: normalise.Directory + } + ] + ) + + testInput( + '[string, {content:"bar"}] -> [File, File]', + ['hello', { content: 'bar' }], + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + }, + { + path: '', + instanceOf: File, + content: encodeText('bar') + } + ] + ) + + testInput( + '[string, Bytes] -> [File, File]', + iterableOf('hello', encodeText('bar')), + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + }, + { + path: '', + instanceOf: File, + content: encodeText('bar') + } + ] + ) + + testInput( + '[string, FileObject, Bytes] -> [File, FileStream, File]', + iterableOf( + 'hello', + { content: asyncIterableOf() }, + encodeText('bar') + ), + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + }, + { + path: '', + instanceOf: normalise.FileStream, + content: new Uint8Array([]) + }, + { + path: '', + instanceOf: File, + content: encodeText('bar') + } + ] + ) + + testInput( + '[FileObject, File] -> [FileStream, File]', + iterableOf( + { content: asyncIterableOf(encodeText('world'), encodeText('!')) }, + new File(['file'], 'file', { lastModified }) + ), + [ + { + path: '', + instanceOf: normalise.FileStream, + content: encodeText('world!') + }, + { + path: 'file', + instanceOf: File, + content: encodeText('file') + } + ] + ) + + testInput( + '[FileObject, FileObject] -> [FileStream, FileStream]', + iterableOf( + { content: asyncIterableOf(encodeText('hello'), encodeText(' ')) }, + { content: asyncIterableOf(encodeText('world'), encodeText('!')) } + ), + [ + { + path: '', + instanceOf: normalise.FileStream, + content: encodeText('hello ') + }, + { + path: '', + instanceOf: normalise.FileStream, + content: encodeText('world!') + } + ] + ) + + testInput( + '[FileObject, FileObject, FileObject, string] -> [File, Directory, FileStream, File]', + iterableOf( + { content: iterableOf(encodeText('hello'), 
encodeText(' ')), path: 'bla' }, + { path: 'foo' }, + { content: asyncIterableOf(encodeText('world'), encodeText('!')) }, + 'bzz' + ), + [ + { + path: 'bla', + instanceOf: File, + content: encodeText('hello ') + }, + { + path: 'foo', + instanceOf: normalise.Directory + }, + { + path: '', + instanceOf: normalise.FileStream, + content: encodeText('world!') + }, + { + path: '', + instanceOf: File, + content: encodeText('bzz') + } + ] + ) + }) + + describe('AsyncIterable [single file]', () => { + testInput( + '[Bytes] -> [File]', + asyncIterableOf(encodeText('hello')), + [ + { + path: '', + instanceOf: normalise.FileStream, + content: encodeText('hello') + } + ] + ) + + testInput( + '[Bytes, Bytes] -> [File]', + asyncIterableOf(encodeText('hello '), encodeText('text')), + [ + { + path: '', + instanceOf: normalise.FileStream, + content: encodeText('hello text') + } + ] + ) + + testInvalid('[Bytes, string]', asyncIterableOf(encodeText('hello'), 'world')) + + testInvalid('[Uint8Array, Blob]', asyncIterableOf( + encodeText('hello '), + new Blob(['test']) + )) + + testInvalid('[Uint8Array, File]', asyncIterableOf( + encodeText('hello '), + new File(['File'], 'file') + )) + + testInvalid('[Uint8Array, number]', asyncIterableOf(encodeText('hello '), 3)) + + testInput( + 'AsyncIterable -> [File]', + asyncIterableOf(encodeText('hello ').buffer, encodeText('world!')), + [ + { + path: '', + instanceOf: normalise.FileStream, + content: encodeText('hello world!') + } + ] + ) + }) + + describe('AsyncIterable [multiple files]', () => { + testInput( + '[Blob] -> [File]', + asyncIterableOf(new Blob(['hello'])), + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + } + ] + ) + + testInput( + '[File] -> [File]', + asyncIterableOf(new File(['hello'], 'foo', { lastModified })), + [ + { + path: 'foo', + instanceOf: File, + content: encodeText('hello') + } + ] + ) + + testInput( + '[Blob, File] -> [File, File]', + asyncIterableOf( + new Blob(['hello']), + new 
File(['world'], 'w', { lastModified }) + ), + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + }, + { + path: 'w', + instanceOf: File, + content: encodeText('world') + } + ] + ) + + testInvalid( + '[Blob, null]', + asyncIterableOf(new Blob(['hello']), null) + ) + + testInvalid( + '[string, number[]]', + asyncIterableOf(new Blob(['hello']), [1, 2, 3]) + ) + }) + + describe('AsyncIterable [multiple files]', () => { + testInput( + '["hello"] -> [File]', + asyncIterableOf('hello'), + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + } + ] + ) + + testInput( + '["hello", "world"] -> [File, File]', + asyncIterableOf('hello', 'world'), + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + }, + { + path: '', + instanceOf: File, + content: encodeText('world') + } + ] + ) + + testInvalid( + '[string, null]', + asyncIterableOf('hello', null) + ) + + testInvalid( + '[string, number[]]', + asyncIterableOf('hello', [1, 2, 3]) + ) + }) + + describe('AsyncIterable [multiple files]', () => { + testInput( + '[{ content: string }] -> [File]', + asyncIterableOf({ content: 'hello' }), + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + } + ] + ) + + testInput( + '[{path:string}, {content:string}] -> [Directory, File]', + asyncIterableOf({ path: 'dir' }, { content: 'hello' }), + [ + { + path: 'dir', + instanceOf: normalise.Directory + }, + { + path: '', + instanceOf: File, + content: encodeText('hello') + } + ] + ) + + testInput( + '[{path, content:Blob, mtime}, {path, mtime}] -> [File, Directory]', + asyncIterableOf( + { + content: new Blob(['file']), + type: 'text/plain', + path: 'file', + mtime + }, + { + path: 'dir', + mtime + } + ), + [ + { + path: 'file', + instanceOf: File, + mtime, + type: 'text/plain', + content: encodeText('file') + }, + { + path: 'dir', + instanceOf: normalise.Directory, + mtime + } + ] + ) + + testInvalid( + '[{content:string}, {content:null}]', + asyncIterableOf({ content: 
'hello' }, { content: null }) + ) + + testInvalid( + '[{content:string}, number[]]', + asyncIterableOf({ content: 'hello' }, [1, 2, 3]) + ) + + testInvalid('[{content: Iterable}]', + asyncIterableOf({ + content: iterableOf(new File(['hello'], '')) + }) + ) + }) + + describe('AsyncIterable [multiple files]', () => { + testInput( + '["hello", "world"] -> [File, File]', + asyncIterableOf('hello', 'world'), + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + }, + { + path: '', + instanceOf: File, + content: encodeText('world') + } + ] + ) + + testInput( + '[string, Bytes] -> [File, File]', + asyncIterableOf('hello', encodeText('world')), + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + }, + { + path: '', + instanceOf: File, + content: encodeText('world') + } + ] + ) + + testInput( + '[string, Blob] -> [File, File]', + asyncIterableOf('hello', new Blob(['world'])), + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + }, + { + path: '', + instanceOf: File, + content: encodeText('world') + } + ] + ) + + testInput( + '[string, File] -> [File, File]', + asyncIterableOf('hello', new File(['world'], 'w', { lastModified })), + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + }, + { + path: 'w', + instanceOf: File, + content: encodeText('world') + } + ] + ) + + testInput( + '[string, {path:"foo"}] -> [File, Directory]', + asyncIterableOf('hello', { path: 'foo' }), + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + }, + { + path: 'foo', + instanceOf: normalise.Directory + } + ] + ) + + testInput( + '[string, {content:"bar"}] -> [File, File]', + asyncIterableOf('hello', { content: 'bar' }), + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + }, + { + path: '', + instanceOf: File, + content: encodeText('bar') + } + ] + ) + + testInput( + '[string, Bytes] -> [File, File]', + asyncIterableOf('hello', encodeText('bar')), + [ + { + path: '', + instanceOf: 
File, + content: encodeText('hello') + }, + { + path: '', + instanceOf: File, + content: encodeText('bar') + } + ] + ) + + testInput( + '[string, FileObject, Bytes] -> [File, FileStream, File]', + asyncIterableOf( + 'hello', + { content: asyncIterableOf() }, + encodeText('bar') + ), + [ + { + path: '', + instanceOf: File, + content: encodeText('hello') + }, + { + path: '', + instanceOf: normalise.FileStream, + content: new Uint8Array([]) + }, + { + path: '', + instanceOf: File, + content: encodeText('bar') + } + ] + ) + + testInput( + '[FileObject, File] -> [FileStream, File]', + asyncIterableOf( + { content: asyncIterableOf(encodeText('world'), encodeText('!')) }, + new File(['file'], 'file', { lastModified }) + ), + [ + { + path: '', + instanceOf: normalise.FileStream, + content: encodeText('world!') + }, + { + path: 'file', + instanceOf: File, + content: encodeText('file') + } + ] + ) + + testInput( + '[FileObject, FileObject] -> [FileStream, FileStream]', + asyncIterableOf( + { content: asyncIterableOf(encodeText('hello'), encodeText(' ')) }, + { content: asyncIterableOf(encodeText('world'), encodeText('!')) } + ), + [ + { + path: '', + instanceOf: normalise.FileStream, + content: encodeText('hello ') + }, + { + path: '', + instanceOf: normalise.FileStream, + content: encodeText('world!') + } + ] + ) + + testInput( + '[FileObject, FileObject, FileObject, string] -> [File, Directory, FileStream, File]', + asyncIterableOf( + { content: iterableOf(encodeText('hello'), encodeText(' ')), path: 'bla' }, + { path: 'foo' }, + { content: asyncIterableOf(encodeText('world'), encodeText('!')) }, + 'bzz' + ), + [ + { + path: 'bla', + instanceOf: File, + content: encodeText('hello ') + }, + { + path: 'foo', + instanceOf: normalise.Directory + }, + { + path: '', + instanceOf: normalise.FileStream, + content: encodeText('world!') + }, + { + path: '', + instanceOf: File, + content: encodeText('bzz') + } + ] + ) + }) + }) }) diff --git a/packages/ipfs-http-client/package.json 
b/packages/ipfs-http-client/package.json index 6b768851bc..f9b6c1e25c 100644 --- a/packages/ipfs-http-client/package.json +++ b/packages/ipfs-http-client/package.json @@ -15,7 +15,7 @@ ], "main": "src/index.js", "browser": { - "./src/lib/to-stream.js": "./src/lib/to-stream.browser.js", + "./src/lib/to-body.js": "./src/lib/to-body.browser.js", "ipfs-utils/src/files/glob-source": false, "go-ipfs": false }, @@ -55,7 +55,6 @@ "iso-url": "^0.4.7", "it-last": "^1.0.1", "it-tar": "^1.2.2", - "it-to-buffer": "^1.0.0", "it-to-stream": "^0.1.1", "merge-options": "^2.0.0", "multiaddr": "^7.4.3", @@ -185,4 +184,4 @@ "Łukasz Magiera ", "Łukasz Magiera " ] -} +} \ No newline at end of file diff --git a/packages/ipfs-http-client/src/lib/async-iterable.js b/packages/ipfs-http-client/src/lib/async-iterable.js new file mode 100644 index 0000000000..8867b80ed6 --- /dev/null +++ b/packages/ipfs-http-client/src/lib/async-iterable.js @@ -0,0 +1,24 @@ +// @ts-check +'use strict' + +/** + * @template T + * @param {Iterable|AsyncIterable} iterable + * @returns {AsyncIterable} + */ +// eslint-disable-next-line require-await +const from = async function * AsyncIterableFrom (iterable) { + yield * iterable +} +exports.from = from + +/** + * @template T + * @param {...T} items + * @returns {AsyncIterable} + */ +// eslint-disable-next-line require-await +const of = async function * AsyncIterableOf (...items) { + yield * items +} +exports.of = of diff --git a/packages/ipfs-http-client/src/lib/form-data-encoder.js b/packages/ipfs-http-client/src/lib/form-data-encoder.js new file mode 100644 index 0000000000..54da02be74 --- /dev/null +++ b/packages/ipfs-http-client/src/lib/form-data-encoder.js @@ -0,0 +1,114 @@ +// @ts-check +'use strict' + +const { nanoid } = require('nanoid') +const { Blob } = require('ipfs-core-utils/src/files/blob') +const { from } = require('./async-iterable') + +class FormDataEncoder { +/** + * @param {Object} [options] + * @param {string} [options.boundary] + */ + 
constructor (options = {}) { + this.boundary = getBoundary(options) + this.type = `multipart/form-data; boundary=${this.boundary}` + } + + /** + * @param {AsyncIterable|Iterable} source + * @returns {AsyncIterable} + */ + async * encode (source) { + const { boundary } = this + for await (const part of from(source)) { + yield `--${boundary}\r\n` + yield * encodeHead(part) + yield '\r\n' + yield * encodeBody(part.content) + yield '\r\n' + } + + yield `--${boundary}--\r\n` + } +} +exports.FormDataEncoder = FormDataEncoder + +/** + * @param {void|Blob|AsyncIterable} content + * @returns {Iterable|AsyncIterable} + */ +function encodeBody (content) { + if (content == null) { + return [] + } else if (content instanceof Blob) { + return [content] + } else { + /** @type {AsyncIterable|AsyncIterable} */ + const chunks = (content) + return chunks + } +} + +/** + * @typedef {Object} Part + * @property {string} name + * @property {void|Blob|AsyncIterable} content + * @property {string} [filename] + * @property {Headers} [headers] + * + * @typedef {Record} Headers + * + */ + +/** + * @param {Part} part + * @returns {Iterable} + */ +function * encodeHead ({ name, content, filename, headers }) { + const file = filename || getFileName(content) + const contentDisposition = + file == null + ? `form-data; name="${name}"` + : `form-data; name="${name}"; filename="${encodeURIComponent(file)}"` + + yield `Content-Disposition: ${contentDisposition}\r\n` + + let hasContentType = false + if (headers) { + for (const [name, value] of Object.entries(headers)) { + // if content type is provided we do no want to derive + if (name === 'Content-Type' || name === 'content-type') { + hasContentType = true + } + + yield `${name}: ${value}\r\n` + } + } + + const contentType = !hasContentType ? getContentType(content) : null + if (contentType != null) { + yield `Content-Type: ${contentType}\r\n` + } + + // Otherwise jslint is unhappy. 
+ return undefined +} + +/** + * @param {any} content + * @returns {string|null} + */ +const getFileName = (content) => + content.filepath || content.webkitRelativePath || content.name || null + +const getContentType = (content) => + content.type || null + +/** + * @param {Object} options + * @param {string} [options.boundary] + * @returns {string} + */ +const getBoundary = ({ boundary }) => + (boundary || `-----------------------------${nanoid()}`).toLowerCase() diff --git a/packages/ipfs-http-client/src/lib/mode-to-headers.js b/packages/ipfs-http-client/src/lib/mode-to-headers.js new file mode 100644 index 0000000000..7a75b88274 --- /dev/null +++ b/packages/ipfs-http-client/src/lib/mode-to-headers.js @@ -0,0 +1,23 @@ +// @ts-check +'use strict' + +const modeToString = require('./mode-to-string') + +/** + * @typedef {import('./mode-to-string').Mode} Mode + */ + +/** + * @param {Mode} mode + * @returns {void|{mode:string}} + */ +const modeToHeaders = (mode) => { + const value = modeToString(mode) + if (value != null) { + return { mode: value } + } else { + return undefined + } +} + +module.exports = modeToHeaders diff --git a/packages/ipfs-http-client/src/lib/mode-to-string.js b/packages/ipfs-http-client/src/lib/mode-to-string.js index ee2742b9a3..99a725561b 100644 --- a/packages/ipfs-http-client/src/lib/mode-to-string.js +++ b/packages/ipfs-http-client/src/lib/mode-to-string.js @@ -1,13 +1,26 @@ +// @ts-check 'use strict' +/** + * @typedef {import('ipfs-core-utils/src/files/normalise-input').Mode} Mode + */ + +/** + * @param {undefined|null|Mode} mode + * @returns {undefined|string} + */ module.exports = (mode) => { if (mode === undefined || mode === null) { return undefined } - if (typeof mode === 'string' || mode instanceof String) { + if (typeof mode === 'string') { return mode } + if (mode instanceof String) { + return mode.toString() + } + return mode.toString(8).padStart(4, '0') } diff --git a/packages/ipfs-http-client/src/lib/mtime-to-headers.js 
b/packages/ipfs-http-client/src/lib/mtime-to-headers.js new file mode 100644 index 0000000000..548132d18e --- /dev/null +++ b/packages/ipfs-http-client/src/lib/mtime-to-headers.js @@ -0,0 +1,31 @@ +// @ts-check +'use strict' + +const mtimeToObject = require('./mtime-to-object') + +/** + * @typedef {import('./mtime-to-object').MTime} MTime + * @typedef {Object} MTimeHeaders + * @property {number} [mtime] + * @property {number} [nsecs] + */ + +/** + * @param {null|undefined|MTime} mtime + * @returns {void|MTimeHeaders} + */ +const mtimeToHeaders = (mtime) => { + const data = mtimeToObject(mtime) + if (data) { + const { secs, nsecs } = data + const headers = { mtime: secs } + if (nsecs != null) { + headers['mtime-nsecs'] = nsecs + } + return headers + } else { + return undefined + } +} + +module.exports = mtimeToHeaders diff --git a/packages/ipfs-http-client/src/lib/mtime-to-object.js b/packages/ipfs-http-client/src/lib/mtime-to-object.js index be89148f64..5e621c39c6 100644 --- a/packages/ipfs-http-client/src/lib/mtime-to-object.js +++ b/packages/ipfs-http-client/src/lib/mtime-to-object.js @@ -1,5 +1,15 @@ +// @ts-check 'use strict' +/** + * @typedef {import('ipfs-core-utils/src/files/normalise-input').MTime} MTime + * @typedef {import('ipfs-core-utils/src/files/normalise-input').UnixFSTime} UnixFSTime + */ + +/** + * @param {null|undefined|MTime} mtime + * @returns {UnixFSTime} + */ module.exports = function parseMtime (mtime) { if (mtime == null) { return undefined @@ -17,15 +27,12 @@ module.exports = function parseMtime (mtime) { } // { secs, nsecs } - if (Object.prototype.hasOwnProperty.call(mtime, 'secs')) { - return { - secs: mtime.secs, - nsecs: mtime.nsecs - } + if ('secs' in mtime && typeof mtime.secs === 'number') { + return { secs: mtime.secs, nsecs: mtime.nsecs } } // UnixFS TimeSpec - if (Object.prototype.hasOwnProperty.call(mtime, 'Seconds')) { + if ('Seconds' in mtime && typeof mtime.Seconds === 'number') { return { secs: mtime.Seconds, nsecs: 
mtime.FractionalNanoseconds diff --git a/packages/ipfs-http-client/src/lib/multipart-request.js b/packages/ipfs-http-client/src/lib/multipart-request.js index eee4e26b1b..9c35904388 100644 --- a/packages/ipfs-http-client/src/lib/multipart-request.js +++ b/packages/ipfs-http-client/src/lib/multipart-request.js @@ -1,68 +1,87 @@ +// @ts-check 'use strict' const normaliseInput = require('ipfs-core-utils/src/files/normalise-input') -const toStream = require('./to-stream') -const { nanoid } = require('nanoid') -const modeToString = require('../lib/mode-to-string') -const mtimeToObject = require('../lib/mtime-to-object') +const toBody = require('./to-body') +const modeToHeaders = require('../lib/mode-to-headers') +const mtimeToHeaders = require('../lib/mtime-to-headers') const merge = require('merge-options').bind({ ignoreUndefined: true }) +const { FormDataEncoder } = require('./form-data-encoder') -async function multipartRequest (source = '', abortController, headers = {}, boundary = `-----------------------------${nanoid()}`) { - async function * streamFiles (source) { - try { - let index = 0 +/** + * @typedef {import('ipfs-core-utils/src/files/normalise-input').Input} Source + * @typedef {import('stream').Readable} NodeReadableStream + * @typedef {import('./form-data-encoder').Part} Part + * @typedef {import('./form-data-encoder').Headers} Headers + * + * @typedef {Object} MultipartRequest + * @property {Headers} headers + * @property {NodeReadableStream|Blob} body + */ - for await (const { content, path, mode, mtime } of normaliseInput(source)) { - let fileSuffix = '' - const type = content ? 
'file' : 'dir' +/** + * @param {Source} source + * @param {AbortController} [abortController] + * @param {Headers} [headers] + * @param {string} [boundary] + * @returns {Promise} + */ +async function multipartRequest (source = '', abortController, headers = {}, boundary) { + const encoder = new FormDataEncoder({ boundary }) + const data = encoder.encode(toFormDataParts(source, abortController)) + // In node this will produce readable stream, in browser it will + // produce a blob instance. + const body = await toBody(data) - if (index > 0) { - yield '\r\n' - - fileSuffix = `-${index}` - } - - yield `--${boundary}\r\n` - yield `Content-Disposition: form-data; name="${type}${fileSuffix}"; filename="${encodeURIComponent(path)}"\r\n` - yield `Content-Type: ${content ? 'application/octet-stream' : 'application/x-directory'}\r\n` - - if (mode !== null && mode !== undefined) { - yield `mode: ${modeToString(mode)}\r\n` - } - - if (mtime != null) { - const { - secs, nsecs - } = mtimeToObject(mtime) - - yield `mtime: ${secs}\r\n` - - if (nsecs != null) { - yield `mtime-nsecs: ${nsecs}\r\n` - } - } - - yield '\r\n' + return { + headers: merge(headers, { + 'Content-Type': encoder.type + }), + body + } +} - if (content) { - yield * content - } +/** + * Takes `ipfs.add` input and turns it into async iterable of form-data parts + * that `FormDataEncoder` encode. + * @param {Source} source + * @param {AbortController} [abortController] + * @returns {AsyncIterable} + */ +async function * toFormDataParts (source, abortController) { + try { + let index = 0 + for await (const input of normaliseInput(source)) { + const { kind, path, mode, mtime } = input + const type = kind === 'directory' ? 'dir' : 'file' + const suffix = index > 0 ? `-${index}` : '' + const name = `${type}${suffix}` + const filename = path + const headers = { + 'Content-Type': type === 'file' ? 
'application/octet-stream' : 'application/x-directory', + ...(mtime && mtimeToHeaders(mtime)), + ...(mode && modeToHeaders(mode)) + } + // If `input.kind` is a 'file' then input is an `ExtendedFile` instance + // and we do not want to touch its content because that would read the + // underlying blob in browser. Instead we pass it as is so that encoder + // can inline it. Otherwise it's either `Directory` that has no content + // or `FileStream` which can't be inlined so it will be read and inlined. + const content = input.kind === 'file' ? input : input.content - index++ + yield { + name, + content, + filename, + headers } - } catch (err) { - // workaround for https://github.com/node-fetch/node-fetch/issues/753 - abortController.abort(err) - } finally { - yield `\r\n--${boundary}--\r\n` - } - } - return { - headers: merge(headers, { - 'Content-Type': `multipart/form-data; boundary=${boundary}` - }), - body: await toStream(streamFiles(source)) + index++ + } + } catch (err) { + // workaround for https://github.com/node-fetch/node-fetch/issues/753 + // @ts-ignore - abort does not take an argument + abortController.abort(err) + } } diff --git a/packages/ipfs-http-client/src/lib/to-body.browser.js b/packages/ipfs-http-client/src/lib/to-body.browser.js new file mode 100644 index 0000000000..2780eae59f --- /dev/null +++ b/packages/ipfs-http-client/src/lib/to-body.browser.js @@ -0,0 +1,23 @@ +// @ts-check +'use strict' +/* eslint-env browser */ + +// browsers can't stream. When the 'Send ReadableStream in request body' row +// is green here: https://developer.mozilla.org/en-US/docs/Web/API/Request/Request#Browser_compatibility +// we'll be able to wrap the passed iterator in the it-to-browser-readablestream module +// in the meantime we create Blob out of all parts. + +/** + * Turns async iterable of the `BlobPart`s into an aggregate `Blob`, so it can + * be used as fetch request body. 
+ * @param {AsyncIterable} source + * @returns {Promise} + */ +module.exports = async (source) => { + const parts = [] + for await (const chunk of source) { + parts.push(chunk) + } + + return new Blob(parts) +} diff --git a/packages/ipfs-http-client/src/lib/to-body.js b/packages/ipfs-http-client/src/lib/to-body.js new file mode 100644 index 0000000000..a6d63c5609 --- /dev/null +++ b/packages/ipfs-http-client/src/lib/to-body.js @@ -0,0 +1,36 @@ +// @ts-check +'use strict' + +const toStream = require('it-to-stream') +const { readBlob, Blob } = require('ipfs-core-utils/src/files/blob') + +/** + * Takes async iterable of blob parts and inlines all the blobs. Resulting async + * iterable no longer contains any blobs. Useful for turning this into node + * stream. + * @param {AsyncIterable} source + * @returns {AsyncIterable} + */ +const unpackBlobs = async function * UnpackBlob (source) { + for await (const chunk of source) { + if (chunk instanceof Blob) { + yield * readBlob(chunk) + } else { + yield chunk + } + } +} + +/** + * @typedef {import('stream').Readable} Readable + */ + +/** + * Takes an async iterable of `BlobParts` and returns value that can be + * used as a fetch request body. In node that would be a `Readable` stream + * in browser it will be a `Promise`. + * @param {AsyncIterable} source + * @returns {Readable|Promise} + */ +module.exports = (source) => + toStream.readable(unpackBlobs(source)) diff --git a/packages/ipfs-http-client/src/lib/to-stream.browser.js b/packages/ipfs-http-client/src/lib/to-stream.browser.js deleted file mode 100644 index 9f5784fedb..0000000000 --- a/packages/ipfs-http-client/src/lib/to-stream.browser.js +++ /dev/null @@ -1,22 +0,0 @@ -'use strict' - -// browsers can't stream. 
When the 'Send ReadableStream in request body' row -// is green here: https://developer.mozilla.org/en-US/docs/Web/API/Request/Request#Browser_compatibility -// we'll be able to wrap the passed iterator in the it-to-browser-readablestream module -// in the meantime we have to convert the whole thing to a BufferSource of some sort -const toBuffer = require('it-to-buffer') -const { Buffer } = require('buffer') - -module.exports = (it) => { - async function * bufferise (source) { - for await (const chunk of source) { - if (Buffer.isBuffer(chunk)) { - yield chunk - } else { - yield Buffer.from(chunk) - } - } - } - - return toBuffer(bufferise(it)) -} diff --git a/packages/ipfs-http-client/src/lib/to-stream.js b/packages/ipfs-http-client/src/lib/to-stream.js deleted file mode 100644 index f0f59ffc50..0000000000 --- a/packages/ipfs-http-client/src/lib/to-stream.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -const toStream = require('it-to-stream') - -module.exports = (it) => { - return toStream.readable(it) -}