@@ -12,8 +12,6 @@ IPFS unixFS Engine
 ![](https://img.shields.io/badge/npm-%3E%3D3.0.0-orange.svg?style=flat-square)
 ![](https://img.shields.io/badge/Node.js-%3E%3D4.0.0-orange.svg?style=flat-square)
 
-[![Sauce Test Status](https://saucelabs.com/browser-matrix/ipfs-unixfs-engine.svg)](https://saucelabs.com/u/ipfs-unixfs-engine)
-
 > JavaScript implementation of the layout and chunking mechanisms used by IPFS
 
 ## Table of Contents
@@ -29,20 +27,10 @@ IPFS unixFS Engine
 - [Contribute](#contribute)
 - [License](#license)
 
-## BEWARE BEWARE BEWARE there might be 🐉
-
-This module has passed through several iterations and still is far from a nice and easy understandable codebase. Currently missing features:
-
-- [ ] tar importer
-- [x] trickle dag exporter
-- [ ] sharding (HAMT)
-
 ## Install
 
-With [npm](https://npmjs.org/) installed, run
-
 ```
-$ npm install ipfs-unixfs-engine
+> npm install ipfs-unixfs-engine
 ```
 
 ## Usage
@@ -51,56 +39,35 @@ $ npm install ipfs-unixfs-engine
 
 Let's create a little directory to import:
 ```sh
-$ cd /tmp
-$ mkdir foo
-$ echo 'hello' > foo/bar
-$ echo 'world' > foo/quux
+> cd /tmp
+> mkdir foo
+> echo 'hello' > foo/bar
+> echo 'world' > foo/quux
 ```
 
 And write the importing logic:
 ```js
-// Dependencies to create a DAG Service (where the dir will be imported into)
-const memStore = require('abstract-blob-store')
-const Repo = require('ipfs-repo')
-const Block = require('ipfs-block')
-const BlockService = require('ipfs-block-service')
-const MerkleDag = require('ipfs-merkle-dag')
-const fs = require('fs')
-
-const repo = new Repo('', { stores: memStore })
-const blockService = new BlockService(repo)
-const dagService = new ipfsMerkleDag.DAGService(blocks)
-
-
 const Importer = require('ipfs-unixfs-engine').Importer
-const filesAddStream = new Importer(dagService)
+const filesAddStream = new Importer(<dag or ipld-resolver instance>)
 
 // An array to hold the return of nested file/dir info from the importer
 // A root DAG Node is received upon completion
 
 const res = []
 
 // Import path /tmp/foo/bar
-
 const rs = fs.createReadStream(file)
 const rs2 = fs.createReadStream(file2)
-const input = {path: /tmp/foo/bar, content: rs}
-const input2 = {path: /tmp/foo/quxx, content: rs2}
+const input = { path: '/tmp/foo/bar', content: rs }
+const input2 = { path: '/tmp/foo/quux', content: rs2 }
 
 // Listen for the data event from the importer stream
-
-filesAddStream.on('data', (info) => {
-  res.push(info)
-})
+filesAddStream.on('data', (info) => res.push(info))
 
 // The end event of the stream signals that the importer is done
-
-filesAddStream.on('end', () => {
-  console.log('Finished filesAddStreaming files!')
-})
+filesAddStream.on('end', () => console.log('Finished filesAddStreaming files!'))
 
 // Calling write on the importer to filesAddStream the file/object tuples
-
 filesAddStream.write(input)
 filesAddStream.write(input2)
 filesAddStream.end()
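
Taken as a whole, the example above leaves a few things implicit: the `require('fs')` call was dropped along with the old dependency block, the paths need to be quoted strings, and the constructor placeholder needs a real DAG or IPLD-resolver instance. A minimal runnable sketch, assuming such an instance exists under the hypothetical name `ipldResolver`:

```js
// Minimal end-to-end import sketch; `ipldResolver` is a hypothetical
// stand-in for the <dag or ipld-resolver instance> placeholder above.
const fs = require('fs')
const Importer = require('ipfs-unixfs-engine').Importer

const filesAddStream = new Importer(ipldResolver)
const res = []

// Collect per-file info; the root DAG node arrives last
filesAddStream.on('data', (info) => res.push(info))
filesAddStream.on('end', () => console.log('Finished importing files!'))

// Write { path, content } tuples, then close the stream
filesAddStream.write({ path: '/tmp/foo/bar', content: fs.createReadStream('/tmp/foo/bar') })
filesAddStream.write({ path: '/tmp/foo/quux', content: fs.createReadStream('/tmp/foo/quux') })
filesAddStream.end()
```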
@@ -129,7 +96,7 @@ When run, the stat of DAG Node is outputted for each file on data event until th
 ### Importer API
 
 ```js
-const Importer = require('ipfs-unixfs-engine').importer
+const Importer = require('ipfs-unixfs-engine').Importer
 ```
 
 #### const add = new Importer(dag)
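
Each `'data'` event delivers the stat of one imported node, with the root DAG node arriving last. A short consumption sketch, assuming the `{ path, multihash, size }` shape shown in the README's example output (elided from this diff):

```js
// Assumes each emitted object carries { path, multihash, size };
// `dag` is the DAG or IPLD-resolver instance from the heading above.
const add = new Importer(dag)
add.on('data', (info) => console.log(info.path, info.size))
```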
@@ -173,24 +140,11 @@ In the second argument of the importer constructor you can specify the following
 ### Example Exporter
 
 ```
-const Repo = require('ipfs-repo')
-const Block = require('ipfs-block')
-const BlockService = require('ipfs-block-service')
-const MerkleDAG = require('ipfs-merkle-dag')
-
-const repo = new Repo('', { stores: memStore })
-const blockService = new BlockService(repo)
-const dagService = new MerkleDag.DAGService(blockService)
-
 // Create an export readable object stream with the hash you want to export and a dag service
-
-const filesStream = Exporter(<multihash>, dag)
+const filesStream = Exporter(<multihash>, <dag or ipld-resolver instance>)
 
 // Pipe the return stream to console
-
-filesStream.on('data', (file) => {
-  file.content.pipe(process.stdout)
-}
+filesStream.on('data', (file) => file.content.pipe(process.stdout))
 ```
 
 ### Exporter: API
@@ -199,9 +153,9 @@ filesStream.on('data', (file) => {
 const Exporter = require('ipfs-unixfs-engine').Exporter
 ```
 
-### new Exporter(hash, dagService)
+### new Exporter(<hash>, <dag or ipld-resolver>)
 
-Uses the given [DAG Service][] to fetch an IPFS [UnixFS][] object(s) by their multiaddress.
+Uses the given [dag API or an ipld-resolver instance][IPLD Resolver] to fetch IPFS [UnixFS][] objects by their multihash.
 
 Creates a new readable stream in object mode that outputs objects of the form
 
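Concretely, a full export run might read as the following sketch, where `rootHash` (a multihash from a previous import) and `ipldResolver` are assumed placeholder names rather than README identifiers:

```js
// Hedged sketch: `rootHash` and `ipldResolver` are placeholders.
const Exporter = require('ipfs-unixfs-engine').Exporter

const filesStream = Exporter(rootHash, ipldResolver)

// Each emitted object exposes a `content` readable stream of file bytes
filesStream.on('data', (file) => file.content.pipe(process.stdout))
// Errors surface as on any stream
filesStream.on('error', (err) => console.error(err))
```
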
@@ -215,7 +169,7 @@ Creates a new readable stream in object mode that outputs objects of the form
 Errors are received as with a normal stream, by listening on the `'error'` event.
 
 
-[DAG Service]: https://github.com/vijayee/js-ipfs-merkle-dag/
+[IPLD Resolver]: https://github.com/ipld/js-ipld-resolver
 [UnixFS]: https://github.com/ipfs/specs/tree/master/unixfs
 
 ## Contribute