 'use strict'

 const unixFSEngine = require('./../src')
-const importer = unixFSEngine.importer
 const exporter = unixFSEngine.exporter
 const BlockService = require('ipfs-blocks').BlockService
 const DAGService = require('ipfs-merkle-dag').DAGService
-const DAGNode = require('ipfs-merkle-dag').DAGNode
 const UnixFS = require('ipfs-unixfs')
-const streamifier = require('streamifier')

 const expect = require('chai').expect

-const smallBuf = require('buffer!./test-data/200Bytes.txt')
-const bigBuf = require('buffer!./test-data/1.2MiB.txt')
-const bigBlock = require('buffer!./test-data/1.2MiB.txt.block')
-const bigLink = require('buffer!./test-data/1.2MiB.txt.link-block0')
-const marbuf = require('buffer!./test-data/200Bytes.txt.block')
+// const smallBuf = require('buffer!./test-data/200Bytes.txt')
+// const bigBuf = require('buffer!./test-data/1.2MiB.txt')
+// const bigBlock = require('buffer!./test-data/1.2MiB.txt.block')
+// const bigLink = require('buffer!./test-data/1.2MiB.txt.link-block0')
+// const marbuf = require('buffer!./test-data/200Bytes.txt.block')

 module.exports = function (repo) {
-  describe('layout: importer', function () {
-    it('import a small buffer', function (done) {
-      // this is just like "import a small file"
-      const r = streamifier.createReadStream(smallBuf)
-      const i = new Importer(ds)
-      i.on('file', (file) => {
-        expect(file.path).to.equal('200Bytes.txt')
-        expect(bs58.encode(file.multihash)).to.equal('QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8')
-        expect(file.size).to.equal(211)
-        done()
-      })
-      i.add({path: '200Bytes.txt', stream: r})
-      i.finish()
-    })
-
-    /*it('import a big buffer', function (done) {
-      // this is just like "import a big file"
-      const buf = bigBuf
-      const bs = new BlockService(repo)
-      const ds = new DAGService(bs)
-      importer.import(buf, ds, function (err, stat) {
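+  // A file this small fits in a single DAG node, so the exporter's output
+  // can be checked directly against the UnixFS data stored in the root node.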
+  it('export a file with no links', (done) => {
+    const hash = 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8'
+    const bs = new BlockService(repo)
+    const ds = new DAGService(bs)
+    const testExport = exporter(hash, ds)
+    testExport.on('file', (data) => {
+      ds.get(hash, (err, fetchedNode) => {
         expect(err).to.not.exist
-        ds.get(stat.Hash, function (err, node) {
-          expect(err).to.not.exist
-          const bigDAGNode = new DAGNode()
-          bigDAGNode.unMarshal(bigBlock)
-          expect(node.size()).to.equal(bigDAGNode.size())
-          expect(node.links).to.deep.equal(bigDAGNode.links)
-
-          const nodeUnixFS = UnixFS.unmarshal(node.data)
-          const bigDAGNodeUnixFS = UnixFS.unmarshal(bigDAGNode.data)
-          expect(nodeUnixFS.type).to.equal(bigDAGNodeUnixFS.type)
-          expect(nodeUnixFS.data).to.deep.equal(bigDAGNodeUnixFS.data)
-          expect(nodeUnixFS.blockSizes).to.deep.equal(bigDAGNodeUnixFS.blockSizes)
-          expect(nodeUnixFS.fileSize()).to.equal(bigDAGNodeUnixFS.fileSize())
-
-          expect(node.data).to.deep.equal(bigDAGNode.data)
-          expect(node.multihash()).to.deep.equal(bigDAGNode.multihash())
-
-          ds.get(node.links[0].hash, function (err, node) {
-            expect(err).to.not.exist
-            const leaf = new DAGNode()
-
-            const marbuf2 = bigLink
-            leaf.unMarshal(marbuf2)
-            expect(node.links).to.deep.equal(leaf.links)
-            expect(node.links.length).to.equal(0)
-            expect(leaf.links.length).to.equal(0)
-            expect(leaf.marshal()).to.deep.equal(marbuf2)
-            const nodeUnixFS = UnixFS.unmarshal(node.data)
-            const leafUnixFS = UnixFS.unmarshal(leaf.data)
-            expect(nodeUnixFS.type).to.equal(leafUnixFS.type)
-            expect(nodeUnixFS.fileSize()).to.equal(leafUnixFS.fileSize())
-            expect(nodeUnixFS.data).to.deep.equal(leafUnixFS.data)
-            expect(nodeUnixFS.blockSizes).to.deep.equal(leafUnixFS.blockSizes)
-            expect(node.data).to.deep.equal(leaf.data)
-            expect(node.marshal()).to.deep.equal(leaf.marshal())
-            done()
-          })
-        })
-      })
-    })*/
-
-    it('export a file with no links', (done) => {
-      const hash = 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8'
-      const bs = new BlockService(repo)
-      const ds = new DAGService(bs)
-      const testExport = exporter(hash, ds)
-      testExport.on('file', (data) => {
-        ds.get(hash, (err, fetchedNode) => {
-          expect(err).to.not.exist
-          const unmarsh = UnixFS.unmarshal(fetchedNode.data)
-          expect(unmarsh.data).to.deep.equal(data.stream._readableState.buffer[0])
-          done()
-        })
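+        // _readableState.buffer[0] peeks at the first chunk queued inside the
+        // readable stream without consuming it (a Node.js-internal shortcut)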
+        const unmarsh = UnixFS.unmarshal(fetchedNode.data)
+        expect(unmarsh.data).to.deep.equal(data.stream._readableState.buffer[0])
+        done()
       })
     })
+  })

-    it('export a small file with links', (done) => {
-      const hash = 'QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q'
-      const bs = new BlockService(repo)
-      const ds = new DAGService(bs)
-      const testExport = exporter(hash, ds)
-      testExport.on('file', (data) => {
-        expect(data.stream).to.exist
-        done()
-      })
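+  // A chunked file: here we only assert that the exporter hands back a stream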
+  it('export a small file with links', (done) => {
+    const hash = 'QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q'
+    const bs = new BlockService(repo)
+    const ds = new DAGService(bs)
+    const testExport = exporter(hash, ds)
+    testExport.on('file', (data) => {
+      expect(data.stream).to.exist
+      done()
     })
+  })

-    it('export a large file > 5mb', (done) => {
-      const hash = 'QmRQgufjp9vLE8XK2LGKZSsPCFCF6e4iynCQtNB5X2HBKE'
-      const bs = new BlockService(repo)
-      const ds = new DAGService(bs)
-      const testExport = exporter(hash, ds)
-      testExport.on('file', (data) => {
-        expect(data.stream).to.exist
-        done()
-      })
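+  // Same smoke test for a file large enough to span many chunks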
+  it('export a large file > 5mb', (done) => {
+    const hash = 'QmRQgufjp9vLE8XK2LGKZSsPCFCF6e4iynCQtNB5X2HBKE'
+    const bs = new BlockService(repo)
+    const ds = new DAGService(bs)
+    const testExport = exporter(hash, ds)
+    testExport.on('file', (data) => {
+      expect(data.stream).to.exist
+      done()
     })
+  })

-    it('export a directory', (done) => {
-      const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN'
-      const bs = new BlockService(repo)
-      const ds = new DAGService(bs)
-      const testExport = exporter(hash, ds)
-      var fs = []
-      testExport.on('file', (data) => {
-        fs.push(data)
-      })
-      setTimeout(() => {
-        expect(fs[0].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/200Bytes.txt')
-        expect(fs[1].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/dir-another')
-        expect(fs[2].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/200Bytes.txt')
-        expect(fs[3].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/level-2')
-        done()
-      }, 1000)
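+  // A directory export emits one 'file' event per entry, with paths rooted
+  // at the directory's own hash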
+  it('export a directory', (done) => {
+    const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN'
+    const bs = new BlockService(repo)
+    const ds = new DAGService(bs)
+    const testExport = exporter(hash, ds)
+    var fs = []
+    testExport.on('file', (data) => {
+      fs.push(data)
     })
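+    // 'file' events arrive asynchronously; give the export a moment to
+    // finish before asserting on the collected paths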
+    setTimeout(() => {
+      expect(fs[0].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/200Bytes.txt')
+      expect(fs[1].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/dir-another')
+      expect(fs[2].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/200Bytes.txt')
+      expect(fs[3].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/level-2')
+      done()
+    }, 1000)
   })
 }