1
1
'use strict'
2
2
3
3
const Command = require ( 'ronin' ) . Command
4
- const IPFS = require ( '../../../core ' )
4
+ const utils = require ( '../../utils ' )
5
5
const debug = require ( 'debug' )
6
6
const log = debug ( 'cli:version' )
7
7
log . error = debug ( 'cli:version:error' )
8
8
const bs58 = require ( 'bs58' )
9
+ const streamifier = require ( 'streamifier' )
10
+ const fs = require ( 'fs' )
11
+ const async = require ( 'async' )
12
+ const pathj = require ( 'path' )
13
+
14
/**
 * Add a single { path, stream } pair to IPFS and print the resulting hash.
 *
 * @param {{path: string, stream: object}} pair - relative file path plus a
 *   readable stream of its contents.
 * @throws when the IPFS node cannot be obtained, when a daemon is running
 *   (remote adds are not supported yet), or when the add operation fails.
 */
function addStream (pair) {
  utils.getIPFS((err, ipfs) => {
    if (err) {
      throw err
    }
    if (utils.isDaemonOn()) {
      // Adding over the HTTP API is not wired up yet.
      throw new Error('daemon running is not supported yet')
    }
    // NOTE(review): prints the raw path before the add completes — looks like
    // progress/debug output; confirm it is intentional.
    console.log(pair.path)
    ipfs.files.add(pair, (err, res) => {
      if (err) {
        throw err
      }
      // One 'file' event per added entry; the multihash is printed base58-encoded.
      res.on('file', (file) => {
        console.log('added', bs58.encode(file.multihash).toString(), file.path)
      })
      res.finish()
    })
  })
}
41
+
42
+
43
/**
 * Recursively walk `path` and add every regular file beneath it to IPFS.
 * Each file's recorded path is made relative to the current working directory.
 *
 * @param {string} path - directory to walk (absolute or cwd-relative).
 * @throws on filesystem errors (readdirSync/statSync/readFileSync) or if the
 *   series iteration reports an error.
 */
function addDir (path) {
  const files = fs.readdirSync(path)
  // Loop-invariant: length of the cwd prefix stripped from each entry's path.
  const cwdLen = process.cwd().length
  async.forEachSeries(files, (entry, callback) => {
    const nestedPath = pathj.join(path, entry)
    const filepath = nestedPath.substring(cwdLen + 1, nestedPath.length)
    const stat = fs.statSync(nestedPath)
    if (stat.isFile()) {
      // Buffer the whole file, then re-expose it as a stream for the adder.
      const buffered = fs.readFileSync(nestedPath)
      const stream = streamifier.createReadStream(buffered)
      addStream({ path: filepath, stream: stream })
    }
    if (stat.isDirectory()) {
      addDir(nestedPath)
    }
    // NOTE(review): callback fires before addStream's async work completes,
    // so entries are only *started* in series — confirm this is acceptable.
    callback()
  }, (err) => {
    if (err) {
      throw err
    }
    console.log('done')
  })
}
70
+
71
/**
 * Dispatch an add for `path`: single files are streamed directly; directories
 * are walked only when `recursive` is true (a directory without `recursive`
 * is silently ignored — the CLI layer is expected to report the missing '-r').
 *
 * @param {boolean} recursive - whether directories should be descended into.
 * @param {string} path - file or directory to add.
 * @throws on filesystem errors (statSync/readFileSync).
 */
function readPath (recursive, path) {
  const stats = fs.statSync(path)
  if (stats.isFile()) {
    const buffered = fs.readFileSync(path)
    const stream = streamifier.createReadStream(buffered)
    // Strip any leading directories so only the basename is recorded.
    path = path.substring(path.lastIndexOf('/') + 1, path.length)
    addStream({ path: path, stream: stream })
  } else if (stats.isDirectory() && recursive) {
    addDir(path)
  }
}
9
85
10
86
module . exports = Command . extend ( {
11
87
desc : 'Add a file to IPFS using the UnixFS data format' ,
@@ -19,19 +95,35 @@ module.exports = Command.extend({
19
95
} ,
20
96
21
97
/**
 * CLI entry point: validate the arguments and hand off to readPath().
 *
 * @param {boolean} recursive - true when the '-r' flag was given.
 * @param {string} path - file or directory argument; '.' expands to cwd.
 * @throws when no path argument was supplied.
 */
run: (recursive, path) => {
  if (!path) {
    throw new Error('Error: Argument \'path\' is required')
  }
  if (path === '.' && recursive === false) {
    console.log('Error: ' + path + ' is a directory, use the \'-r\' flag to specify directories')
    // Stop here: without '-r' there is nothing further to do for a directory.
    return
  } else if (path === '.' && recursive === true) {
    path = process.cwd()
  }
  readPath(recursive, path)
}
37
129
} )
0 commit comments