This repository was archived by the owner on Mar 10, 2020. It is now read-only.

refactor: migrate to hapi 18 #36

Merged: 3 commits, Jan 31, 2019
Changes from all commits

package.json (2 changes: 1 addition & 1 deletion)
@@ -57,8 +57,8 @@
    "interface-datastore": "~0.6.0",
    "ipfs-multipart": "~0.1.0",
    "ipfs-unixfs": "~0.1.16",
-   "ipfs-unixfs-importer": "~0.38.0",
    "ipfs-unixfs-exporter": "~0.35.5",
+   "ipfs-unixfs-importer": "~0.38.0",
    "ipld-dag-pb": "~0.15.0",
    "is-pull-stream": "~0.0.0",
    "is-stream": "^1.1.0",

src/http/cp.js (86 changes: 39 additions & 47 deletions)
@@ -2,57 +2,49 @@

const Joi = require('joi')

-const mfsCp = (api) => {
-  api.route({
-    method: 'POST',
-    path: '/api/v0/files/cp',
-    config: {
-      handler: (request, reply) => {
-        const {
-          ipfs
-        } = request.server.app
-        const {
-          arg,
-          parents,
-          format,
-          hashAlg,
-          shardSplitThreshold
-        } = request.query
-
-        const args = arg.concat({
-          parents,
-          format,
-          hashAlg,
-          shardSplitThreshold
-        })
-
-        return ipfs.files.cp.apply(null, args)
-          .then(() => reply())
-          .catch(error => {
-            reply({
-              Message: error.message,
-              Code: error.code || 0,
-              Type: 'error'
-            }).code(500).takeover()
-          })
-      },
-      validate: {
-        options: {
-          allowUnknown: true,
-          stripUnknown: true
-        },
-        query: Joi.object().keys({
-          arg: Joi.array().items(Joi.string()).min(2),
-          parents: Joi.boolean().default(false),
-          format: Joi.string().valid([
-            'dag-pb',
-            'dag-cbor'
-          ]).default('dag-pb'),
-          hashAlg: Joi.string().default('sha2-256')
-        })
-      }
-    }
-  })
-}
+const mfsCp = {
+  method: 'POST',
+  path: '/api/v0/files/cp',
+  async handler (request, h) {
+    const {
+      ipfs
+    } = request.server.app
+    const {
+      arg,
+      parents,
+      format,
+      hashAlg,
+      shardSplitThreshold
+    } = request.query
+
+    const args = arg.concat({
+      parents,
+      format,
+      hashAlg,
+      shardSplitThreshold
+    })
+
+    await ipfs.files.cp.apply(null, args)
+
+    return h.response()
+  },
+  options: {
+    validate: {
+      options: {
+        allowUnknown: true,
+        stripUnknown: true
+      },
+      query: Joi.object().keys({
+        arg: Joi.array().items(Joi.string()).min(2),
+        parents: Joi.boolean().default(false),
+        format: Joi.string().valid([
+          'dag-pb',
+          'dag-cbor'
+        ]).default('dag-pb'),
+        hashAlg: Joi.string().default('sha2-256')
+      })
+    }
+  }
+}

module.exports = mfsCp
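
One behavioural note on the diff above: the old handler mapped failures to a { Message, Code, Type } payload with a 500 status inside each route, while the new async handler simply awaits the call and lets any thrown error propagate to hapi. If the old payload shape still needs to be produced, it would presumably be restored centrally rather than per route; the following is only a sketch of one way to do that with an onPreResponse extension, not code from this PR:

```js
// Hypothetical global error mapper, shown for illustration only.
// It reproduces the { Message, Code, Type } body that the per-route
// .catch handlers used to build before this PR.
const Hapi = require('hapi')

const server = Hapi.server({ port: 0 })

server.ext('onPreResponse', (request, h) => {
  const response = request.response

  // non-error responses pass straight through
  if (!response.isBoom) {
    return h.continue
  }

  return h.response({
    Message: response.message,
    Code: response.code || 0,
    Type: 'error'
  }).code(500)
})
```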

src/http/flush.js (54 changes: 23 additions & 31 deletions)
@@ -2,40 +2,32 @@

const Joi = require('joi')

-const mfsFlush = (api) => {
-  api.route({
-    method: 'POST',
-    path: '/api/v0/files/flush',
-    config: {
-      handler: (request, reply) => {
-        const {
-          ipfs
-        } = request.server.app
-        const {
-          arg
-        } = request.query
-
-        return ipfs.files.flush.call(null, arg)
-          .then(() => reply())
-          .catch(error => {
-            reply({
-              Message: error.message,
-              Code: error.code || 0,
-              Type: 'error'
-            }).code(500).takeover()
-          })
-      },
-      validate: {
-        options: {
-          allowUnknown: true,
-          stripUnknown: true
-        },
-        query: Joi.object().keys({
-          arg: Joi.string().required()
-        })
-      }
-    }
-  })
-}
+const mfsFlush = {
+  method: 'POST',
+  path: '/api/v0/files/flush',
+  async handler (request, h) {
+    const {
+      ipfs
+    } = request.server.app
+    const {
+      arg
+    } = request.query
+
+    await ipfs.files.flush.call(null, arg)
+
+    return h.response()
+  },
+  options: {
+    validate: {
+      options: {
+        allowUnknown: true,
+        stripUnknown: true
+      },
+      query: Joi.object().keys({
+        arg: Joi.string().required()
+      })
+    }
+  }
+}

module.exports = mfsFlush
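
For completeness, a sketch of how one of these new route objects can be exercised directly with hapi 18's server.inject(). The stubbed ipfs instance, the run wrapper, and the require path (assumed relative to the repo root) are illustrative only, not part of the PR:

```js
const Hapi = require('hapi')
const mfsFlush = require('./src/http/flush')

const run = async () => {
  const server = Hapi.server()

  // the handler reads the ipfs instance from server.app; stub it out here
  server.app.ipfs = { files: { flush: async () => {} } }

  server.route(mfsFlush)

  const res = await server.inject({
    method: 'POST',
    url: '/api/v0/files/flush?arg=/'
  })

  console.log(res.statusCode) // 200 on success; a thrown error surfaces as a 500
}

run()
```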

src/http/index.js (22 changes: 11 additions & 11 deletions)
@@ -10,14 +10,14 @@ const rm = require('./rm')
const stat = require('./stat')
const write = require('./write')

-module.exports = (api) => {
-  cp(api)
-  flush(api)
-  ls(api)
-  mkdir(api)
-  mv(api)
-  read(api)
-  rm(api)
-  stat(api)
-  write(api)
-}
+module.exports = [
+  cp,
+  flush,
+  ls,
+  mkdir,
+  mv,
+  read,
+  rm,
+  stat,
+  write
+]
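
The export change above means consumers now register the routes themselves instead of handing an api object to each module. Below is a sketch of how the exported array is presumably wired into a hapi 18 server; the createServer helper, host, and port are assumptions, not code from this repo:

```js
const Hapi = require('hapi')
const routes = require('./src/http')

// `ipfs` is assumed to be an already-initialised IPFS instance
const createServer = async (ipfs) => {
  const server = Hapi.server({ host: '127.0.0.1', port: 5002 })

  // the handlers read the instance from request.server.app.ipfs
  server.app.ipfs = ipfs

  // server.route() accepts an array of route objects, replacing the old
  // cp(api), flush(api), ... registration calls
  server.route(routes)

  await server.start()

  return server
}
```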

src/http/ls.js (148 changes: 64 additions & 84 deletions)
@@ -14,103 +14,83 @@ const mapEntry = (entry) => {
}

-const mfsLs = (api) => {
-  api.route({
-    method: 'POST',
-    path: '/api/v0/files/ls',
-    config: {
-      handler: (request, reply) => {
-        const {
-          ipfs
-        } = request.server.app
-        const {
-          arg,
-          long,
-          cidBase,
-          stream
-        } = request.query
-
-        if (stream) {
-          const readableStream = ipfs.files.lsReadableStream(arg, {
-            long,
-            cidBase
-          })
-
-          if (!readableStream._read) {
-            // make the stream look like a Streams2 to appease Hapi
-            readableStream._read = () => {}
-            readableStream._readableState = {}
-          }
-
-          let passThrough
-
-          readableStream.on('data', (entry) => {
-            if (!passThrough) {
-              passThrough = new PassThrough()
-
-              reply(passThrough)
-                .header('X-Stream-Output', '1')
-            }
-
-            passThrough.write(JSON.stringify(mapEntry(entry)) + '\n')
-          })
-
-          readableStream.once('end', (entry) => {
-            if (passThrough) {
-              passThrough.end(entry ? JSON.stringify(mapEntry(entry)) + '\n' : undefined)
-            }
-          })
-
-          readableStream.once('error', (error) => {
-            reply({
-              Message: error.message,
-              Code: error.code || 0,
-              Type: 'error'
-            }).code(500).takeover()
-          })
-
-          return
-        }
-
-        return ipfs.files.ls(arg, {
-          long,
-          cidBase
-        })
-          .then(files => {
-            reply({
-              Entries: files.map(mapEntry)
-            })
-          })
-          .catch(error => {
-            reply({
-              Message: error.message,
-              Code: error.code || 0,
-              Type: 'error'
-            }).code(500).takeover()
-          })
-      },
-      validate: {
-        options: {
-          allowUnknown: true,
-          stripUnknown: true
-        },
-        query: Joi.object().keys({
-          arg: Joi.string().default('/'),
-          long: Joi.boolean().default(false),
-          cidBase: Joi.string().default('base58btc'),
-          stream: Joi.boolean().default(false)
-        })
-          .rename('l', 'long', {
-            override: true,
-            ignoreUndefined: true
-          })
-          .rename('s', 'stream', {
-            override: true,
-            ignoreUndefined: true
-          })
-      }
-    }
-  })
-}
+const mfsLs = {
+  method: 'POST',
+  path: '/api/v0/files/ls',
+  async handler (request, h) {
+    const {
+      ipfs
+    } = request.server.app
+    const {
+      arg,
+      long,
+      cidBase,
+      stream
+    } = request.query
+
+    if (stream) {
+      const responseStream = await new Promise((resolve, reject) => {
+        const readableStream = ipfs.files.lsReadableStream(arg, {
+          long,
+          cidBase
+        })
+
+        let passThrough
+
+        readableStream.on('data', (entry) => {
+          if (!passThrough) {
+            passThrough = new PassThrough()
+            resolve(passThrough)
+          }
+
+          passThrough.write(JSON.stringify(mapEntry(entry)) + '\n')
+        })
+
+        readableStream.once('end', (entry) => {
+          if (passThrough) {
+            passThrough.end(entry ? JSON.stringify(mapEntry(entry)) + '\n' : undefined)
+          }
+        })
+
+        readableStream.once('error', (error) => {
+          reject(error)

Review thread on the reject(error) line above:

Collaborator: What would happen here if an error occurred during writing (so we'd already resolved the promise)?

@alanshaw (author), Jan 31, 2019: A promise can only be resolved/rejected once, and first one wins, so nothing really. Hapi would have already started streaming to the client. At that point we've handed over responsibility of responding (by passing the stream to hapi) and my expectation would be for either Hapi or Node.js to handle an error in the stream and close the connection...

Previously we'd have called reply again, which definitely wouldn't have worked as the HTTP headers would have already been sent and possibly some data as well.

Collaborator: We should probably at least log the error then as presumably it would get swallowed.

Collaborator: Happy to do this later though.

+        })
+      })
+
+      return h.response(responseStream).header('X-Stream-Output', '1')
+    }
+
+    const files = await ipfs.files.ls(arg, {
+      long,
+      cidBase
+    })
+
+    return h.response({
+      Entries: files.map(mapEntry)
+    })
+  },
+  options: {
+    validate: {
+      options: {
+        allowUnknown: true,
+        stripUnknown: true
+      },
+      query: Joi.object().keys({
+        arg: Joi.string().default('/'),
+        long: Joi.boolean().default(false),
+        cidBase: Joi.string().default('base58btc'),
+        stream: Joi.boolean().default(false)
+      })
+        .rename('l', 'long', {
+          override: true,
+          ignoreUndefined: true
+        })
+        .rename('s', 'stream', {
+          override: true,
+          ignoreUndefined: true
+        })
+    }
+  }
+}

module.exports = mfsLs
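
The review thread above asks what happens if the source stream errors after the promise has resolved: reject() becomes a no-op, so the error risks being silently swallowed. The following is a minimal sketch of the logging follow-up discussed there; the streamToResponse helper and its log parameter are hypothetical and not part of this PR:

```js
const { PassThrough } = require('stream')

// Resolves with a PassThrough once the first chunk arrives; late errors are
// logged and used to destroy the response stream instead of being swallowed.
const streamToResponse = (readableStream, log = console.error) => {
  return new Promise((resolve, reject) => {
    let passThrough

    readableStream.on('data', (chunk) => {
      if (!passThrough) {
        passThrough = new PassThrough()
        resolve(passThrough)
      }

      passThrough.write(chunk)
    })

    readableStream.once('end', () => {
      if (passThrough) {
        passThrough.end()
      }
    })

    readableStream.once('error', (error) => {
      if (passThrough) {
        log(error)                   // would otherwise be swallowed
        passThrough.destroy(error)   // aborts the in-flight HTTP response
      } else {
        reject(error)                // fail the request before headers are sent
      }
    })
  })
}
```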