This repository has been archived by the owner on Feb 12, 2024. It is now read-only.

refactor: use new IPFS async/await APIs (#30)
BREAKING CHANGE: Switch to using async/await and async iterators.
Alan Shaw authored and vasco-santos committed Jan 7, 2020
1 parent 8828822 commit 68f1204
Showing 9 changed files with 324 additions and 388 deletions.
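
The breaking change is in how consumers call this module and the js-ipfs core APIs it wraps: results are awaited promises, and file content arrives as an async iterable rather than a Node.js stream or callback. A minimal consumer sketch follows (the `fetchFromIpfs` wrapper, its argument names, and the package name `ipfs-http-response` are illustrative assumptions, not part of this commit; it also assumes an environment that provides the `Response` global, such as a service worker):

'use strict'

const { getResponse } = require('ipfs-http-response')

// Hypothetical caller: `ipfs` is a js-ipfs instance, `ipfsPath` an /ipfs/... path.
async function fetchFromIpfs (ipfs, ipfsPath) {
  // getResponse now resolves to a Response object instead of taking callbacks
  // or piping Node streams.
  const response = await getResponse(ipfs, ipfsPath)
  return response
}

The same pattern applies to the core APIs used internally: for example, `ipfs.cat(cid)` is consumed as an async iterable of Buffers, which is why the `it-*` helpers are added to package.json below.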
1 change: 1 addition & 0 deletions .travis.yml
@@ -8,6 +8,7 @@ stages:

node_js:
- '10'
- '12'

os:
- linux
17 changes: 10 additions & 7 deletions package.json
@@ -36,19 +36,22 @@
"debug": "^4.1.1",
"file-type": "^8.0.0",
"filesize": "^3.6.1",
"get-stream": "^3.0.0",
"ipfs-unixfs": "~0.1.16",
"it-buffer": "^0.1.1",
"it-concat": "^1.0.0",
"it-reader": "^2.1.0",
"it-to-stream": "^0.1.1",
"mime-types": "^2.1.21",
"multihashes": "~0.4.14",
"p-try-each": "^1.0.1",
"stream-to-blob": "^2.0.0"
"p-try-each": "^1.0.1"
},
"devDependencies": {
"aegir": "^18.0.3",
"aegir": "^20.5.0",
"chai": "^4.2.0",
"dirty-chai": "^2.0.1",
"ipfs": "0.36.0",
"ipfsd-ctl": "~0.42.2"
"get-stream": "^3.0.0",
"ipfs": "github:ipfs/js-ipfs#refactor/async-await-roundup",
"ipfsd-ctl": "^1.0.2",
"it-all": "^1.0.1"
},
"contributors": [
"Alex Potsides <[email protected]>",
3 changes: 1 addition & 2 deletions src/dir-view/index.js
@@ -17,7 +17,7 @@ function getParentHref (path) {
function buildFilesList (path, links) {
const rows = links.map((link) => {
let row = [
`<div class="ipfs-icon ipfs-_blank">&nbsp;</div>`,
'<div class="ipfs-icon ipfs-_blank">&nbsp;</div>',
`<a href="${path}${path.endsWith('/') ? '' : '/'}${link.Name}">${link.Name}</a>`,
filesize(link.Tsize)
]
@@ -78,5 +78,4 @@ function render (path, links) {
`
}

exports = module.exports
exports.render = render
130 changes: 52 additions & 78 deletions src/index.js
@@ -1,115 +1,89 @@
/* global Response */
/* global Response, Blob */

'use strict'

const stream = require('stream')
const toBlob = require('stream-to-blob')

const debug = require('debug')
const log = debug('ipfs:http:response')
const toStream = require('it-to-stream')
const concat = require('it-concat')
const toBuffer = require('it-buffer')
const log = require('debug')('ipfs:http:response')

const resolver = require('./resolver')
const pathUtils = require('./utils/path')
const detectContentType = require('./utils/content-type')

// TODO: pass path and add Etag and X-Ipfs-Path + tests
const header = (status = 200, statusText = 'OK', headers = {}) => ({
const getHeader = (status = 200, statusText = 'OK', headers = {}) => ({
status,
statusText,
headers
})

const response = async (ipfsNode, ipfsPath) => {
// handle hash resolve error (simple hash, test for directory now)
const handleResolveError = async (node, path, error) => {
if (error) {
const errorString = error.toString()

if (errorString.includes('dag node is a directory')) {
try {
const content = await resolver.directory(node, path, error.cid)
// dir render
if (typeof content === 'string') {
return new Response(content, header(200, 'OK', { 'Content-Type': 'text/html' }))
}

// redirect to dir entry point (index)
return Response.redirect(pathUtils.joinURLParts(path, content[0].Name))
} catch (error) {
log(error)
return new Response(errorString, header(500, error.toString()))
}
}

if (errorString.startsWith('Error: no link named')) {
return new Response(errorString, header(404, errorString))
}
// handle hash resolve error (simple hash, test for directory now)
const handleResolveError = async (node, path, error) => {
const errorString = error.toString()

if (errorString.startsWith('Error: multihash length inconsistent') || errorString.startsWith('Error: Non-base58 character')) {
return new Response(errorString, header(400, errorString))
if (errorString.includes('dag node is a directory')) {
try {
const content = await resolver.directory(node, path, error.cid)
// dir render
if (typeof content === 'string') {
return new Response(content, getHeader(200, 'OK', { 'Content-Type': 'text/html' }))
}

// redirect to dir entry point (index)
return Response.redirect(pathUtils.joinURLParts(path, content[0].Name))
} catch (error) {
log(error)
return new Response(errorString, header(500, errorString))
return new Response(errorString, getHeader(500, error.toString()))
}
}

if (errorString.startsWith('Error: no link named')) {
return new Response(errorString, getHeader(404, errorString))
}

if (errorString.startsWith('Error: multihash length inconsistent') || errorString.startsWith('Error: Non-base58 character')) {
return new Response(errorString, getHeader(400, errorString))
}

return new Response(errorString, getHeader(500, errorString))
}

const getResponse = async (ipfsNode, ipfsPath) => {
// remove trailing slash for files if needed
if (ipfsPath.endsWith('/')) {
return Response.redirect(pathUtils.removeTrailingSlash(ipfsPath))
}

try {
const resolvedData = await resolver.cid(ipfsNode, ipfsPath)
const { source, contentType } = await detectContentType(ipfsPath, ipfsNode.cat(resolvedData.cid))

const readableStream = ipfsNode.catReadableStream(resolvedData.cid)
const responseStream = new stream.PassThrough({ highWaterMark: 1 })
readableStream.pipe(responseStream)

return new Promise((resolve, reject) => {
readableStream.once('error', (error) => {
if (error) {
log(error)
return resolve(new Response(error.toString(), header(500, 'Error fetching the file')))
}
})

// return only after first chunk being checked
let contentTypeDetected = false
readableStream.on('data', async (chunk) => {
// check mime on first chunk
if (contentTypeDetected) {
return
}

contentTypeDetected = true
// return Response with mime type
const contentType = detectContentType(ipfsPath, chunk)

if (typeof Blob === 'undefined') {
return contentType
? resolve(new Response(responseStream, header(200, 'OK', { 'Content-Type': contentType })))
: resolve(new Response(responseStream, header()))
}

try {
const blob = await toBlob(responseStream)

return contentType
? resolve(new Response(blob, header(200, 'OK', { 'Content-Type': contentType })))
: resolve(new Response(blob, header()))
} catch (err) {
return resolve(new Response(err.toString(), header(500, 'Error fetching the file')))
}
})
})
if (typeof Blob === 'undefined') {
const responseStream = toStream.readable(toBuffer(source))

return contentType
? new Response(responseStream, getHeader(200, 'OK', { 'Content-Type': contentType }))
: new Response(responseStream, getHeader())
}

try {
const data = await concat(source)
const blob = new Blob([data.slice()])

return contentType
? new Response(blob, getHeader(200, 'OK', { 'Content-Type': contentType }))
: new Response(blob, getHeader())
} catch (err) {
return new Response(err.toString(), getHeader(500, 'Error fetching the file'))
}
} catch (error) {
log(error)
return handleResolveError(ipfsNode, ipfsPath, error)
}
}

module.exports = {
getResponse: response,
resolver: resolver
getResponse,
resolver
}
15 changes: 6 additions & 9 deletions src/resolver.js
@@ -2,7 +2,6 @@

const pTryEach = require('p-try-each')
const mh = require('multihashes')
const CID = require('cids')
const debug = require('debug')
const log = debug('jsipfs:http:response:resolver')
log.error = debug('jsipfs:http:response:resolver:error')
@@ -21,7 +20,7 @@ const findIndexFile = (ipfs, path) => {

return {
name: file,
cid: new CID(stats.hash)
cid: stats.cid
}
}
}))
@@ -48,18 +47,16 @@ const directory = async (ipfs, path, cid) => {
const cid = async (ipfs, path) => {
const stats = await ipfs.files.stat(path)

const cid = new CID(stats.hash)

if (stats.type.includes('directory')) {
const err = new Error('This dag node is a directory')
err.cid = cid
err.cid = stats.cid
err.fileName = stats.name
err.dagDirType = stats.type

throw err
}

return { cid }
return { cid: stats.cid }
}

const multihash = async (ipfs, path) => {
@@ -71,7 +68,7 @@ const multihash = async (ipfs, path) => {
}

module.exports = {
directory: directory,
cid: cid,
multihash: multihash
directory,
cid,
multihash
}
26 changes: 23 additions & 3 deletions src/utils/content-type.js
@@ -2,20 +2,40 @@

const fileType = require('file-type')
const mime = require('mime-types')
const Reader = require('it-reader')

const detectContentType = (path, chunk) => {
const detectContentType = async (path, source) => {
let fileSignature

// try to guess the filetype based on the first bytes
// note that `file-type` doesn't support svgs, therefore we assume it's a svg if path looks like it
if (!path.endsWith('.svg')) {
fileSignature = fileType(chunk)
try {
const reader = Reader(source)
const { value, done } = await reader.next(fileType.minimumBytes)

if (done) return { source: reader }

fileSignature = fileType(value.slice())

source = (async function * () { // eslint-disable-line require-await
yield value
yield * reader
})()
} catch (err) {
if (err.code !== 'ERR_UNDER_READ') throw err

// not enough bytes for sniffing, just yield the data
source = (async function * () { // eslint-disable-line require-await
yield err.buffer // these are the bytes that were read (if any)
})()
}
}

// if we were unable to, fallback to the `path` which might contain the extension
const mimeType = mime.lookup(fileSignature ? fileSignature.ext : path)

return mime.contentType(mimeType)
return { source, contentType: mime.contentType(mimeType) }
}

module.exports = detectContentType
2 changes: 1 addition & 1 deletion src/utils/path.js
@@ -43,7 +43,7 @@ function removeSlashFromBothEnds (url) {

function joinURLParts (...urls) {
urls = urls.filter((url) => url.length > 0)
urls = [ '' ].concat(urls.map((url) => removeSlashFromBothEnds(url)))
urls = [''].concat(urls.map((url) => removeSlashFromBothEnds(url)))

return urls.join('/')
}
