diff --git a/.travis.yml b/.travis.yml index 1b67b849fe..acc302d08f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,6 +8,7 @@ stages: node_js: - '10' + - '12' os: - linux diff --git a/package.json b/package.json index 01ba2d3790..1665c8a136 100644 --- a/package.json +++ b/package.json @@ -36,19 +36,22 @@ "debug": "^4.1.1", "file-type": "^8.0.0", "filesize": "^3.6.1", - "get-stream": "^3.0.0", - "ipfs-unixfs": "~0.1.16", + "it-buffer": "^0.1.1", + "it-concat": "^1.0.0", + "it-reader": "^2.1.0", + "it-to-stream": "^0.1.1", "mime-types": "^2.1.21", "multihashes": "~0.4.14", - "p-try-each": "^1.0.1", - "stream-to-blob": "^2.0.0" + "p-try-each": "^1.0.1" }, "devDependencies": { - "aegir": "^18.0.3", + "aegir": "^20.5.0", "chai": "^4.2.0", "dirty-chai": "^2.0.1", - "ipfs": "0.36.0", - "ipfsd-ctl": "~0.42.2" + "get-stream": "^3.0.0", + "ipfs": "github:ipfs/js-ipfs#refactor/async-await-roundup", + "ipfsd-ctl": "^1.0.2", + "it-all": "^1.0.1" }, "contributors": [ "Alex Potsides ", diff --git a/src/dir-view/index.js b/src/dir-view/index.js index 6ee0204a9a..aa42020813 100644 --- a/src/dir-view/index.js +++ b/src/dir-view/index.js @@ -17,7 +17,7 @@ function getParentHref (path) { function buildFilesList (path, links) { const rows = links.map((link) => { let row = [ - `
<div class="ipfs-icon ipfs-_blank">&nbsp;</div>`, + '<div class="ipfs-icon ipfs-_blank">&nbsp;</div>
', `${link.Name}`, filesize(link.Tsize) ] @@ -78,5 +78,4 @@ function render (path, links) { ` } -exports = module.exports exports.render = render diff --git a/src/index.js b/src/index.js index f0625ef83e..023ab2ca5c 100644 --- a/src/index.js +++ b/src/index.js @@ -1,59 +1,55 @@ -/* global Response */ +/* global Response, Blob */ 'use strict' -const stream = require('stream') -const toBlob = require('stream-to-blob') - -const debug = require('debug') -const log = debug('ipfs:http:response') +const toStream = require('it-to-stream') +const concat = require('it-concat') +const toBuffer = require('it-buffer') +const log = require('debug')('ipfs:http:response') const resolver = require('./resolver') const pathUtils = require('./utils/path') const detectContentType = require('./utils/content-type') // TODO: pass path and add Etag and X-Ipfs-Path + tests -const header = (status = 200, statusText = 'OK', headers = {}) => ({ +const getHeader = (status = 200, statusText = 'OK', headers = {}) => ({ status, statusText, headers }) -const response = async (ipfsNode, ipfsPath) => { - // handle hash resolve error (simple hash, test for directory now) - const handleResolveError = async (node, path, error) => { - if (error) { - const errorString = error.toString() - - if (errorString.includes('dag node is a directory')) { - try { - const content = await resolver.directory(node, path, error.cid) - // dir render - if (typeof content === 'string') { - return new Response(content, header(200, 'OK', { 'Content-Type': 'text/html' })) - } - - // redirect to dir entry point (index) - return Response.redirect(pathUtils.joinURLParts(path, content[0].Name)) - } catch (error) { - log(error) - return new Response(errorString, header(500, error.toString())) - } - } - - if (errorString.startsWith('Error: no link named')) { - return new Response(errorString, header(404, errorString)) - } +// handle hash resolve error (simple hash, test for directory now) +const handleResolveError = async (node, path, error) => { + const errorString = error.toString() - if (errorString.startsWith('Error: multihash length inconsistent') || errorString.startsWith('Error: Non-base58 character')) { - return new Response(errorString, header(400, errorString)) + if (errorString.includes('dag node is a directory')) { + try { + const content = await resolver.directory(node, path, error.cid) + // dir render + if (typeof content === 'string') { + return new Response(content, getHeader(200, 'OK', { 'Content-Type': 'text/html' })) } + // redirect to dir entry point (index) + return Response.redirect(pathUtils.joinURLParts(path, content[0].Name)) + } catch (error) { log(error) - return new Response(errorString, header(500, errorString)) + return new Response(errorString, getHeader(500, error.toString())) } } + if (errorString.startsWith('Error: no link named')) { + return new Response(errorString, getHeader(404, errorString)) + } + + if (errorString.startsWith('Error: multihash length inconsistent') || errorString.startsWith('Error: Non-base58 character')) { + return new Response(errorString, getHeader(400, errorString)) + } + + return new Response(errorString, getHeader(500, errorString)) +} + +const getResponse = async (ipfsNode, ipfsPath) => { // remove trailing slash for files if needed if (ipfsPath.endsWith('/')) { return Response.redirect(pathUtils.removeTrailingSlash(ipfsPath)) @@ -61,48 +57,26 @@ const response = async (ipfsNode, ipfsPath) => { try { const resolvedData = await resolver.cid(ipfsNode, ipfsPath) + const { source, contentType } = 
await detectContentType(ipfsPath, ipfsNode.cat(resolvedData.cid)) - const readableStream = ipfsNode.catReadableStream(resolvedData.cid) - const responseStream = new stream.PassThrough({ highWaterMark: 1 }) - readableStream.pipe(responseStream) - - return new Promise((resolve, reject) => { - readableStream.once('error', (error) => { - if (error) { - log(error) - return resolve(new Response(error.toString(), header(500, 'Error fetching the file'))) - } - }) - - // return only after first chunk being checked - let contentTypeDetected = false - readableStream.on('data', async (chunk) => { - // check mime on first chunk - if (contentTypeDetected) { - return - } - - contentTypeDetected = true - // return Response with mime type - const contentType = detectContentType(ipfsPath, chunk) - - if (typeof Blob === 'undefined') { - return contentType - ? resolve(new Response(responseStream, header(200, 'OK', { 'Content-Type': contentType }))) - : resolve(new Response(responseStream, header())) - } - - try { - const blob = await toBlob(responseStream) - - return contentType - ? resolve(new Response(blob, header(200, 'OK', { 'Content-Type': contentType }))) - : resolve(new Response(blob, header())) - } catch (err) { - return resolve(new Response(err.toString(), header(500, 'Error fetching the file'))) - } - }) - }) + if (typeof Blob === 'undefined') { + const responseStream = toStream.readable(toBuffer(source)) + + return contentType + ? new Response(responseStream, getHeader(200, 'OK', { 'Content-Type': contentType })) + : new Response(responseStream, getHeader()) + } + + try { + const data = await concat(source) + const blob = new Blob([data.slice()]) + + return contentType + ? new Response(blob, getHeader(200, 'OK', { 'Content-Type': contentType })) + : new Response(blob, getHeader()) + } catch (err) { + return new Response(err.toString(), getHeader(500, 'Error fetching the file')) + } } catch (error) { log(error) return handleResolveError(ipfsNode, ipfsPath, error) @@ -110,6 +84,6 @@ const response = async (ipfsNode, ipfsPath) => { } module.exports = { - getResponse: response, - resolver: resolver + getResponse, + resolver } diff --git a/src/resolver.js b/src/resolver.js index 1b93b86363..2796adc663 100644 --- a/src/resolver.js +++ b/src/resolver.js @@ -2,7 +2,6 @@ const pTryEach = require('p-try-each') const mh = require('multihashes') -const CID = require('cids') const debug = require('debug') const log = debug('jsipfs:http:response:resolver') log.error = debug('jsipfs:http:response:resolver:error') @@ -21,7 +20,7 @@ const findIndexFile = (ipfs, path) => { return { name: file, - cid: new CID(stats.hash) + cid: stats.cid } } })) @@ -48,18 +47,16 @@ const directory = async (ipfs, path, cid) => { const cid = async (ipfs, path) => { const stats = await ipfs.files.stat(path) - const cid = new CID(stats.hash) - if (stats.type.includes('directory')) { const err = new Error('This dag node is a directory') - err.cid = cid + err.cid = stats.cid err.fileName = stats.name err.dagDirType = stats.type throw err } - return { cid } + return { cid: stats.cid } } const multihash = async (ipfs, path) => { @@ -71,7 +68,7 @@ const multihash = async (ipfs, path) => { } module.exports = { - directory: directory, - cid: cid, - multihash: multihash + directory, + cid, + multihash } diff --git a/src/utils/content-type.js b/src/utils/content-type.js index f4074f3425..545965597c 100644 --- a/src/utils/content-type.js +++ b/src/utils/content-type.js @@ -2,20 +2,40 @@ const fileType = require('file-type') const mime = 
require('mime-types') +const Reader = require('it-reader') -const detectContentType = (path, chunk) => { +const detectContentType = async (path, source) => { let fileSignature // try to guess the filetype based on the first bytes // note that `file-type` doesn't support svgs, therefore we assume it's a svg if path looks like it if (!path.endsWith('.svg')) { - fileSignature = fileType(chunk) + try { + const reader = Reader(source) + const { value, done } = await reader.next(fileType.minimumBytes) + + if (done) return { source: reader } + + fileSignature = fileType(value.slice()) + + source = (async function * () { // eslint-disable-line require-await + yield value + yield * reader + })() + } catch (err) { + if (err.code !== 'ERR_UNDER_READ') throw err + + // not enough bytes for sniffing, just yield the data + source = (async function * () { // eslint-disable-line require-await + yield err.buffer // these are the bytes that were read (if any) + })() + } } // if we were unable to, fallback to the `path` which might contain the extension const mimeType = mime.lookup(fileSignature ? fileSignature.ext : path) - return mime.contentType(mimeType) + return { source, contentType: mime.contentType(mimeType) } } module.exports = detectContentType diff --git a/src/utils/path.js b/src/utils/path.js index bad148e56a..6b3ea90b26 100644 --- a/src/utils/path.js +++ b/src/utils/path.js @@ -43,7 +43,7 @@ function removeSlashFromBothEnds (url) { function joinURLParts (...urls) { urls = urls.filter((url) => url.length > 0) - urls = [ '' ].concat(urls.map((url) => removeSlashFromBothEnds(url))) + urls = [''].concat(urls.map((url) => removeSlashFromBothEnds(url))) return urls.join('/') } diff --git a/test/index.spec.js b/test/index.spec.js index 8b20bd9340..0d270d488b 100644 --- a/test/index.spec.js +++ b/test/index.spec.js @@ -7,15 +7,22 @@ const expect = chai.expect chai.use(dirtyChai) const loadFixture = require('aegir/fixtures') -const ipfs = require('ipfs') -const DaemonFactory = require('ipfsd-ctl') +const Ctl = require('ipfsd-ctl') const getStream = require('get-stream') const CID = require('cids') +const all = require('it-all') const { getResponse } = require('../src') const makeWebResponseEnv = require('./utils/web-response-env') -const df = DaemonFactory.create({ type: 'proc', exec: ipfs }) +const factory = Ctl.createFactory({ + test: true, + type: 'proc', + ipfsModule: { + ref: require('ipfs'), + path: require.resolve('ipfs') + } +}) describe('resolve file (CIDv0)', function () { let ipfs = null @@ -26,26 +33,20 @@ describe('resolve file (CIDv0)', function () { data: loadFixture('test/fixtures/testfile.txt') } - before(function (done) { + before(async function () { this.timeout(20 * 1000) Object.assign(global, makeWebResponseEnv()) - df.spawn({ initOptions: { bits: 512 } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsd.api + ipfsd = await factory.spawn() + ipfs = ipfsd.api - ipfs.add(file.data, { cidVersion: 0 }, (err, filesAdded) => { - expect(err).to.not.exist() - expect(filesAdded).to.have.length(1) + const filesAdded = await all(ipfs.add(file.data, { cidVersion: 0 })) - const retrievedFile = filesAdded[0] - expect(new CID(retrievedFile.hash)).to.deep.equal(new CID(file.cid)) - expect(retrievedFile.size, 'ipfs.add result size should not be smaller than input buffer').greaterThan(file.data.length) + expect(filesAdded).to.have.length(1) - done() - }) - }) + const retrievedFile = filesAdded[0] + expect(retrievedFile.cid).to.deep.equal(new CID(file.cid)) + 
expect(retrievedFile.size, 'ipfs.add result size should not be smaller than input buffer').greaterThan(file.data.length) }) it('should resolve a CIDv0', async () => { @@ -70,25 +71,20 @@ describe('resolve file (CIDv1)', function () { data: loadFixture('test/fixtures/testfile.txt') } - before(function (done) { + before(async function () { this.timeout(20 * 1000) Object.assign(global, makeWebResponseEnv()) - df.spawn({ initOptions: { bits: 512 } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsd.api + ipfsd = await factory.spawn() + ipfs = ipfsd.api - ipfs.add(file.data, { cidVersion: 1 }, (err, filesAdded) => { - expect(err).to.not.exist() - expect(filesAdded).to.have.length(1) - const retrievedFile = filesAdded[0] - expect(new CID(retrievedFile.hash)).to.deep.equal(new CID(file.cid)) - // expect(retrievedFile.size, 'ipfs.add result size should not be smaller than input buffer').greaterThan(file.data.length) + const filesAdded = await all(ipfs.add(file.data, { cidVersion: 1 })) - done() - }) - }) + expect(filesAdded).to.have.length(1) + + const retrievedFile = filesAdded[0] + expect(retrievedFile.cid).to.deep.equal(new CID(file.cid)) + // expect(retrievedFile.size, 'ipfs.add result size should not be smaller than input buffer').greaterThan(file.data.length) }) it('should resolve a CIDv1', async () => { @@ -116,38 +112,31 @@ describe('resolve directory (CIDv0)', function () { } } - before(function (done) { + before(async function () { this.timeout(20 * 1000) Object.assign(global, makeWebResponseEnv()) - df.spawn({ initOptions: { bits: 512 } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsd.api - - const content = (name) => ({ - path: `test-folder/${name}`, - content: directory.files[name] - }) + ipfsd = await factory.spawn() + ipfs = ipfsd.api - const dirs = [ - content('pp.txt'), - content('holmes.txt') - ] + const content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] + }) - ipfs.add(dirs, { cidVersion: 0 }, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] + const dirs = [ + content('pp.txt'), + content('holmes.txt') + ] - expect(root.path).to.equal('test-folder') - expect(new CID(root.hash)).to.deep.equal(new CID(directory.cid)) + const res = await all(ipfs.add(dirs, { cidVersion: 0 })) + const root = res[res.length - 1] - expect(res[0].size, 'ipfs.add 1st result size should not be smaller than 1st input buffer').greaterThan(dirs[0].content.length) - expect(res[1].size, 'ipfs.add 2nd result size should not be smaller than 2nd input buffer').greaterThan(dirs[1].content.length) + expect(root.path).to.equal('test-folder') + expect(root.cid).to.deep.equal(new CID(directory.cid)) - done() - }) - }) + expect(res[0].size, 'ipfs.add 1st result size should not be smaller than 1st input buffer').greaterThan(dirs[0].content.length) + expect(res[1].size, 'ipfs.add 2nd result size should not be smaller than 2nd input buffer').greaterThan(dirs[1].content.length) }) it('should return the list of files of a directory', async () => { @@ -188,35 +177,29 @@ describe('resolve directory (CIDv1)', function () { } } - before(function (done) { + before(async function () { this.timeout(20 * 1000) Object.assign(global, makeWebResponseEnv()) - df.spawn({ initOptions: { bits: 512 } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsd.api - - const content = (name) => ({ - path: `test-folder/${name}`, - content: directory.files[name] - }) - - const dirs = [ - 
content('pp.txt'), - content('holmes.txt') - ] - - ipfs.add(dirs, { cidVersion: 1 }, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] - expect(root.path).to.equal('test-folder') - // expect(res[0].size, 'ipfs.files.add 1st result size should not be smaller than 1st input buffer').greaterThan(dirs[0].content.length) - // expect(res[1].size, 'ipfs.files.add 2nd result size should not be smaller than 2nd input buffer').greaterThan(dirs[1].content.length) - expect(new CID(root.hash)).to.deep.equal(new CID(directory.cid)) - done() - }) + ipfsd = await factory.spawn() + ipfs = ipfsd.api + + const content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt') + ] + + const res = await all(ipfs.add(dirs, { cidVersion: 1 })) + const root = res[res.length - 1] + expect(root.path).to.equal('test-folder') + // expect(res[0].size, 'ipfs.files.add 1st result size should not be smaller than 1st input buffer').greaterThan(dirs[0].content.length) + // expect(res[1].size, 'ipfs.files.add 2nd result size should not be smaller than 2nd input buffer').greaterThan(dirs[1].content.length) + expect(root.cid).to.deep.equal(new CID(directory.cid)) }) it('should return the list of files of a directory', async () => { @@ -258,35 +241,29 @@ describe('resolve web page (CIDv0)', function () { } } - before(function (done) { + before(async function () { this.timeout(20 * 1000) Object.assign(global, makeWebResponseEnv()) - df.spawn({ initOptions: { bits: 512 } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsd.api - - const content = (name) => ({ - path: `test-site/${name}`, - content: webpage.files[name] - }) - - const dirs = [ - content('pp.txt'), - content('holmes.txt'), - content('index.html') - ] - - ipfs.add(dirs, { cidVersion: 0 }, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] - - expect(root.path).to.equal('test-site') - expect(new CID(root.hash)).to.deep.equal(new CID(webpage.cid)) - done() - }) + ipfsd = await factory.spawn() + ipfs = ipfsd.api + + const content = (name) => ({ + path: `test-site/${name}`, + content: webpage.files[name] }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('index.html') + ] + + const res = await all(ipfs.add(dirs, { cidVersion: 0 })) + const root = res[res.length - 1] + + expect(root.path).to.equal('test-site') + expect(root.cid).to.deep.equal(new CID(webpage.cid)) }) it('should return the entry point of a web page when a trying to fetch a directory containing a web page', async () => { @@ -310,34 +287,29 @@ describe('resolve web page (CIDv1)', function () { } } - before(function (done) { + before(async function () { this.timeout(20 * 1000) Object.assign(global, makeWebResponseEnv()) - df.spawn({ initOptions: { bits: 512 } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsd.api - - const content = (name) => ({ - path: `test-site/${name}`, - content: webpage.files[name] - }) - - const dirs = [ - content('pp.txt'), - content('holmes.txt'), - content('index.html') - ] - - ipfs.add(dirs, { cidVersion: 1 }, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] - expect(root.path).to.equal('test-site') - expect(new CID(root.hash)).to.deep.equal(new CID(webpage.cid)) - done() - }) + ipfsd = await factory.spawn() + ipfs = ipfsd.api + + const content = (name) => ({ + path: `test-site/${name}`, + content: webpage.files[name] 
}) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('index.html') + ] + + const res = await all(ipfs.add(dirs, { cidVersion: 1 })) + const root = res[res.length - 1] + + expect(root.path).to.equal('test-site') + expect(root.cid).to.deep.equal(new CID(webpage.cid)) }) it('should return the entry point of a web page when a trying to fetch a directory containing a web page', async () => { @@ -364,37 +336,31 @@ describe('mime-types', () => { } } - before(function (done) { + before(async function () { this.timeout(20 * 1000) Object.assign(global, makeWebResponseEnv()) - df.spawn({ initOptions: { bits: 512 } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsd.api - - const content = (name) => ({ - path: `test-mime-types/${name}`, - content: webpage.files[name] - }) - - const dirs = [ - content('cat.jpg'), - content('hexagons-xml.svg'), - content('hexagons.svg'), - content('pp.txt'), - content('index.html') - ] - - ipfs.add(dirs, { cidVersion: 0 }, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] - - expect(root.path).to.equal('test-mime-types') - expect(new CID(root.hash)).to.deep.equal(new CID(webpage.cid)) - done() - }) + ipfsd = await factory.spawn() + ipfs = ipfsd.api + + const content = (name) => ({ + path: `test-mime-types/${name}`, + content: webpage.files[name] }) + + const dirs = [ + content('cat.jpg'), + content('hexagons-xml.svg'), + content('hexagons.svg'), + content('pp.txt'), + content('index.html') + ] + + const res = await all(ipfs.add(dirs, { cidVersion: 0 })) + const root = res[res.length - 1] + + expect(root.path).to.equal('test-mime-types') + expect(root.cid).to.deep.equal(new CID(webpage.cid)) }) it('should return the correct mime-type for pp.txt', async () => { diff --git a/test/resolver.spec.js b/test/resolver.spec.js index 6246e3aa3a..96220cca82 100644 --- a/test/resolver.spec.js +++ b/test/resolver.spec.js @@ -7,14 +7,21 @@ const expect = chai.expect chai.use(dirtyChai) const loadFixture = require('aegir/fixtures') -const ipfs = require('ipfs') -const DaemonFactory = require('ipfsd-ctl') +const Ctl = require('ipfsd-ctl') const CID = require('cids') const mh = require('multihashes') +const all = require('it-all') const ipfsResolver = require('../src/resolver') -const df = DaemonFactory.create({ type: 'proc', exec: ipfs }) +const factory = Ctl.createFactory({ + test: true, + type: 'proc', + ipfsModule: { + ref: require('ipfs'), + path: require.resolve('ipfs') + } +}) describe('resolve file (CIDv0)', function () { let ipfs = null @@ -25,23 +32,17 @@ describe('resolve file (CIDv0)', function () { data: loadFixture('test/fixtures/testfile.txt') } - before(function (done) { + before(async function () { this.timeout(20 * 1000) - df.spawn({ initOptions: { bits: 512 } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsd.api - ipfs.add(file.data, { cidVersion: 0 }, (err, filesAdded) => { - expect(err).to.not.exist() - expect(filesAdded).to.have.length(1) + ipfsd = await factory.spawn() + ipfs = ipfsd.api - const retrievedFile = filesAdded[0] - expect(new CID(retrievedFile.hash)).to.deep.equal(new CID(file.cid)) + const filesAdded = await all(ipfs.add(file.data, { cidVersion: 0 })) + expect(filesAdded).to.have.length(1) - done() - }) - }) + const retrievedFile = filesAdded[0] + expect(retrievedFile.cid).to.deep.equal(new CID(file.cid)) }) it('should resolve a multihash', async () => { @@ -74,23 +75,18 @@ describe('resolve file (CIDv1)', function () { data: 
loadFixture('test/fixtures/testfile.txt') } - before(function (done) { + before(async function () { this.timeout(20 * 1000) - df.spawn({ initOptions: { bits: 512 } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsd.api - - ipfs.add(file.data, { cidVersion: 1 }, (err, filesAdded) => { - expect(err).to.not.exist() - expect(filesAdded).to.have.length(1) - // console.log('ipfs.files.add result', filesAdded) - const retrievedFile = filesAdded[0] - expect(new CID(retrievedFile.hash)).to.deep.equal(new CID(file.cid)) - // expect(retrievedFile.size, 'ipfs.files.add result size should not be smaller than input buffer').greaterThan(file.data.length) - done() - }) - }) + + ipfsd = await factory.spawn() + ipfs = ipfsd.api + + const filesAdded = await all(ipfs.add(file.data, { cidVersion: 1 })) + expect(filesAdded).to.have.length(1) + // console.log('ipfs.files.add result', filesAdded) + const retrievedFile = filesAdded[0] + expect(retrievedFile.cid).to.deep.equal(new CID(file.cid)) + // expect(retrievedFile.size, 'ipfs.files.add result size should not be smaller than input buffer').greaterThan(file.data.length) }) it('should resolve a multihash', async () => { @@ -126,32 +122,27 @@ describe('resolve directory (CIDv0)', function () { } } - before(function (done) { + before(async function () { this.timeout(20 * 1000) - df.spawn({ initOptions: { bits: 512 } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsd.api - - const content = (name) => ({ - path: `test-folder/${name}`, - content: directory.files[name] - }) - - const dirs = [ - content('pp.txt'), - content('holmes.txt') - ] - - ipfs.add(dirs, { cidVersion: 0 }, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] - - expect(root.path).to.equal('test-folder') - expect(new CID(root.hash)).to.deep.equal(new CID(directory.cid)) - done() - }) + + ipfsd = await factory.spawn() + ipfs = ipfsd.api + + const content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt') + ] + + const res = await all(ipfs.add(dirs, { cidVersion: 0 })) + const root = res[res.length - 1] + + expect(root.path).to.equal('test-folder') + expect(root.cid).to.deep.equal(new CID(directory.cid)) }) it('should throw an error when trying to fetch a directory', async () => { @@ -184,34 +175,29 @@ describe('resolve directory (CIDv1)', function () { } } - before(function (done) { + before(async function () { this.timeout(20 * 1000) - df.spawn({ initOptions: { bits: 512 } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsd.api - - const content = (name) => ({ - path: `test-folder/${name}`, - content: directory.files[name] - }) - - const dirs = [ - content('pp.txt'), - content('holmes.txt') - ] - - ipfs.add(dirs, { cidVersion: 1 }, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] - // console.log('ipfs.files.add result', res) - expect(root.path).to.equal('test-folder') - // expect(res[0].size, 'ipfs.files.add 1st result size should not be smaller than 1st input buffer').greaterThan(dirs[0].content.length) - // expect(res[1].size, 'ipfs.files.add 2nd result size should not be smaller than 2nd input buffer').greaterThan(dirs[1].content.length) - expect(new CID(root.hash)).to.deep.equal(new CID(directory.cid)) - done() - }) + + ipfsd = await factory.spawn() + ipfs = ipfsd.api + + const content = (name) => ({ + path: `test-folder/${name}`, + content: 
directory.files[name] }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt') + ] + + const res = await all(ipfs.add(dirs, { cidVersion: 1 })) + const root = res[res.length - 1] + // console.log('ipfs.files.add result', res) + expect(root.path).to.equal('test-folder') + // expect(res[0].size, 'ipfs.files.add 1st result size should not be smaller than 1st input buffer').greaterThan(dirs[0].content.length) + // expect(res[1].size, 'ipfs.files.add 2nd result size should not be smaller than 2nd input buffer').greaterThan(dirs[1].content.length) + expect(root.cid).to.deep.equal(new CID(directory.cid)) }) it('should throw an error when trying to fetch a directory', async () => { @@ -246,33 +232,28 @@ describe('resolve web page (CIDv0)', function () { } } - before(function (done) { + before(async function () { this.timeout(20 * 1000) - df.spawn({ initOptions: { bits: 512 } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsd.api - - const content = (name) => ({ - path: `test-site/${name}`, - content: webpage.files[name] - }) - - const dirs = [ - content('pp.txt'), - content('holmes.txt'), - content('index.html') - ] - - ipfs.add(dirs, { cidVersion: 0 }, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] - - expect(root.path).to.equal('test-site') - expect(new CID(root.hash)).to.deep.equal(new CID(webpage.cid)) - done() - }) + + ipfsd = await factory.spawn() + ipfs = ipfsd.api + + const content = (name) => ({ + path: `test-site/${name}`, + content: webpage.files[name] }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('index.html') + ] + + const res = await all(ipfs.add(dirs, { cidVersion: 0 })) + const root = res[res.length - 1] + + expect(root.path).to.equal('test-site') + expect(root.cid).to.deep.equal(new CID(webpage.cid)) }) it('should throw an error when trying to fetch a directory containing a web page', async () => { @@ -308,33 +289,28 @@ describe('resolve web page (CIDv1)', function () { } } - before(function (done) { + before(async function () { this.timeout(20 * 1000) - df.spawn({ initOptions: { bits: 512 } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsd.api - - const content = (name) => ({ - path: `test-site/${name}`, - content: webpage.files[name] - }) - - const dirs = [ - content('pp.txt'), - content('holmes.txt'), - content('index.html') - ] - - ipfs.add(dirs, { cidVersion: 1 }, (err, res) => { - expect(err).to.not.exist() - // console.log(res) - const root = res[res.length - 1] - expect(root.path).to.equal('test-site') - expect(new CID(root.hash)).to.deep.equal(new CID(webpage.cid)) - done() - }) + + ipfsd = await factory.spawn() + ipfs = ipfsd.api + + const content = (name) => ({ + path: `test-site/${name}`, + content: webpage.files[name] }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('index.html') + ] + + const res = await all(ipfs.add(dirs, { cidVersion: 1 })) + // console.log(res) + const root = res[res.length - 1] + expect(root.path).to.equal('test-site') + expect(root.cid).to.deep.equal(new CID(webpage.cid)) }) it('should throw an error when trying to fetch a directory containing a web page', async () => {
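
Note for reviewers: the sketch below shows how the refactored pieces are meant to fit together end to end, modelled on the test setup in this diff. The `createFactory` options, the `it-all` usage and the `getResponse(ipfs, path)` signature come from the changes above; everything else — the `require` paths, the sample content, the `/ipfs/${cid}` path construction, reading the body with `get-stream` and the `factory.clean()` teardown — is an assumption for illustration, not part of this PR.

```js
'use strict'

const Ctl = require('ipfsd-ctl')
const all = require('it-all')
const getStream = require('get-stream')
const { getResponse } = require('./src') // assumed: run from the repo root
const makeWebResponseEnv = require('./test/utils/web-response-env')

async function main () {
  // the tests stub the web Response global the same way; without a Blob
  // global, getResponse falls back to a Node readable stream body
  Object.assign(global, makeWebResponseEnv())

  const factory = Ctl.createFactory({
    test: true,
    type: 'proc',
    ipfsModule: {
      ref: require('ipfs'),
      path: require.resolve('ipfs')
    }
  })

  const ipfsd = await factory.spawn()
  const ipfs = ipfsd.api

  // ipfs.add() now returns an async iterable, hence it-all instead of callbacks
  const [added] = await all(ipfs.add(Buffer.from('hello world'), { cidVersion: 0 }))

  // getResponse resolves to a Response; the content type is sniffed from the
  // first chunk(s) of ipfs.cat() inside detectContentType()
  const response = await getResponse(ipfs, `/ipfs/${added.cid}`)
  console.log(response.status)
  console.log(await getStream(response.body)) // assumes the stream branch above

  await factory.clean() // assumed ipfsd-ctl v1 teardown
}

main().catch(console.error)
```

The reason `detectContentType` now returns `{ source, contentType }` rather than just a mime type is that sniffing consumes the first bytes of the `ipfs.cat()` iterable via `it-reader`, so a new async generator that re-yields those bytes has to be handed back for use as the response body.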