diff --git a/package-lock.json b/package-lock.json index d2def85d..97b2e17f 100644 --- a/package-lock.json +++ b/package-lock.json @@ -13443,6 +13443,12 @@ "integrity": "sha1-z/yvcC2vZeo5u04PorKZzsGhvkY=", "dev": true }, + "spy": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/spy/-/spy-1.0.0.tgz", + "integrity": "sha1-Kqoirx2GlrMNBZbbHRJ4HJGs9q0=", + "dev": true + }, "sshpk": { "version": "1.16.0", "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.0.tgz", @@ -15214,7 +15220,7 @@ }, "hash.js": { "version": "1.0.3", - "resolved": "https://registry.npmjs.org/hash.js/-/hash.js-1.0.3.tgz", + "resolved": "https://registry.npmjs.org/hash.js/-/hash.js-1.0.3.tgz", "integrity": "sha1-EzL/ABVsCg/92CNgE9B7d6BFFXM=", "dev": true, "requires": { @@ -15231,7 +15237,7 @@ }, "lodash": { "version": "3.10.1", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz", "integrity": "sha1-W/Rejkm6QYnhfUgnid/RW9FAt7Y=", "dev": true } diff --git a/package.json b/package.json index fd9ebb83..bbc70415 100644 --- a/package.json +++ b/package.json @@ -42,6 +42,7 @@ "mocha-headless-chrome": "~2.0.1", "orbit-db-keystore": "github:orbitdb/orbit-db-keystore", "rimraf": "~2.6.1", + "spy": "~1.0.0", "standard": "~12.0.1", "webpack": "~4.28.0", "webpack-cli": "~3.1.2" diff --git a/src/entry.js b/src/entry.js index 57983441..d00c5de9 100644 --- a/src/entry.js +++ b/src/entry.js @@ -48,7 +48,8 @@ class Entry { entry.identity = identity.toJSON() entry.sig = signature entry.cid = await Entry.toCID(ipfs, entry) - return entry + + return Entry.ensureInterop(entry) } /** @@ -129,7 +130,7 @@ class Entry { */ static async toMultihash (ipfs, entry) { if (!ipfs) throw IpfsNotDefinedError() - if (!Entry.isEntry(entry)) throw new Error('Invalid object format, cannot generate entry multihash') + if (!Entry.isEntry(entry)) throw new Error('Invalid object format, cannot generate entry CID') // Ensure `entry` 
follows the correct format const e = { @@ -165,7 +166,7 @@ class Entry { const e = await dagNode.read(ipfs, cid, IPLD_LINKS) let entry = { - cid, + [getCidProp(e)]: cid, id: e.id, payload: e.payload, next: e.next, @@ -177,7 +178,7 @@ class Entry { if (e.identity) Object.assign(entry, { identity: e.identity }) if (e.sig) Object.assign(entry, { sig: e.sig }) - return entry + return Entry.ensureInterop(entry) } /** @@ -192,9 +193,6 @@ class Entry { * @deprecated */ static async fromMultihash (ipfs, multihash) { - if (!ipfs) throw IpfsNotDefinedError() - if (!multihash) throw new Error(`Invalid multihash: ${multihash}`) - return Entry.fromCID(ipfs, multihash) } @@ -212,6 +210,32 @@ class Entry { obj.clock !== undefined } + /** + * Ensures that this entry is interoperable between earlier versions + * and the most recent one (and vice-versa). + * @param {Entry} entry The entry to ensure interoperability + * @return {Entry} entry The same entry but with backwards and forward interoperability + */ + static ensureInterop (entry) { + if (entry.cid && entry.hash) { + return entry + } + + const prop = getCidProp(entry) + const accessorProp = prop === 'hash' ? 'cid' : 'hash' + + Object.defineProperty(entry, accessorProp, { + get () { + return this[prop] + }, + set (value) { + this[prop] = value + } + }) + + return entry + } + /** * Compares two entries. * @param {Entry} a diff --git a/src/log-io.js b/src/log-io.js index 44d3353d..79f1a5d3 100644 --- a/src/log-io.js +++ b/src/log-io.js @@ -11,32 +11,32 @@ const last = (arr, n) => arr.slice(arr.length - n, arr.length) class LogIO { /** - * Get the multihash of a Log. + * Get the CID of a Log. 
* @param {IPFS} ipfs An IPFS instance - * @param {Log} log Log to get a multihash for + * @param {Log} log Log to get a CID for * @returns {Promise} - * @deprecated */ - static async toMultihash (ipfs, log) { + static async toCID (ipfs, log) { if (!isDefined(ipfs)) throw LogError.IPFSNotDefinedError() if (!isDefined(log)) throw LogError.LogNotDefinedError() if (log.values.length < 1) throw new Error(`Can't serialize an empty log`) - return dagNode.write(ipfs, 'dag-pb', log.toJSON(), IPLD_LINKS) + return dagNode.write(ipfs, 'dag-cbor', log.toJSON(), IPLD_LINKS) } /** - * Get the CID of a Log. + * Get the multihash of a Log. * @param {IPFS} ipfs An IPFS instance - * @param {Log} log Log to get a CID for + * @param {Log} log Log to get a multihash for * @returns {Promise} + * @deprecated */ - static async toCID (ipfs, log) { + static async toMultihash (ipfs, log) { if (!isDefined(ipfs)) throw LogError.IPFSNotDefinedError() if (!isDefined(log)) throw LogError.LogNotDefinedError() if (log.values.length < 1) throw new Error(`Can't serialize an empty log`) - return dagNode.write(ipfs, 'dag-cbor', log.toJSON(), IPLD_LINKS) + return dagNode.write(ipfs, 'dag-pb', log.toJSON(), IPLD_LINKS) } /** @@ -76,19 +76,16 @@ class LogIO { } /** - * Create a log from a multihash. - * @param {IPFS} ipfs An IPFS instance - * @param {string} multihash Multihash (as a Base58 encoded string) to create the Log from - * @param {number} [length=-1] How many items to include in the log - * @param {Array} [exclude] Entries to not fetch (cached) - * @param {function(cid, entry, parent, depth)} onProgressCallback - * @returns {Promise} - * @deprecated - */ + * Create a log from a multihash. 
+ * @param {IPFS} ipfs An IPFS instance + * @param {string} multihash Multihash (as a Base58 encoded string) to create the Log from + * @param {number} [length=-1] How many items to include in the log + * @param {Array} [exclude] Entries to not fetch (cached) + * @param {function(cid, entry, parent, depth)} onProgressCallback + * @returns {Promise} + * @deprecated + */ static async fromMultihash (ipfs, multihash, length = -1, exclude, onProgressCallback) { - if (!isDefined(ipfs)) throw LogError.IPFSNotDefinedError() - if (!isDefined(multihash)) throw new Error(`Invalid multihash: ${multihash}`) - return LogIO.fromCID(ipfs, multihash, length, exclude, onProgressCallback) } @@ -113,6 +110,7 @@ class LogIO { static async fromJSON (ipfs, json, length = -1, timeout, onProgressCallback) { if (!isDefined(ipfs)) throw LogError.IPFSNotDefinedError() + json.heads.forEach(Entry.ensureInterop) const headCids = json.heads.map(e => e.cid) const entries = await EntryIO.fetchParallel(ipfs, headCids, length, [], 16, timeout, onProgressCallback) const finalEntries = entries.slice().sort(Entry.compare) @@ -144,6 +142,7 @@ class LogIO { if (!Array.isArray(sourceEntries)) { sourceEntries = [sourceEntries] } + sourceEntries.forEach(Entry.ensureInterop) // Fetch given length, return size at least the given input entries length = length > -1 ? Math.max(length, sourceEntries.length) : length diff --git a/src/log.js b/src/log.js index 4750593b..8538c6ac 100644 --- a/src/log.js +++ b/src/log.js @@ -415,6 +415,14 @@ class Log extends GSet { log._entryIndex !== undefined } + /** + * Get the log's CID. + * @returns {Promise} The Log CID + */ + toCID () { + return LogIO.toCID(this._storage, this) + } + /** * Get the log's multihash. * @returns {Promise} Multihash of the Log as Base58 encoded string @@ -425,11 +433,21 @@ class Log extends GSet { } /** - * Get the log's CID. - * @returns {Promise} The Log CID + * Create a log from a CID. 
+ * @param {IPFS} ipfs An IPFS instance + * @param {AccessController} access The access controller instance + * @param {Identity} identity The identity instance + * @param {string} cid The log CID + * @param {number} [length=-1] How many items to include in the log + * @param {Array} [exclude] Entries to not fetch (cached) + * @param {function(cid, entry, parent, depth)} onProgressCallback + * @returns {Promise} + * @deprecated */ - toCID () { - return LogIO.toCID(this._storage, this) + static async fromCID (ipfs, access, identity, cid, length = -1, exclude, onProgressCallback) { + // TODO: need to verify the entries with 'key' + const data = await LogIO.fromCID(ipfs, cid, length, exclude, onProgressCallback) + return new Log(ipfs, access, identity, data.id, data.values, data.heads, data.clock) } /** @@ -445,44 +463,40 @@ class Log extends GSet { * @deprecated */ static async fromMultihash (ipfs, access, identity, multihash, length = -1, exclude, onProgressCallback) { - // TODO: need to verify the entries with 'key' - const data = await LogIO.fromMultihash(ipfs, multihash, length, exclude, onProgressCallback) - return new Log(ipfs, access, identity, data.id, data.values, data.heads, data.clock) + return Log.fromCID(ipfs, access, identity, multihash, length, exclude, onProgressCallback) } /** - * Create a log from a CID. + * Create a log from a single entry's CID. 
* @param {IPFS} ipfs An IPFS instance * @param {AccessController} access The access controller instance * @param {Identity} identity The identity instance - * @param {string} cid The log CID - * @param {number} [length=-1] How many items to include in the log - * @param {Array} [exclude] Entries to not fetch (cached) + * @param {string} cid The entry's CID + * @param {string} [logId] The ID of the log + * @param {number} [length=-1] How many entries to include in the log * @param {function(cid, entry, parent, depth)} onProgressCallback - * @returns {Promise} - * @deprecated + * @return {Promise} New Log */ - static async fromCID (ipfs, access, identity, cid, length = -1, exclude, onProgressCallback) { + static async fromEntryCid (ipfs, access, identity, cid, logId, length = -1, exclude, onProgressCallback) { // TODO: need to verify the entries with 'key' - const data = await LogIO.fromCID(ipfs, cid, length, exclude, onProgressCallback) - return new Log(ipfs, access, identity, data.id, data.values, data.heads, data.clock) + const data = await LogIO.fromEntryCid(ipfs, cid, length, exclude, onProgressCallback) + return new Log(ipfs, access, identity, logId, data.values) } /** - * Create a log from a single entry's CID. + * Create a log from a single entry's multihash. 
* @param {IPFS} ipfs An IPFS instance * @param {AccessController} access The access controller instance * @param {Identity} identity The identity instance - * @param {string} cid The entry's CID + * @param {string} multihash The entry's multihash * @param {string} [logId] The ID of the log * @param {number} [length=-1] How many entries to include in the log * @param {function(cid, entry, parent, depth)} onProgressCallback - * @return {Promise} New Log + * @return {Promise} New Log + * @deprecated */ - static async fromEntryCid (ipfs, access, identity, cid, logId, length = -1, exclude, onProgressCallback) { - // TODO: need to verify the entries with 'key' - const data = await LogIO.fromEntryCid(ipfs, cid, length, exclude, onProgressCallback) - return new Log(ipfs, access, identity, logId, data.values) + static async fromEntryHash (ipfs, access, identity, multihash, logId, length = -1, exclude, onProgressCallback) { + return Log.fromEntryCid(ipfs, access, identity, multihash, logId, length, exclude, onProgressCallback) } /** diff --git a/test/entry.spec.js b/test/entry.spec.js index 8cae8081..84dcbec8 100644 --- a/test/entry.spec.js +++ b/test/entry.spec.js @@ -1,6 +1,7 @@ 'use strict' const assert = require('assert') +const spy = require('spy') const rmrf = require('rimraf') const Entry = require('../src/entry') const Log = require('../src/log') @@ -80,6 +81,13 @@ Object.keys(testAPIs).forEach((IPFS) => { assert.strictEqual(entry2.clock.time, 1) }) + it('should return an entry interopable with older versions', async () => { + const expectedCid = 'zdpuAnVZkmiNbtgwCguuphDe2qojCGN4EztkSGNyiJxwizudY' + const entry = await Entry.create(ipfs, testIdentity, 'A', 'hello') + assert.strictEqual(entry.cid, entry.hash) + assert.strictEqual(entry.cid, expectedCid) + }) + it('`next` parameter can be an array of strings', async () => { const entry1 = await Entry.create(ipfs, testIdentity, 'A', 'hello1', []) const entry2 = await Entry.create(ipfs, testIdentity, 'A', 'hello2', 
[entry1.cid]) @@ -158,6 +166,13 @@ Object.keys(testAPIs).forEach((IPFS) => { assert.strictEqual(cid, expectedCid) }) + it('returns the correct ipfs CID (multihash) for a v0 entry', async () => { + const expectedMultihash = 'QmV5NpvViHHouBfo7CSnfX2iB4t5PVWNJG8doKt5cwwnxY' + const entry = v0Entries.hello + const multihash = await Entry.toMultihash(ipfs, entry) + assert.strictEqual(multihash, expectedMultihash) + }) + it('throws an error if ipfs is not defined', async () => { let err try { @@ -222,7 +237,7 @@ Object.keys(testAPIs).forEach((IPFS) => { err1 = e } - assert.strictEqual(err1.message, 'Invalid object format, cannot generate entry multihash') + assert.strictEqual(err1.message, 'Invalid object format, cannot generate entry CID') try { const entry = await Entry.create(ipfs, testIdentity, 'A', 'hello', []) @@ -231,7 +246,7 @@ Object.keys(testAPIs).forEach((IPFS) => { } catch (e) { err2 = e } - assert.strictEqual(err2.message, 'Invalid object format, cannot generate entry multihash') + assert.strictEqual(err2.message, 'Invalid object format, cannot generate entry CID') }) }) @@ -252,23 +267,6 @@ Object.keys(testAPIs).forEach((IPFS) => { assert.strictEqual(final.cid, expectedCid) }) - it('creates a entry from ipfs multihash', async () => { - const expectedCid = 'QmRyrbupohhmSoXxQ56XthEiK6YEqK5ZN8SysJdo8GGdHq' - const payload1 = 'hello world' - const payload2 = 'hello again' - const entry1 = await Entry.create(ipfs, testIdentity, 'A', payload1, []) - const entry2 = await Entry.create(ipfs, testIdentity, 'A', payload2, [entry1]) - const entry2Multihash = await Entry.toMultihash(ipfs, entry2) - const final = await Entry.fromCID(ipfs, entry2Multihash) - - assert.strictEqual(final.id, 'A') - assert.strictEqual(final.payload, payload2) - assert.strictEqual(final.next.length, 1) - assert.strictEqual(final.next[0], entry1.cid) - assert.strictEqual(final.v, 0) - assert.strictEqual(final.cid, expectedCid) - }) - it('creates a entry from ipfs multihash of v0 entries', async 
() => { const expectedCid = 'QmTLLKuNVXC95rGcnrL1M3xKf4dWYuu3MeAM3LUh3YNDJ7' const entry1Cid = await dagNode.write(ipfs, 'dag-pb', v0Entries.helloWorld) @@ -285,6 +283,24 @@ Object.keys(testAPIs).forEach((IPFS) => { assert.strictEqual(final.cid, expectedCid) }) + it('should return an entry interopable with older and newer versions', async () => { + const expectedCidV1 = 'zdpuAnVZkmiNbtgwCguuphDe2qojCGN4EztkSGNyiJxwizudY' + const entryV1 = await Entry.create(ipfs, testIdentity, 'A', 'hello', []) + const finalV1 = await Entry.fromCID(ipfs, entryV1.cid) + assert.strictEqual(finalV1.cid, finalV1.hash) + assert.strictEqual(finalV1.cid, expectedCidV1) + assert.strictEqual(Object.assign({}, finalV1).cid, expectedCidV1) + assert.strictEqual(Object.assign({}, finalV1).hash, undefined) + + const expectedCidV0 = 'QmderYccue9XqB7V4EYf71ZygWELdzdbVqo1oxR4nMRrCh' + const entryCidV0 = await dagNode.write(ipfs, 'dag-pb', v0Entries.helloWorld) + const finalV0 = await Entry.fromCID(ipfs, entryCidV0) + assert.strictEqual(finalV0.cid, finalV0.hash) + assert.strictEqual(finalV0.cid, expectedCidV0) + assert.strictEqual(Object.assign({}, finalV0).cid, undefined) + assert.strictEqual(Object.assign({}, finalV0).hash, expectedCidV0) + }) + it('throws an error if ipfs is not present', async () => { let err try { @@ -307,58 +323,22 @@ Object.keys(testAPIs).forEach((IPFS) => { }) describe('fromMultihash', () => { - it('creates a entry from ipfs multihash', async () => { - const expectedMultihash = 'QmRyrbupohhmSoXxQ56XthEiK6YEqK5ZN8SysJdo8GGdHq' - const payload1 = 'hello world' - const payload2 = 'hello again' - const entry1 = await Entry.create(ipfs, testIdentity, 'A', payload1, []) - const entry2 = await Entry.create(ipfs, testIdentity, 'A', payload2, [entry1]) - const entry2Multihash = await Entry.toMultihash(ipfs, entry2) - const final = await Entry.fromMultihash(ipfs, entry2Multihash) - - assert.strictEqual(final.id, 'A') - assert.strictEqual(final.payload, payload2) - 
assert.strictEqual(final.next.length, 1) - assert.strictEqual(final.next[0], entry1.cid) - assert.strictEqual(final.v, 0) - assert.strictEqual(final.cid, expectedMultihash) + afterEach(() => { + if (Entry.fromCID.restore) { + Entry.fromCID.restore() + } }) - it('creates a entry from ipfs multihash of v0 entries', async () => { + it('call fromCID', async () => { + const s = spy(Entry, 'fromCID') const expectedCid = 'QmTLLKuNVXC95rGcnrL1M3xKf4dWYuu3MeAM3LUh3YNDJ7' - const entry1Cid = await dagNode.write(ipfs, 'dag-pb', v0Entries.helloWorld) + await dagNode.write(ipfs, 'dag-pb', v0Entries.helloWorld) const entry2Cid = await dagNode.write(ipfs, 'dag-pb', v0Entries.helloAgain) - const final = await Entry.fromMultihash(ipfs, entry2Cid) + const final = await Entry.fromCID(ipfs, entry2Cid) - assert.strictEqual(final.id, 'A') - assert.strictEqual(final.payload, v0Entries.helloAgain.payload) - assert.strictEqual(final.next.length, 1) - assert.strictEqual(final.next[0], v0Entries.helloAgain.next[0]) - assert.strictEqual(final.next[0], entry1Cid) - assert.strictEqual(final.v, 0) - assert.strictEqual(final.cid, entry2Cid) + assert(s.calledWith(ipfs, entry2Cid)) assert.strictEqual(final.cid, expectedCid) }) - - it('throws an error if ipfs is not present', async () => { - let err - try { - await Entry.fromMultihash() - } catch (e) { - err = e - } - assert.strictEqual(err.message, 'Ipfs instance not defined') - }) - - it('throws an error if CID is undefined', async () => { - let err - try { - await Entry.fromMultihash(ipfs) - } catch (e) { - err = e - } - assert.strictEqual(err.message, 'Invalid multihash: undefined') - }) }) describe('isParent', () => { diff --git a/test/log.spec.js b/test/log.spec.js index 5c8c2350..16710612 100644 --- a/test/log.spec.js +++ b/test/log.spec.js @@ -1,6 +1,7 @@ 'use strict' const assert = require('assert') +const spy = require('spy') const rmrf = require('rimraf') const dagPB = require('ipld-dag-pb') const pify = require('pify') @@ -535,7 +536,14 
@@ Object.keys(testAPIs).forEach((IPFS) => { }) describe('fromMultihash', async () => { - it('creates a log from ipfs multihash - one entry', async () => { + afterEach(() => { + if (Log.fromCID.restore) { + Log.fromCID.restore() + } + }) + + it('calls fromCID', async () => { + const s = spy(Log, 'fromCID') const expectedData = { id: 'X', heads: ['zdpuB2NAQ7cSh9MAfY91QC6Va56pQMJBXBaLoS6uQ1qNxqija'] @@ -544,156 +552,9 @@ Object.keys(testAPIs).forEach((IPFS) => { await log.append('one') const multihash = await log.toMultihash() const res = await Log.fromMultihash(ipfs, testACL, testIdentity, multihash, -1) + assert.strictEqual(s.callCount, 1) + assert(s.calledWith(ipfs, testACL, testIdentity, multihash, -1)) assert.strictEqual(JSON.stringify(res.toJSON()), JSON.stringify(expectedData)) - assert.strictEqual(res.length, 1) - assert.strictEqual(res.values[0].payload, 'one') - assert.strictEqual(res.values[0].clock.id, testIdentity.publicKey) - assert.strictEqual(res.values[0].clock.time, 1) - }) - - it('creates a log from ipfs multihash - three entries', async () => { - const multihash = await log.toMultihash() - const res = await Log.fromMultihash(ipfs, testACL, testIdentity, multihash, -1) - assert.strictEqual(res.length, 3) - assert.strictEqual(res.values[0].payload, 'one') - assert.strictEqual(res.values[0].clock.time, 1) - assert.strictEqual(res.values[1].payload, 'two') - assert.strictEqual(res.values[1].clock.time, 2) - assert.strictEqual(res.values[2].payload, 'three') - assert.strictEqual(res.values[2].clock.time, 3) - }) - - it('has the right sequence number after creation and appending', async () => { - const multihash = await log.toMultihash() - let res = await Log.fromMultihash(ipfs, testACL, testIdentity, multihash, -1) - assert.strictEqual(res.length, 3) - await res.append('four') - assert.strictEqual(res.length, 4) - assert.strictEqual(res.values[3].payload, 'four') - assert.strictEqual(res.values[3].clock.time, 4) - }) - - it('creates a log from ipfs 
multihash that has three heads', async () => { - let log1 = new Log(ipfs, testACL, testIdentity, 'A') - let log2 = new Log(ipfs, testACL, testIdentity2, 'A') - let log3 = new Log(ipfs, testACL, testIdentity3, 'A') - await log1.append('one') // order is determined by the identity's publicKey - await log3.append('two') - await log2.append('three') - await log1.join(log2) - await log1.join(log3) - const multihash = await log1.toMultihash() - const res = await Log.fromMultihash(ipfs, testACL, testIdentity, multihash, -1) - assert.strictEqual(res.length, 3) - assert.strictEqual(res.heads.length, 3) - assert.strictEqual(res.heads[0].payload, 'three') - assert.strictEqual(res.heads[1].payload, 'two') // order is determined by the identity's publicKey - assert.strictEqual(res.heads[2].payload, 'one') - }) - - it('creates a log from ipfs multihash up to a size limit', async () => { - const amount = 100 - const size = amount / 2 - let log = new Log(ipfs, testACL, testIdentity, 'A') - for (let i = 0; i < amount; i++) { - await log.append(i.toString()) - } - const multihash = await log.toMultihash() - const res = await Log.fromMultihash(ipfs, testACL, testIdentity, multihash, size) - assert.strictEqual(res.length, size) - }) - - it('creates a log from ipfs multihash up without size limit', async () => { - const amount = 100 - let log = new Log(ipfs, testACL, testIdentity, 'A') - for (let i = 0; i < amount; i++) { - await log.append(i.toString()) - } - const multihash = await log.toMultihash() - const res = await Log.fromMultihash(ipfs, testACL, testIdentity, multihash, -1) - assert.strictEqual(res.length, amount) - }) - - it('throws an error if ipfs is not defined', async () => { - let err - try { - await Log.fromMultihash() - } catch (e) { - err = e - } - assert.notStrictEqual(err, null) - assert.strictEqual(err.message, 'IPFS instance not defined') - }) - - it('throws an error if multihash is not defined', async () => { - let err - try { - await Log.fromMultihash(ipfs) - } 
catch (e) { - err = e - } - assert.notStrictEqual(err, null) - assert.strictEqual(err.message, 'Invalid multihash: undefined') - }) - - it('throws an error when data from multihash is not instance of Log', async () => { - const dagNode = await createPbDagNode(Buffer.from('{}')) - const cid = await ipfs.dag.put(dagNode, { - format: 'dag-pb', - hashAlg: 'sha2-256' - }) - let err - try { - await Log.fromMultihash(ipfs, testACL, testIdentity, cid.toV0().toBaseEncodedString()) - } catch (e) { - err = e - } - assert.strictEqual(err.message, 'Given argument is not an instance of Log') - }) - - it('throws an error if data from multihash is not valid JSON', async () => { - const dagNode = await createPbDagNode(Buffer.from('hello')) - let cid = await ipfs.dag.put(dagNode, { - hashAlg: 'sha2-256', - format: 'dag-pb' - }) - let err - try { - await Log.fromMultihash(ipfs, testACL, testIdentity, cid.toV0().toBaseEncodedString()) - } catch (e) { - err = e - } - assert.strictEqual(err.message, 'Unexpected token h in JSON at position 0') - }) - - it('onProgress callback is fired for each entry', async () => { - const amount = 100 - let log = new Log(ipfs, testACL, testIdentity, 'A') - for (let i = 0; i < amount; i++) { - await log.append(i.toString()) - } - - const items = log.values - let i = 0 - const loadProgressCallback = (cid, entry, depth) => { - assert.notStrictEqual(entry, null) - assert.strictEqual(cid, items[items.length - i - 1].cid) - assert.strictEqual(entry.cid, items[items.length - i - 1].cid) - assert.strictEqual(entry.payload, items[items.length - i - 1].payload) - assert.strictEqual(depth - 1, i) - i++ - } - - const multihash = await log.toMultihash() - const result = await Log.fromMultihash(ipfs, testACL, testIdentity, multihash, -1, [], loadProgressCallback) - - // Make sure the onProgress callback was called for each entry - assert.strictEqual(i, amount) - // Make sure the log entries are correct ones - assert.strictEqual(result.values[0].clock.time, 1) - 
assert.strictEqual(result.values[0].payload, '0') - assert.strictEqual(result.values[result.length - 1].clock.time, 100) - assert.strictEqual(result.values[result.length - 1].payload, '99') }) }) })