
Commit

Improve backwards compatibility with regard to the previous commit
- Restore fromEntryHash
- Make fromMultihash simply call fromCID
satazor committed Jan 10, 2019
1 parent 040ff46 commit 2fc6dbe
Showing 7 changed files with 152 additions and 267 deletions.
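Taken together, the changes below are meant to keep the older hash/multihash-based API working while the internals move to CIDs. A rough sketch of the intended behaviour from a caller's point of view (the concrete values and the `Entry.create` arguments are illustrative, not taken from this commit):

```js
// Hypothetical usage sketch; assumes an `ipfs` instance and an `identity` are already available.
const entry = await Entry.create(ipfs, identity, 'log-id', 'hello world')

// Entries now expose both properties; `hash` is an alias wired up by Entry.ensureInterop.
console.log(entry.cid)                 // canonical property
console.log(entry.hash === entry.cid)  // true

// The deprecated helper simply delegates to its CID counterpart.
const sameEntry = await Entry.fromMultihash(ipfs, entry.cid)
```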
10 changes: 8 additions & 2 deletions package-lock.json


1 change: 1 addition & 0 deletions package.json
@@ -42,6 +42,7 @@
"mocha-headless-chrome": "~2.0.1",
"orbit-db-keystore": "github:orbitdb/orbit-db-keystore",
"rimraf": "~2.6.1",
"spy": "~1.0.0",
"standard": "~12.0.1",
"webpack": "~4.28.0",
"webpack-cli": "~3.1.2"
38 changes: 31 additions & 7 deletions src/entry.js
@@ -48,7 +48,8 @@ class Entry {
entry.identity = identity.toJSON()
entry.sig = signature
entry.cid = await Entry.toCID(ipfs, entry)
return entry

return Entry.ensureInterop(entry)
}

/**
@@ -129,7 +130,7 @@ class Entry {
*/
static async toMultihash (ipfs, entry) {
if (!ipfs) throw IpfsNotDefinedError()
if (!Entry.isEntry(entry)) throw new Error('Invalid object format, cannot generate entry multihash')
if (!Entry.isEntry(entry)) throw new Error('Invalid object format, cannot generate entry CID')

// Ensure `entry` follows the correct format
const e = {
@@ -165,7 +166,7 @@
const e = await dagNode.read(ipfs, cid, IPLD_LINKS)

let entry = {
cid,
[getCidProp(e)]: cid,
id: e.id,
payload: e.payload,
next: e.next,
@@ -177,7 +178,7 @@
if (e.identity) Object.assign(entry, { identity: e.identity })
if (e.sig) Object.assign(entry, { sig: e.sig })

return entry
return Entry.ensureInterop(entry)
}

/**
@@ -192,9 +193,6 @@
* @deprecated
*/
static async fromMultihash (ipfs, multihash) {
if (!ipfs) throw IpfsNotDefinedError()
if (!multihash) throw new Error(`Invalid multihash: ${multihash}`)

return Entry.fromCID(ipfs, multihash)
}

@@ -212,6 +210,32 @@
obj.clock !== undefined
}

/**
* Ensures that this entry is interoperable between earlier versions
* and the most recent one (and vice-versa).
* @param {Entry} entry The entry to ensure interoperability
* @return {Entry} entry The same entry but with backwards and forward interoperability
*/
static ensureInterop (entry) {
if (entry.cid && entry.hash) {
return entry
}

const prop = getCidProp(entry)
const accessorProp = prop === 'hash' ? 'cid' : 'hash'

Object.defineProperty(entry, accessorProp, {
get () {
return this[prop]
},
set (value) {
this[prop] = value
}
})

return entry
}

/**
* Compares two entries.
* @param {Entry} a
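The new `ensureInterop` helper is the core of the compatibility story: whichever of `cid` or `hash` an entry carries, the other name is defined as an accessor that forwards to it. A minimal standalone sketch of that pattern (the entry literal is made up for illustration):

```js
// Standalone illustration of the accessor aliasing done by Entry.ensureInterop.
const entry = { cid: 'zdpuExampleCid', payload: 'hello' } // no `hash` property yet

Object.defineProperty(entry, 'hash', {
  get () { return this.cid },
  set (value) { this.cid = value }
})

entry.hash = 'zdpuAnotherCid'          // writes through to `cid`
console.log(entry.cid)                 // 'zdpuAnotherCid'
console.log(entry.hash === entry.cid)  // true
```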
41 changes: 20 additions & 21 deletions src/log-io.js
@@ -11,32 +11,32 @@ const last = (arr, n) => arr.slice(arr.length - n, arr.length)

class LogIO {
/**
* Get the multihash of a Log.
* Get the CID of a Log.
* @param {IPFS} ipfs An IPFS instance
* @param {Log} log Log to get a multihash for
* @param {Log} log Log to get a CID for
* @returns {Promise<string>}
* @deprecated
*/
static async toMultihash (ipfs, log) {
static async toCID (ipfs, log) {
if (!isDefined(ipfs)) throw LogError.IPFSNotDefinedError()
if (!isDefined(log)) throw LogError.LogNotDefinedError()
if (log.values.length < 1) throw new Error(`Can't serialize an empty log`)

return dagNode.write(ipfs, 'dag-pb', log.toJSON(), IPLD_LINKS)
return dagNode.write(ipfs, 'dag-cbor', log.toJSON(), IPLD_LINKS)
}

/**
* Get the CID of a Log.
* Get the multihash of a Log.
* @param {IPFS} ipfs An IPFS instance
* @param {Log} log Log to get a CID for
* @param {Log} log Log to get a multihash for
* @returns {Promise<string>}
* @deprecated
*/
static async toCID (ipfs, log) {
static async toMultihash (ipfs, log) {
if (!isDefined(ipfs)) throw LogError.IPFSNotDefinedError()
if (!isDefined(log)) throw LogError.LogNotDefinedError()
if (log.values.length < 1) throw new Error(`Can't serialize an empty log`)

return dagNode.write(ipfs, 'dag-cbor', log.toJSON(), IPLD_LINKS)
return dagNode.write(ipfs, 'dag-pb', log.toJSON(), IPLD_LINKS)
}

/**
@@ -76,19 +76,16 @@ class LogIO {
}

/**
* Create a log from a multihash.
* @param {IPFS} ipfs An IPFS instance
* @param {string} multihash Multihash (as a Base58 encoded string) to create the Log from
* @param {number} [length=-1] How many items to include in the log
* @param {Array<Entry>} [exclude] Entries to not fetch (cached)
* @param {function(cid, entry, parent, depth)} onProgressCallback
* @returns {Promise<Log>}
* @deprecated
*/
* Create a log from a multihash.
* @param {IPFS} ipfs An IPFS instance
* @param {string} multihash Multihash (as a Base58 encoded string) to create the Log from
* @param {number} [length=-1] How many items to include in the log
* @param {Array<Entry>} [exclude] Entries to not fetch (cached)
* @param {function(cid, entry, parent, depth)} onProgressCallback
* @returns {Promise<Log>}
* @deprecated
*/
static async fromMultihash (ipfs, multihash, length = -1, exclude, onProgressCallback) {
if (!isDefined(ipfs)) throw LogError.IPFSNotDefinedError()
if (!isDefined(multihash)) throw new Error(`Invalid multihash: ${multihash}`)

return LogIO.fromCID(ipfs, multihash, length, exclude, onProgressCallback)
}

@@ -113,6 +110,7 @@

static async fromJSON (ipfs, json, length = -1, timeout, onProgressCallback) {
if (!isDefined(ipfs)) throw LogError.IPFSNotDefinedError()
json.heads.forEach(Entry.ensureInterop)
const headCids = json.heads.map(e => e.cid)
const entries = await EntryIO.fetchParallel(ipfs, headCids, length, [], 16, timeout, onProgressCallback)
const finalEntries = entries.slice().sort(Entry.compare)
@@ -144,6 +142,7 @@
if (!Array.isArray(sourceEntries)) {
sourceEntries = [sourceEntries]
}
sourceEntries.forEach(Entry.ensureInterop)

// Fetch given length, return size at least the given input entries
length = length > -1 ? Math.max(length, sourceEntries.length) : length
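Two details worth noting in this file: `toCID` writes the log as dag-cbor while the deprecated `toMultihash` keeps writing dag-pb, and `fromJSON`/`fromEntry` now run `Entry.ensureInterop` over incoming heads and entries before reading `e.cid`. The latter is what lets snapshots produced by an older version (which stored `hash`) still load. A hedged illustration, assuming `getCidProp` identifies a hash-only entry as hash-based:

```js
// Illustrative only: a head as it might appear in a snapshot written by an older version.
const json = { id: 'A', heads: [{ hash: 'QmOldStyleMultihash', payload: 'x' }] }

// Normalize each head so that both `hash` and `cid` resolve.
json.heads.forEach(Entry.ensureInterop)

const headCids = json.heads.map(e => e.cid)
console.log(headCids) // ['QmOldStyleMultihash']
```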
60 changes: 37 additions & 23 deletions src/log.js
@@ -415,6 +415,14 @@ class Log extends GSet {
log._entryIndex !== undefined
}

/**
* Get the log's CID.
* @returns {Promise<string>} The Log CID
*/
toCID () {
return LogIO.toCID(this._storage, this)
}

/**
* Get the log's multihash.
* @returns {Promise<string>} Multihash of the Log as Base58 encoded string
@@ -425,11 +433,21 @@
}

/**
* Get the log's CID.
* @returns {Promise<string>} The Log CID
* Create a log from a CID.
* @param {IPFS} ipfs An IPFS instance
* @param {AccessController} access The access controller instance
* @param {Identity} identity The identity instance
* @param {string} cid The log CID
* @param {number} [length=-1] How many items to include in the log
* @param {Array<Entry>} [exclude] Entries to not fetch (cached)
* @param {function(cid, entry, parent, depth)} onProgressCallback
* @returns {Promise<Log>}
* @deprecated
*/
toCID () {
return LogIO.toCID(this._storage, this)
static async fromCID (ipfs, access, identity, cid, length = -1, exclude, onProgressCallback) {
// TODO: need to verify the entries with 'key'
const data = await LogIO.fromCID(ipfs, cid, length, exclude, onProgressCallback)
return new Log(ipfs, access, identity, data.id, data.values, data.heads, data.clock)
}

/**
@@ -445,44 +463,40 @@
* @deprecated
*/
static async fromMultihash (ipfs, access, identity, multihash, length = -1, exclude, onProgressCallback) {
// TODO: need to verify the entries with 'key'
const data = await LogIO.fromMultihash(ipfs, multihash, length, exclude, onProgressCallback)
return new Log(ipfs, access, identity, data.id, data.values, data.heads, data.clock)
return Log.fromCID(ipfs, access, identity, multihash, length, exclude, onProgressCallback)
}

/**
* Create a log from a CID.
* Create a log from a single entry's CID.
* @param {IPFS} ipfs An IPFS instance
* @param {AccessController} access The access controller instance
* @param {Identity} identity The identity instance
* @param {string} cid The log CID
* @param {number} [length=-1] How many items to include in the log
* @param {Array<Entry>} [exclude] Entries to not fetch (cached)
* @param {string} cid The entry's CID
* @param {string} [logId] The ID of the log
* @param {number} [length=-1] How many entries to include in the log
* @param {function(cid, entry, parent, depth)} onProgressCallback
* @returns {Promise<Log>}
* @deprecated
* @return {Promise<Log>} New Log
*/
static async fromCID (ipfs, access, identity, cid, length = -1, exclude, onProgressCallback) {
static async fromEntryCid (ipfs, access, identity, cid, logId, length = -1, exclude, onProgressCallback) {
// TODO: need to verify the entries with 'key'
const data = await LogIO.fromCID(ipfs, cid, length, exclude, onProgressCallback)
return new Log(ipfs, access, identity, data.id, data.values, data.heads, data.clock)
const data = await LogIO.fromEntryCid(ipfs, cid, length, exclude, onProgressCallback)
return new Log(ipfs, access, identity, logId, data.values)
}

/**
* Create a log from a single entry's CID.
* Create a log from a single entry's multihash.
* @param {IPFS} ipfs An IPFS instance
* @param {AccessController} access The access controller instance
* @param {Identity} identity The identity instance
* @param {string} cid The entry's CID
* @param {string} multihash The entry's multihash
* @param {string} [logId] The ID of the log
* @param {number} [length=-1] How many entries to include in the log
* @param {function(cid, entry, parent, depth)} onProgressCallback
* @return {Promise<Log>} New Log
* @return {Promise<Log>} New Log
* @deprecated
*/
static async fromEntryCid (ipfs, access, identity, cid, logId, length = -1, exclude, onProgressCallback) {
// TODO: need to verify the entries with 'key'
const data = await LogIO.fromEntryCid(ipfs, cid, length, exclude, onProgressCallback)
return new Log(ipfs, access, identity, logId, data.values)
static async fromEntryHash (ipfs, access, identity, multihash, logId, length = -1, exclude, onProgressCallback) {
return Log.fromEntryCid(ipfs, access, identity, multihash, logId, length, exclude, onProgressCallback)
}

/**
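After this change the deprecated `Log` entry points are thin wrappers, so existing call sites should keep working unchanged. A hedged usage sketch (the access controller, identity, and CID strings are placeholders):

```js
// fromMultihash now just forwards to fromCID, so both calls should load the same log.
const viaCid = await Log.fromCID(ipfs, access, identity, 'zdpuSomeLogCid')
const viaMultihash = await Log.fromMultihash(ipfs, access, identity, 'zdpuSomeLogCid') // deprecated

// The restored fromEntryHash likewise forwards to fromEntryCid.
const fromHead = await Log.fromEntryHash(ipfs, access, identity, 'zdpuSomeEntryCid', 'log-id')
```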