diff --git a/building-blocks/autobase.md b/building-blocks/autobase.md index 2f45849..6260e18 100644 --- a/building-blocks/autobase.md +++ b/building-blocks/autobase.md @@ -17,32 +17,32 @@ Notable features include: - [Create a new instance](autobase.md#installation) - Basic: - Properties: - - [base.inputs](autobase.md#baseinputs) - - [base.outputs](autobase.md#baseoutputs) - - [base.localInput](autobase.md#baselocalinput) - - [base.localOutput](autobase.md#baselocaloutput) + - [base.inputs](autobase.md#base.inputs) + - [base.outputs](autobase.md#base.outputs) + - [base.localInput](autobase.md#base.localinput) + - [base.localOutput](autobase.md#base.localoutput) - Methods: - - [base.clock()](autobase.md#const-clock--baseclock) - - [base.isAutobase(core)](autobase.md#await-autobaseisautobasecore) + - [base.clock()](autobase.md#base.clock) + - [base.isAutobase(core)](autobase.md#base.isautobase) - [base.append(value, [clock], [input])](autobase.md#await-baseappendvalue-clock-input) - [base.latest([input1, input2, ...])](autobase.md#const-clock--await-baselatestinput1-input2) - - [base.addInput(input)](autobase.md#await-baseaddinputinput) - - [base.removeInput(input)](autobase.md#await-baseremoveinputinput) - - [base.addOutput(output)](autobase.md#await-baseaddoutputoutput) - - [base.removeOutput(output)](autobase.md#await-baseremoveoutputoutput) + - [base.addInput(input)](autobase.md#base.addinput) + - [base.removeInput(input)](autobase.md#base.removeinput) + - [base.addOutput(output)](autobase.md#base.addoutput) + - [base.removeOutput(output)](autobase.md#base.removeoutput) - Streams: - Methods: - - [base.createCausalStream()](autobase.md#const-stream--basecreatecausalstream) - - [base.createReadStream([options])](autobase.md#const-stream--basecreatereadstreamoptions) + - [base.createCausalStream()](autobase.md#base.createcasualstream) + - [base.createReadStream([options])](autobase.md#base.createreadstream) - Linearized Views: - Properties: - - [view.status](autobase.md#viewstatus) - - [view.length](autobase.md#viewlength) + - [view.status](autobase.md#view.status) + - [view.length](autobase.md#view.length) - Methods: - [base.start({ apply, unwrap } = {})](autobase.md#basestart-apply-unwrap) - - [view.update()](autobase.md#await-viewupdate) - - [view.get(idx, [options])](autobase.md#const-entry--await-viewgetidx-options) - - [view.append([blocks])](autobase.md#await-viewappendblocks) + - [view.update()](autobase.md#view.update) + - [view.get(idx, [options])](autobase.md#view.get) + - [view.append([blocks])](autobase.md#view.append) ### Installation @@ -76,23 +76,23 @@ The following table describes the properties of the optional `options` object. #### Properties -#### **`base.inputs`** +#### **`base.inputs`** {#base.inputs} The list of input Hypercores. -#### **`base.outputs`** +#### **`base.outputs`** {#base.outputs} The list of output Hypercores containing persisted linearized views. -#### **`base.localInput`** +#### **`base.localInput`** {#base.localinput} If non-null, this Hypercore will be appended to in [base.append](autobase.md#await-baseappendvalue-clock-input) operations. -#### **`base.localOutput`** +#### **`base.localOutput`** {#base.localoutput} If non-null, `base.view` will be persisted into this Hypercore. -#### **`base.started`** +#### **`base.started`** {#base.started} A Boolean indicating if `base.view` has been created.
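As a quick orientation, the sketch below shows how these properties relate to construction. It is only a sketch: the `new Autobase({ inputs, localInput })` call is an assumption based on the options object mentioned above, and is not confirmed by this excerpt.

```js
// Sketch only — assumes an options object of the shape { inputs, localInput }.
const Hypercore = require('hypercore')
const Autobase = require('autobase')

// Two local cores for illustration; in practice another writer's input is
// usually instantiated from that writer's public key instead.
const localInput = new Hypercore('./writer-a')
const otherInput = new Hypercore('./writer-b')

const base = new Autobase({ inputs: [localInput, otherInput], localInput })

await base.append('hello world')  // written to base.localInput
console.log(base.inputs.length)   // 2
console.log(base.started)         // false until base.view has been created
```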
@@ -103,23 +103,23 @@ See the [linearized views section](autobase.md#linearized-views) for details abo #### Methods -#### **`const clock = base.clock()`** +#### **`const clock = base.clock()`** {#base.clock} Returns a Map containing the latest lengths for all Autobase inputs. The Map has the form: `(hex-encoded-key) -> (Hypercore length)` -#### **`await Autobase.isAutobase(core)`** +#### **`await Autobase.isAutobase(core)`** {#base.isautobase} Returns `true` if `core` is an Autobase input or an output. -#### **`await base.append(value, [clock], [input])`** +#### **`await base.append(value, [clock], [input])`** {#base.append} Append a new value to the autobase. * `clock`: The causal clock defaults to base.latest. -#### **`const clock = await base.latest([input1, input2, ...])`** +#### **`const clock = await base.latest([input1, input2, ...])`** {#base.latest} Generate a causal clock linking the latest entries of each input. @@ -131,13 +131,13 @@ This is unlikely to be needed generally, prefer to use [`append`](autobase.md#aw await base.append('hello world') ``` -#### **`await base.addInput(input)`** +#### **`await base.addInput(input)`** {#base.addinput} Adds a new input Hypercore. * `input` must either be a fresh Hypercore, or a Hypercore that has previously been used as an Autobase input. -#### **`await base.removeInput(input)`** +#### **`await base.removeInput(input)`** {#base.removeinput} Removes an input Hypercore. @@ -149,7 +149,7 @@ Removing an input, and then subsequently linearizing the Autobase into an existi Future releases will see the addition of 'soft removal', which will freeze an input at a specific length, and no process blocks past that length, while still preserving that input's history in linearized views. For most applications, soft removal matches the intuition behind 'removing a user'. {% endhint %} -#### **`await base.addOutput(output)`** +#### **`await base.addOutput(output)`** {#base.addoutput} Adds a new output Hypercore. @@ -157,7 +157,7 @@ Adds a new output Hypercore. If `base.outputs` is not empty, Autobase will do 'remote linearizing': `base.view.update()` will treat these outputs as the 'trunk', minimizing the amount of local re-processing they need to do during updates. -#### **`await base.removeOutput(output)`** +#### **`await base.removeOutput(output)`** {#base.removeoutput} Removes an output Hypercore. `output` can be either a Hypercore or a Hypercore key. @@ -179,7 +179,7 @@ They should fail in the presence of unavailable nodes -- the deterministic order The simplest kind of linearized view (`const view = base.linearize()`), is just a Hypercore containing the results of a causal stream in reversed order (block N in the index will not be causally dependent on block N+1). -#### **`const stream = base.createCausalStream()`** +#### **`const stream = base.createCausalStream()`** {#base.createcasualstream} Generate a Readable stream of input blocks with deterministic, causal ordering. @@ -191,7 +191,7 @@ If an input node is causally-dependent on another node that is not available, th Similar to `Hypercore.createReadStream()`, this stream starts at the beginning of each input, and does not guarantee the same deterministic ordering as the causal stream. Unlike causal streams, which are used mainly for indexing, read streams can be used to observe updates. And as they move forward in time, they can be live. 
-#### **`const stream = base.createReadStream([options])`** +#### **`const stream = base.createReadStream([options])`** {#base.createreadstream} Generate a Readable stream of input blocks, from earliest to latest. @@ -256,7 +256,7 @@ console.log(base.view.length) #### View Creation -#### **`base.start({ apply, unwrap } = {})`** +#### **`base.start({ apply, unwrap } = {})`** {#base.start} Creates a new linearized view, and sets it on `base.view`. The view mirrors the Hypercore API wherever possible, meaning it can be used as a drop-in replacement for a Hypercore instance. @@ -271,7 +271,7 @@ When calling `base.start` manually, it must only be called once. | **`unwrap`** | Set this to auto unwrap the gets to only return .value | Boolean | `false` | | **`apply`** | The apply function described above | Function | `(batch) => {}` | -#### **`view.status`** +#### **`view.status`** {#view.status} The status of the last linearize operation. @@ -280,15 +280,15 @@ Returns an object of the form `{ added: N, removed: M }` where: * `added` indicates how many nodes were appended to the output during the linearization * `removed` indicates how many nodes were truncated from the output during the linearization -#### **`view.length`** +#### **`view.length`** {#view.length} The length of the view. Similar to `hypercore.length`. -#### **`await view.update()`** +#### **`await view.update()`** {#view.update} Ensures the view is up-to-date. -#### **`const entry = await view.get(idx, [options])`** +#### **`const entry = await view.get(idx, [options])`** {#view.get} Gets an entry from the view. If set `unwrap` to true, it returns `entry.value`. Otherwise, it returns an entry similar to this: @@ -299,6 +299,6 @@ Gets an entry from the view. If set `unwrap` to true, it returns `entry.value`. } ``` -#### **`await view.append([blocks])`** +#### **`await view.append([blocks])`** {#view.append} This operation can only be performed inside the `apply` function. diff --git a/building-blocks/hyperbee.md b/building-blocks/hyperbee.md index 7c313f9..efe6799 100644 --- a/building-blocks/hyperbee.md +++ b/building-blocks/hyperbee.md @@ -10,38 +10,38 @@ Hyperbee is an append only B-tree based on [`Hypercore`](hypercore.md). 
It provi * [Create a new instance](hyperbee.md#installation): * Basic: * Properties: - * [db.core](./hyperbee.md#dbcore) - * [db.version](./hyperbee.md#dbversion) - * [db.id](./hyperbee.md#dbid) - * [db.key](./hyperbee.md#dbkey) - * [db.discoveryKey](./hyperbee.md#dbdiscoverykey) - * [db.writable](./hyperbee.md#dbwritable) - * [db.readable](./hyperbee.md#dbreadable) + * [db.core](./hyperbee.md#db.core) + * [db.version](./hyperbee.md#db.version) + * [db.id](./hyperbee.md#db.id) + * [db.key](./hyperbee.md#db.key) + * [db.discoveryKey](./hyperbee.md#db.discoverykey) + * [db.writable](./hyperbee.md#db.writable) + * [db.readable](./hyperbee.md#db.readable) * Methods: - * [db.ready()](hyperbee.md#await-dbready) - * [db.close()](hyperbee.md#await-dbclose) - * [db.put(key, \[value\], \[options\])](hyperbee.md#await-dbputkey-value-options) - * [db.get(key, \[options\])](hyperbee.md#const--seq-key-value---await-dbgetkey-options) - * [db.del(key, \[options\])](hyperbee.md#await-dbdelkey-options) - * [db.getBySeq(seq, \[options\])](hyperbee.md#const--key-value---await-dbgetbyseqseq-options) - * [db.replicate(isInitiatorOrStream)](hyperbee.md#const-stream--dbreplicateisinitiatororstream) - * [db.batch()](hyperbee.md#const-batch--dbbatch) - * [batch.put(key, \[value\], \[options\])](hyperbee.md#await-batchputkey-value-options) - * [batch.get(key, \[options\])](hyperbee.md#const--seq-key-value---await-batchgetkey-options) - * [batch.del(key, \[options\])](hyperbee.md#await-batchdelkey-options) - * [batch.flush()](hyperbee.md#await-batchflush) - * [batch.close()](hyperbee.md#await-batchclose) - * [db.createReadStream(\[range\], \[options\])](hyperbee.md#const-stream--dbcreatereadstreamrange-options) - * [db.peek(\[range\], \[options\])](hyperbee.md#const--seq-key-value---await-dbpeekrange-options) - * [db.createHistoryStream(\[options\])](hyperbee.md#const-stream--dbcreatehistorystreamoptions) - * [db.createDiffStream(otherVersion, \[options\])](hyperbee.md#const-stream--dbcreatediffstreamotherversion-options) - * [db.getAndWatch(key, \[options\])](hyperbee.md#const-entrywatcher--await-dbgetandwatchkey-options) - * [db.watch(\[range\])](hyperbee.md#const-watcher--dbwatchrange) - * [db.checkout(version)](hyperbee.md#const-snapshot--dbcheckoutversion) - * [db.snapshot()](hyperbee.md#const-snapshot--dbsnapshot) - * [db.sub('sub-prefix', \[options\])](hyperbee.md#const-sub--dbsubsub-prefix-optionss) - * [db.getHeader(\[options\])](hyperbee.md#const-header--await-dbgetheaderoptions) - * [Hyperbee.isHyperbee(core, \[options\])](hyperbee.md#const-ishyperbee--await-hyperbeeishyperbeecore-options) + * [db.ready()](hyperbee.md#db.ready) + * [db.close()](hyperbee.md#db.close) + * [db.put(key, \[value\], \[options\])](hyperbee.md#db.put) + * [db.get(key, \[options\])](hyperbee.md#db.get) + * [db.del(key, \[options\])](hyperbee.md#db.del) + * [db.getBySeq(seq, \[options\])](hyperbee.md#db.getbyseq) + * [db.replicate(isInitiatorOrStream)](hyperbee.md#db.replicate) + * [db.batch()](hyperbee.md#db.batch) + * [batch.put(key, \[value\], \[options\])](hyperbee.md#batch.put) + * [batch.get(key, \[options\])](hyperbee.md#batch.get) + * [batch.del(key, \[options\])](hyperbee.md#batch.del) + * [batch.flush()](hyperbee.md#batch.flush) + * [batch.close()](hyperbee.md#batch.close) + * [db.createReadStream(\[range\], \[options\])](hyperbee.md#db.createreadstream) + * [db.peek(\[range\], \[options\])](hyperbee.md#db.peek) + * [db.createHistoryStream(\[options\])](hyperbee.md#db.createhistorystream) + * 
[db.createDiffStream(otherVersion, \[options\])](hyperbee.md#db.creatediffstream) + * [db.getAndWatch(key, \[options\])](hyperbee.md#db.getandwatch) + * [db.watch(\[range\])](hyperbee.md#db.watch) + * [db.checkout(version)](hyperbee.md#db.checkout) + * [db.snapshot()](hyperbee.md#db.snapshot) + * [db.sub('sub-prefix', \[options\])](hyperbee.md#db.sub) + * [db.getHeader(\[options\])](hyperbee.md#db.getheader) + * [Hyperbee.isHyperbee(core, \[options\])](hyperbee.md#db.ishyperbee) ### Installation @@ -70,50 +70,50 @@ Make a new Hyperbee instance. `core` should be a [`Hypercore`](hypercore.md). #### Properties -#### **`db.core`** +#### **`db.core`** {#db.core} The underlying [Hypercore](hypercore.md) backing this bee. -#### **`db.version`** +#### **`db.version`** {#db.version} A number that indicates how many modifications were made, is useful as a version identifier. -#### **`db.id`** +#### **`db.id`** {#db.id} String containing the ID (z-base-32 of the public key) identifying this bee. -#### **`db.key`** +#### **`db.key`** {#db.key} Buffer containing the public key identifying this bee. -#### **`db.discoveryKey`** +#### **`db.discoveryKey`** {#db.discoverykey} Buffer containing a key derived from `db.key`. > This discovery key is not for verifying the data, it's only to announce or look for peers that are sharing the same bee, without leaking the bee key. -#### **`db.writable`** +#### **`db.writable`** {#db.writable} Boolean indicating to put or delete data in this bee. -#### **`db.readable`** +#### **`db.readable`** {#db.readable} Boolean indicating if we can read from this bee. After closing the bee this will be `false`. #### **Methods** -#### **`await db.ready()`** +#### **`await db.ready()`** {#db.ready} Waits until the internal state is loaded. Use it once before reading synchronous properties like `db.version`, unless any of the other APIs have been called first. -#### **`await db.close()`** +#### **`await db.close()`** {#db.close} Fully close this bee, including its core. -#### **`await db.put(key, [value], [options])`** +#### **`await db.put(key, [value], [options])`** {#db.put} Inserts a new key. Value can be optional. @@ -153,7 +153,7 @@ function cas (prev, next) { } ``` -#### **`const { seq, key, value } = await db.get(key, [options])`** +#### **`const { seq, key, value } = await db.get(key, [options])`** {#db.get} Gets a key's value. Returns `null` if the key doesn't exist. @@ -172,7 +172,7 @@ Gets a key's value. Returns `null` if the key doesn't exist. > `db.get(key, [options])` uses the state at the time of initiating the read, so the write operations that complete after `get` is initiated and before it is resolved are ignored. -#### **`await db.del(key, [options])`** +#### **`await db.del(key, [options])`** {#db.del} Delete a key. @@ -207,17 +207,17 @@ function cas (prev) { return prev.value === 'can-be-deleted' } ``` -#### **`const { key, value } = await db.getBySeq(seq, [options])`** +#### **`const { key, value } = await db.getBySeq(seq, [options])`** {#db.getbyseq} Gets the key and value from a block number. `seq` is the Hypercore index. Returns `null` if block doesn't exists. -#### **`const stream = db.replicate(isInitiatorOrStream)`** +#### **`const stream = db.replicate(isInitiatorOrStream)`** {#db.replicate} See more about how replicate works at [core.replicate](hypercore.md#const-stream-core.replicate-isinitiatororreplicationstream). 
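To make the read/write methods above concrete, here is a minimal usage sketch. The `keyEncoding`/`valueEncoding` options are an assumption (they are not shown in this excerpt); the rest only uses `db.put`, `db.get` and `db.del` as documented above.

```js
const Hypercore = require('hypercore')
const Hyperbee = require('hyperbee')

const core = new Hypercore('./storage')
// keyEncoding/valueEncoding are assumed options, used here for readable strings.
const db = new Hyperbee(core, { keyEncoding: 'utf-8', valueEncoding: 'utf-8' })

await db.put('key1', 'value1')

const node = await db.get('key1')  // -> { seq, key, value } or null
console.log(node && node.value)    // 'value1'

await db.del('key1')
console.log(await db.get('key1'))  // null
```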
-#### **`const batch = db.batch()`** +#### **`const batch = db.batch()`** {#db.batch} Makes a new atomic batch that is either fully processed or not processed at all. @@ -225,29 +225,29 @@ Makes a new atomic batch that is either fully processed or not processed at all. > If there are several inserts and deletions then a batch can be much faster. -#### **`await batch.put(key, [value], [options])`** +#### **`await batch.put(key, [value], [options])`** {#batch.put} Inserts a key into a batch. `options` are the same as for the **`db.put`** method. -#### **`const { seq, key, value } = await batch.get(key, [options])`** +#### **`const { seq, key, value } = await batch.get(key, [options])`** {#batch.get} Gets a key and value from a batch. `options` are the same as for the **`db.get`** method. -#### **`await batch.del(key, [options])`** +#### **`await batch.del(key, [options])`** {#batch.del} Deletes a key from the batch. `options` are the same as for the **`db.del`** method. -#### **`await batch.flush()`** +#### **`await batch.flush()`** {#batch.flush} Commits the batch to the database, and releases any locks it has acquired. -#### **`await batch.close()`** +#### **`await batch.close()`** {#batch.close} Destroys a batch, and releases any locks it has acquired on the `db`. @@ -265,7 +265,7 @@ A batch's state snaps at creation time, so write operations applied outside of t -#### **`const stream = db.createReadStream([range], [options])`** +#### **`const stream = db.createReadStream([range], [options])`** {#db.createreadstream} Make a read stream. Sort order is based on the binary value of the keys. All entries in the stream are similar to the ones returned from **`db.get`**. @@ -287,11 +287,11 @@ Make a read stream. Sort order is based on the binary value of the keys. All ent | **`reverse`** | determine order of the keys | Boolean | `false` | | **`limit`** | maximum number of entries needed | Integer | `-1` | -#### **`const { seq, key, value } = await db.peek([range], [options])`** +#### **`const { seq, key, value } = await db.peek([range], [options])`** {#db.peek} Similar to doing a read stream and returning the first value, but a bit faster than that. -#### **`const stream = db.createHistoryStream([options])`** +#### **`const stream = db.createHistoryStream([options])`** {#db.createhistorystream} Create a stream of all entries ever inserted or deleted from the `db`. Each entry has an additional `type` property indicating if it was a `put` or `del` operation. @@ -311,7 +311,7 @@ Create a stream of all entries ever inserted or deleted from the `db`. Each entr > If any of the gte, gt, lte, lt arguments are `< 0` then they'll implicitly be added to the version before starting, so doing `{ gte: -1 }` makes a stream starting at the last index. -#### **`const stream = db.createDiffStream(otherVersion, [options])`** +#### **`const stream = db.createDiffStream(otherVersion, [options])`** {#db.creatediffstream} Creates a stream of shallow changes between two versions of the `db`. @@ -331,7 +331,7 @@ Each entry is sorted by key and looks like this: > If the entries are causally equal (i.e., they have the identical seq), they are not returned, only the diff. -#### `const entryWatcher = await db.getAndWatch(key, [options])` +#### `const entryWatcher = await db.getAndWatch(key, [options])` {#db.getandwatch} Returns a watcher which listens to changes on the given key. @@ -344,7 +344,7 @@ Returns a watcher which listens to changes on the given key. Call `await watcher.close()` to stop the watcher.
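Putting the batch methods above together, a typical atomic write looks roughly like this (sketch; `db` is a Hyperbee instance created as in the installation section):

```js
const batch = db.batch()

await batch.put('config/theme', 'dark')
await batch.put('config/lang', 'en')

const node = await batch.get('config/theme') // reads the batch's own (uncommitted) state
console.log(node && node.value)              // 'dark'

await batch.flush()                          // commit atomically and release any locks
// use batch.close() instead to discard the batch without committing
```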
-#### **`const watcher = db.watch([range])`** +#### **`const watcher = db.watch([range])`** {#db.watch} Listens to changes that are on the optional `range`. @@ -378,15 +378,15 @@ Stops the watcher. Using `break` inside the `for await` loop will also destroy t > Watchers are not supported on subs and checkouts. Instead, use the `range` option to limit the scope. -#### **`const snapshot = db.checkout(version)`** +#### **`const snapshot = db.checkout(version)`** {#db.checkout} Get a read-only snapshot of a previous version. -#### **`const snapshot = db.snapshot()`** +#### **`const snapshot = db.snapshot()`** {#db.snapshot} Shorthand for getting a checkout for the current version. -#### **`const sub = db.sub('sub-prefix', options = {})`** +#### **`const sub = db.sub('sub-prefix', options = {})`** {#db.sub} Create a sub-database where a given value will prefix all entries. @@ -413,13 +413,13 @@ await sub.put('b', 'hello') await sub.get('b') ``` -#### **`const header = await db.getHeader([options])`** +#### **`const header = await db.getHeader([options])`** {#db.getheader} Returns the header contained in the first block. Throws an error if undecodable. `options` are the same as the `core.get` method. -#### **`const isHyperbee = await Hyperbee.isHyperbee(core, [options])`** +#### **`const isHyperbee = await Hyperbee.isHyperbee(core, [options])`** {#db.ishyperbee} Returns `true` if the core contains a Hyperbee, `false` otherwise. diff --git a/building-blocks/hypercore.md b/building-blocks/hypercore.md index c486ed5..b129de6 100644 --- a/building-blocks/hypercore.md +++ b/building-blocks/hypercore.md @@ -16,45 +16,45 @@ Notable features include: * [Creating a new instance](hypercore.md#installation) * Basic: * Properties: - * [core.writable](hypercore.md#corewritable) - * [core.readable](hypercore.md#corereadable) - * [core.id](hypercore.md#coreid) - * [core.key](hypercore.md#corekey) - * [core.keyPair](hypercore.md#corekeypair) - * [core.discoveryKey](hypercore.md#corediscoverykey) - * [core.encryptionKey](hypercore.md#coreencryptionkey) - * [core.length](hypercore.md#corelength) - * [core.contiguousLength](hypercore.md#corecontiguouslength) - * [core.fork](hypercore.md#corefork) - * [core.padding](hypercore.md#corepadding) + * [core.writable](hypercore.md#core.writable) + * [core.readable](hypercore.md#core.readable) + * [core.id](hypercore.md#core.id) + * [core.key](hypercore.md#core.key) + * [core.keyPair](hypercore.md#core.keypair) + * [core.discoveryKey](hypercore.md#core.discoverykey) + * [core.encryptionKey](hypercore.md#core.encryptionkey) + * [core.length](hypercore.md#core.length) + * [core.contiguousLength](hypercore.md#core.contiguouslength) + * [core.fork](hypercore.md#core.fork) + * [core.padding](hypercore.md#core.padding) * Methods: - * [core.append(block)](hypercore.md#const--length-bytelength---await-coreappendblock) - * [core.get(index, \[options\])](hypercore.md#const-block--await-coregetindex-options) - * [core.has(start, \[end\])](hypercore.md#const-has--await-corehasstart-end) - * [core.update()](hypercore.md#const-updated--await-coreupdateoptions) - * [core.seek(byteOffset)](hypercore.md#const-index-relativeoffset--await-coreseekbyteoffset-options) - * [core.createReadStream(\[options\])](hypercore.md#const-stream--corecreatereadstreamoptions) - * [core.createByteStream(\[options\])](hypercore.md#const-stream--corecreatereadstreamoptions) - * [core.clear(start, \[end\], \[options\])](hypercore.md#const-cleared--await-coreclearstart-end-options) - * [core.truncate(newLength, 
\[forkId\])](hypercore.md#await-coretruncatenewlength-forkid) - * [core.purge()](hypercore.md#await-corepurge) - * [core.treeHash(\[length\])](hypercore.md#const-hash--await-coretreehashlength) - * [core.download(\[range\])](hypercore.md#const-range--coredownloadrange) - * [core.session(\[options\])](hypercore.md#const-session--await-coresessionoptions) - * [core.info(\[options\])](hypercore.md#const-info--await-coreinfooptions) - * [core.close()](hypercore.md#await-coreclose) - * [core.ready()](hypercore.md#await-coreready) - * [core.replicate(isInitiatorOrReplicationStream, \[options\])](hypercore.md#const-stream--corereplicateisinitiatorstream-options) - * [core.findingPeers()](hypercore.md#const-done--corefindingpeers) - * [core.session(\[options\])](hypercore.md#coresessionoptions) - * [core.snapshot(\[options\])](hypercore.md#coresnapshotoptions) + * [core.append(block)](hypercore.md#core.append) + * [core.get(index, \[options\])](hypercore.md#core.get) + * [core.has(start, \[end\])](hypercore.md#core.has) + * [core.update()](hypercore.md#core.update) + * [core.seek(byteOffset)](hypercore.md#core.seek) + * [core.createReadStream(\[options\])](hypercore.md#core.createreadstream) + * [core.createByteStream(\[options\])](hypercore.md#core.createbytestream) + * [core.clear(start, \[end\], \[options\])](hypercore.md#core.clear) + * [core.truncate(newLength, \[forkId\])](hypercore.md#core.truncate) + * [core.purge()](hypercore.md#core.purge) + * [core.treeHash(\[length\])](hypercore.md#core.treehash) + * [core.download(\[range\])](hypercore.md#core.download) + * [core.session(\[options\])](hypercore.md#core.session) + * [core.info(\[options\])](hypercore.md#core.info) + * [core.close()](hypercore.md#core.close) + * [core.ready()](hypercore.md#core.ready) + * [core.replicate(isInitiatorOrReplicationStream, \[options\])](hypercore.md#core.replicate) + * [core.findingPeers()](hypercore.md#core.findingpeers) + * [core.session(\[options\])](hypercore.md#core.session) + * [core.snapshot(\[options\])](hypercore.md#core.snapshot) * Events: - * [append](hypercore.md#coreonappend) - * [truncate](hypercore.md#coreontruncate-ancestors-forkid) - * [ready](hypercore.md#coreonready) - * [close](hypercore.md#coreonclose) - * [peer-add](hypercore.md#coreonpeer-add) - * [peer-remove](hypercore.md#coreonpeer-remove) + * [append](hypercore.md#core.onappend) + * [truncate](hypercore.md#core.ontruncate) + * [ready](hypercore.md#core.onready) + * [close](hypercore.md#core.onclose) + * [peer-add](hypercore.md#core.onpeer-add) + * [peer-remove](hypercore.md#core.onpeer-remove) ### Installation @@ -136,15 +136,15 @@ valueEncodings will be applied to individual blocks, even if we append batches. #### Properties -#### **`core.readable`** +#### **`core.readable`** {#core.readable} Can we read from this core? After [closing](hypercore.md#await-coreclose) the core this will be `false`. -#### **`core.id`** +#### **`core.id`** {#core.id} A string containing the ID (z-base-32 of the public key) that identifies this core. -#### **`core.key`** +#### **`core.key`** {#core.key} Buffer containing the public key identifying this core. @@ -160,50 +160,50 @@ Since the public key is also a read capability, it can't be used to discover oth -#### **`core.keyPair`** +#### **`core.keyPair`** {#core.keypair} An object containing buffers of the core's public and secret key. -#### **`core.discoveryKey`** +#### **`core.discoveryKey`** {#core.discoverykey} Buffer containing a key derived from the core's public key.
In contrast to `core.key`, this key cannot be used to verify the data. It can be used to announce or look for peers that are sharing the same core, without leaking the core key. > The above properties are populated after [`ready`](hypercore.md#await-core.ready) has been emitted. Will be `null` before the event. -#### **`core.encryptionKey`** +#### **`core.encryptionKey`** {#core.encryptionkey} Buffer containing the optional block encryption key of this core. Will be `null` unless block encryption is enabled. -#### **`core.writable`** +#### **`core.writable`** {#core.writable} Can we append to this core? > Populated after [`ready`](hypercore.md#await-core.ready) has been emitted. Will be `false` before the event. -#### **`core.length`** +#### **`core.length`** {#core.length} The number of blocks of data available on this core. If `sparse: false`, this will equal `core.contiguousLength`. -#### **`core.contiguousLength`** +#### **`core.contiguousLength`** {#core.contiguouslength} The number of blocks contiguously available starting from the first block of this core. -#### **`core.fork`** +#### **`core.fork`** {#core.fork} The current fork id of this core. > The above properties are populated after [`ready`](hypercore.md#await-core.ready) has been emitted. Will be `0` before the event. -#### **`core.padding`** +#### **`core.padding`** {#core.padding} The amount of padding applied to each block of this core. Will be `0` unless block encryption is enabled. #### Methods -#### **`const { length, byteLength } = await core.append(block)`** +#### **`const { length, byteLength } = await core.append(block)`** {#core.append} Append a block of data (or an array of blocks) to the core. Returns the new length and byte length of the core. @@ -218,7 +218,7 @@ await core.append(Buffer.from('I am a block of data')) await core.append([Buffer.from('batch block 1'), Buffer.from('batch block 2')]) ``` -#### **`const block = await core.get(index, [options])`** +#### **`const block = await core.get(index, [options])`** {#core.get} Get a block of data. If the data is not available locally this method will prioritize and wait for the data to be downloaded. @@ -243,11 +243,11 @@ const blockLocal = await core.get(44, { wait: false }) | **`valueEncoding`** | One of 'json', 'utf-8', or 'binary' | String | core's valueEncoding | | **`decrypt`** | Automatically decrypts the block if encrypted | Boolean | `true` | -#### **`const has = await core.has(start, [end])`** +#### **`const has = await core.has(start, [end])`** {#core.has} Check if the core has all blocks between `start` and `end`. -#### **`const updated = await core.update([options])`** +#### **`const updated = await core.update([options])`** {#core.update} Wait for the core to try and find a signed update to its length. Does not download any data from peers except for proof of the new core length. @@ -262,7 +262,7 @@ console.log('core was updated?', updated, 'length is', core.length) | :--------: | ------------------------------------------------- | ------- | ------- | | **`wait`** | Wait for the meta-data of hypercore to be updated | Boolean | `replicator.findingPeers > 0` | -#### **`const [index, relativeOffset] = await core.seek(byteOffset, [options])`** +#### **`const [index, relativeOffset] = await core.seek(byteOffset, [options])`** {#core.seek} Seek a byte offset.
@@ -283,7 +283,7 @@ const third = await core.seek(5) // returns [2, 1] | **`wait`** | wait for data to be downloaded | Boolean | `true` | | **`timeout`** | wait for given milliseconds | Integer | `core.timeout` | -#### **`const stream = core.createReadStream([options])`** +#### **`const stream = core.createReadStream([options])`** {#core.createreadstream} Make a read stream to read a range of data out at once. @@ -311,7 +311,7 @@ for await (const data of fullStream) { | **`live`** | Allow realtime data replication | Boolean | `false` | | **`snapshot`** | Auto set end to core.length on open or update it on every read | Boolean | `true` | -#### `const bs = core.createByteStream([options])` +#### `const bs = core.createByteStream([options])` {#core.createbytestream} Make a byte stream to read a range of bytes. @@ -336,7 +336,7 @@ partialStream.pipe(process.stdout) | **`byteLength`** | Number of bytes that will be read | Integer | `core.byteLength - options.byteOffset` | | **`prefetch`** | Controls the number of blocks to preload | Integer | `32` | -#### **`const cleared = await core.clear(start, [end], [options])`** +#### **`const cleared = await core.clear(start, [end], [options])`** {#core.clear} Clears stored blocks between `start` and `end`, reclaiming storage when possible. @@ -353,21 +353,21 @@ await core.clear(0, 10) // clear block 0-10 from local cache The core will also 'gossip' with peers it is connected to about the blocks it no longer has. -#### **`await core.truncate(newLength, [forkId])`** +#### **`await core.truncate(newLength, [forkId])`** {#core.truncate} Truncates the core to a smaller length. By default, this will increment the fork ID of the core by 1, but a preferred fork ID can be set with the option. Note that the fork ID should be incremented monotonically. -#### `await core.purge()` +#### `await core.purge()` {#core.purge} Purge the Hypercore from storage, completely removing all data. -#### **`const hash = await core.treeHash([length])`** +#### **`const hash = await core.treeHash([length])`** {#core.treehash} Get the Merkle Tree hash of the core at a given length, defaulting to the current length of the core. -#### **`const range = core.download([range])`** +#### **`const range = core.download([range])`** {#core.download} Download a range of data.
> ℹ️ In general, waiting for `ready` is unnecessary unless there's a need to check a synchronous property (like `key` or `discoverykey`) before any other async API method has been called. All async methods on the public API, await `ready` internally. -#### **`const stream = core.replicate(isInitiator|stream, options)`** +#### **`const stream = core.replicate(isInitiator|stream, options)`** {#core.replicate} Creates a replication stream. We should pipe this to another Hypercore instance. @@ -500,7 +500,7 @@ socket.pipe(localCore.replicate(true)).pipe(socket) > In almost all cases, the use of both Hyperswarm and Corestore Replication is advised and will meet all needs. -#### **`const done = core.findingPeers()`** +#### **`const done = core.findingPeers()`** {#core.findingpeers} Create a hook that tells Hypercore users are finding peers for this core in the background. Call `done` when user current discovery iteration is done. If using Hyperswarm, call this after a `swarm.flush()` finishes. @@ -522,7 +522,7 @@ swarm.flush().then(() => done()) await core.get(0) ``` -#### **`core.session([options])`** +#### **`core.session([options])`** Returns a new session for the Hypercore. @@ -547,7 +547,7 @@ await core.close() // will not close the underlying Hypercore await session1.close() // will close the Hypercore ``` -#### **`core.snapshot([options])`** +#### **`core.snapshot([options])`** {#core.snapshot} Returns a snapshot of the core at that particular time. This is useful for ensuring that multiple `get` operations are acting on a consistent view of the Hypercore (i.e. if the core forks in between two reads, the second should throw an error). @@ -559,26 +559,26 @@ If [`core.update()`](hypercore.md#const-updated--await-coreupdateoptions) is exp #### Events -#### **`core.on('append')`** +#### **`core.on('append')`** {#core.onappend} Emitted when the core has been appended to (i.e., has a new length/byte length), either locally or remotely. -#### **`core.on('truncate', ancestors, forkId)`** +#### **`core.on('truncate', ancestors, forkId)`** {#core.ontruncate} Emitted when the core has been truncated, either locally or remotely. -#### **`core.on('ready')`** +#### **`core.on('ready')`** {#core.onready} Emitted after the core has initially opened all its internal state. -#### **`core.on('close')`** +#### **`core.on('close')`** {#core.onclose} Emitted when the core has been fully closed. -#### **`core.on('peer-add')`** +#### **`core.on('peer-add')`** {#core.onpeer-add} Emitted when a new connection has been established with a peer. -#### **`core.on('peer-remove')`** +#### **`core.on('peer-remove')`** {#core.onpear-remove} Emitted when a peer's connection has been closed. 
diff --git a/building-blocks/hyperdht.md b/building-blocks/hyperdht.md index 1c3854d..8496702 100644 --- a/building-blocks/hyperdht.md +++ b/building-blocks/hyperdht.md @@ -14,38 +14,38 @@ Notable features include: * [Create a new instance](hyperdht.md#installation) * Basic: * Methods: - * [DHT.keyPair(\[seed\])](hyperdht.md#keypair--dhtkeypairseed) - * [DHT.bootstrapper(port, host, \[options\])](hyperdht.md#node--dhtbootstrapperport-host-options) - * [node.destroy(\[options\])](hyperdht.md#await-nodedestroyoptions) + * [DHT.keyPair(\[seed\])](hyperdht.md#dht.keypair) + * [DHT.bootstrapper(port, host, \[options\])](hyperdht.md#dht.bootstrapper) + * [node.destroy(\[options\])](hyperdht.md#node.destroy) * [Creating P2P servers:](hyperdht.md#creating-p2p-servers) * [node.createServer(\[options\], \[onconnection\])](hyperdht.md#const-server--nodecreateserveroptions-onconnection) * Methods: - * [server.listen(keyPair)](hyperdht.md#await-serverlistenkeypair) - * [server.refresh()](hyperdht.md#serverrefresh) - * [server.address()](hyperdht.md#serveraddress) - * [server.close()](hyperdht.md#await-serverclose) + * [server.listen(keyPair)](hyperdht.md#server.listen) + * [server.refresh()](hyperdht.md#server.refresh) + * [server.address()](hyperdht.md#server.address) + * [server.close()](hyperdht.md#server.close) * Events: - * [connection](hyperdht.md#serveronconnection-socket) - * [listening](hyperdht.md#serveronlistening) - * [close](hyperdht.md#serveronclose) + * [connection](hyperdht.md#server.onconnection) + * [listening](hyperdht.md#server.onlistening) + * [close](hyperdht.md#server.onclose) * [Connecting to P2P servers](hyperdht.md#connecting-to-p2p-servers): - * [node.connect(remotePublicKey, \[options\])](hyperdht.md#const-socket--nodeconnectremotepublickey-options) + * [node.connect(remotePublicKey, \[options\])](hyperdht.md#node.connect) * Properties: - * [socket.remotePublicKey](hyperdht.md#socketremotepublickey) - * [socket.publicKey](hyperdht.md#socketpublickey) + * [socket.remotePublicKey](hyperdht.md#socket.remotepublickey) + * [socket.publicKey](hyperdht.md#socket.publickey) * Events: * [open](hyperdht.md#socketonopen) * [Additional Peer Discovery](hyperdht.md#additional-peer-discovery): * Methods: - * [node.lookup(topic, \[options\])](hyperdht.md#const-stream--nodelookuptopic-options) - * [node.announce(topic, keyPair, \[relayAddresses\], \[options\])](hyperdht.md#const-stream--nodeannouncetopic-keypair-relayaddresses-options) - * [node.unannounce(topic, keyPair, \[options\])](hyperdht.md#await-nodeunannouncetopic-keypair-options) - * [Mutable/immutable records:](hyperdht.md#mutableimmutable-records) + * [node.lookup(topic, \[options\])](hyperdht.md#node.lookup) + * [node.announce(topic, keyPair, \[relayAddresses\], \[options\])](hyperdht.md#node.announce) + * [node.unannounce(topic, keyPair, \[options\])](hyperdht.md#node.unannounce) + * [Mutable/immutable records:](hyperdht.md#mutable-immutable-records) * Methods: - * [node.immutablePut(value, \[options\])](hyperdht.md#const--hash-closestnodes---await-nodeimmutableputvalue-options) - * [node.immutableGet(hash, \[options\])](hyperdht.md#const--value-from---await-nodeimmutablegethash-options) - * [node.mutablePut(keyPair, value, \[options\])](hyperdht.md#const--publickey-closestnodes-seq-signature---await-nodemutableputkeypair-value-options) - * [node.mutableGet(publicKey, \[options\])](hyperdht.md#const--value-from-seq-signature---await-nodemutablegetpublickey-options) + * [node.immutablePut(value, 
\[options\])](hyperdht.md#node.immutableput) + * [node.immutableGet(hash, \[options\])](hyperdht.md#node.immutableget) + * [node.mutablePut(keyPair, value, \[options\])](hyperdht.md#node.mutableput) + * [node.mutableGet(publicKey, \[options\])](hyperdht.md#node.mutableget) ### Installation @@ -74,7 +74,7 @@ See [dht-rpc](https://github.com/holepunchto/dht-rpc) for more options as HyperD #### Methods -#### **`keyPair = DHT.keyPair([seed])`** +#### **`keyPair = DHT.keyPair([seed])`** {#dht.keypair} Generates the required key pair for DHT operations. @@ -82,11 +82,11 @@ Returns an object with `{publicKey, secretKey}`. `publicKey` holds a public key Any options passed are forwarded to dht-rpc. -#### `node = DHT.bootstrapper(port, host, [options])` +#### `node = DHT.bootstrapper(port, host, [options])` {#dht.bootstrapper} Use this method to create a bootstrap node in order to run a Hyperswarm network. -#### **`await node.destroy([options])`** +#### **`await node.destroy([options])`** {#node.destroy} Fully destroy this DHT node. @@ -94,7 +94,7 @@ Fully destroy this DHT node. ### Creating P2P Servers -#### **`const server = node.createServer([options], [onconnection])`** +#### **`const server = node.createServer([options], [onconnection])`** {#server.createserver} Creates a new server for accepting incoming encrypted P2P connections. @@ -116,15 +116,15 @@ Creates a new server for accepting incoming encrypted P2P connections. #### Methods -#### **`await server.listen(keyPair)`** +#### **`await server.listen(keyPair)`** {#server.listen} Makes the server listen on a keyPair. To connect to this server use `keyPair.publicKey` as the connect address. -#### **`server.refresh()`** +#### **`server.refresh()`** {#server.refresh} Refreshes the server, causing it to reannounce its address. This is automatically called on network changes. -#### **`server.address()`** +#### **`server.address()`** {#server.address} Returns an object containing the address of the server: @@ -138,13 +138,13 @@ Returns an object containing the address of the server: Information can also be retrieved from `node.remoteAddress()` minus the public key. -#### **`await server.close()`** +#### **`await server.close()`** {#server.close} Stops listening. #### Events -#### **`server.on('connection', socket)`** +#### **`server.on('connection', socket)`** {#server.onconnection} Emitted when a new encrypted connection has passed the firewall check. @@ -152,17 +152,17 @@ Emitted when a new encrypted connection has passed the firewall check. User connections are identifiable by `socket.remotePublicKey` and `socket.handshakeHash` contains a unique hash representing this crypto session (same on both sides). -#### **`server.on('listening')`** +#### **`server.on('listening')`** {#server.onlistening} Emitted when the server is fully listening on a keyPair. -#### **`server.on('close')`** +#### **`server.on('close')`** {#server.onclose} Emitted when the server is fully closed. ### Connecting to P2P Servers -#### **`const socket = node.connect(remotePublicKey, [options])`** +#### **`const socket = node.connect(remotePublicKey, [options])`** {#node.connect} Connect to a remote server. Similar to `createServer`, this performs UDP hole punching for P2P connectivity. @@ -182,17 +182,17 @@ const encryptedSocket = node.connect(remotePublicKey) #### Properties -#### **`socket.remotePublicKey`** +#### **`socket.remotePublicKey`** {#socket.remotepublickey} The public key of the remote peer.
-#### **`socket.publicKey`** +#### **`socket.publicKey`** {#socket.publickey} -The public key of the connection. +The public key of the connection. #### Events -#### **`socket.on('open')`** +#### **`socket.on('open')`** {#socket.onopen} Emitted when the encrypted connection has been fully established with the server. @@ -202,9 +202,9 @@ encryptedSocket.on('open', function () { }) ``` -### Additional Peer Discovery +### Additional Peer Discovery {#additional-peer-discovery} -#### **`const stream = node.lookup(topic, [options])`** +#### **`const stream = node.lookup(topic, [options])`** {#node.lookup} Look for peers in the DHT on the given topic. The topic should be a 32-byte buffer (normally a hash of something). @@ -227,7 +227,7 @@ Any passed options are forwarded to dht-rpc. #### Methods -#### **`const stream = node.announce(topic, keyPair, [relayAddresses], [options])`** +#### **`const stream = node.announce(topic, keyPair, [relayAddresses], [options])`** {#node.announce} Announces that users are listening on a key pair to the DHT under a specific topic. An announce does a parallel lookup, so the returned stream looks like the lookup stream. @@ -237,35 +237,35 @@ Any passed options are forwarded to `dht-rpc`. > > Creating a server using `dht.createServer` automatically announces itself periodically on the key pair it is listening on. When announcing the server under a specific topic, access the nodes it is close to using `server.nodes`. -#### **`await node.unannounce(topic, keyPair, [options])`** +#### **`await node.unannounce(topic, keyPair, [options])`** {#node.unannounce} Unannounces a key pair. Any passed options are forwarded to dht-rpc. -### Mutable/Immutable Records +### Mutable/Immutable Records {#mutable-immutable-records} #### Methods -#### **`const { hash, closestNodes } = await node.immutablePut(value, [options])`** +#### **`const { hash, closestNodes } = await node.immutablePut(value, [options])`** {#node.immutableput} Stores an immutable value in the DHT. When successful, the hash of the value is returned. Any passed options are forwarded to dht-rpc. -#### **`const { value, from } = await node.immutableGet(hash, [options])`** +#### **`const { value, from } = await node.immutableGet(hash, [options])`** {#node.immutableget} Fetch an immutable value from the DHT. When successful, it returns the value corresponding to the hash. Any passed options are forwarded to dht-rpc. -#### **`const { publicKey, closestNodes, seq, signature } = await node.mutablePut(keyPair, value, [options])`** +#### **`const { publicKey, closestNodes, seq, signature } = await node.mutablePut(keyPair, value, [options])`** {#node.mutableput} Stores a mutable value in the DHT. Any passed options are forwarded to dht-rpc. -#### **`const { value, from, seq, signature } = await node.mutableGet(publicKey, [options])`** +#### **`const { value, from, seq, signature } = await node.mutableGet(publicKey, [options])`** {#node.mutableget} Fetches a mutable value from the DHT.
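The immutable record round trip described above looks roughly like this (sketch; `new DHT()` with default options is assumed, as the constructor itself is outside this excerpt):

```js
const DHT = require('hyperdht')
const node = new DHT()

// Store a value; the DHT returns the hash that identifies it.
const { hash } = await node.immutablePut(Buffer.from('hello world'))

// Any node can later fetch the value by that hash.
const { value } = await node.immutableGet(hash)
console.log(value.toString())  // 'hello world'

await node.destroy()
```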
diff --git a/building-blocks/hyperdrive.md b/building-blocks/hyperdrive.md index de006ec..bef226a 100644 --- a/building-blocks/hyperdrive.md +++ b/building-blocks/hyperdrive.md @@ -14,48 +14,48 @@ Notable features include: * [Create a new instance](hyperdrive.md#installation) * Basic: * Properties: - * [drive.corestore](hyperdrive.md#drivecorestore) - * [drive.db](hyperdrive.md#drivedb) - * [drive.core](hyperdrive.md#drivecore) - * [drive.id](hyperdrive.md#driveid) - * [drive.key](hyperdrive.md#drivekey) - * [drive.writable](hyperdrive.md#drivewritable) - * [drive.readable](hyperdrive.md#drivereadable) - * [drive.discoveryKey](hyperdrive.md#drivediscoverykey) - * [drive.contentKey](hyperdrive.md#drivecontentkey) - * [drive.version](hyperdrive.md#driveversion) - * [drive.supportsMetadata](hyperdrive.md#drivesupportsmetadata) + * [drive.corestore](hyperdrive.md#drive.corestore) + * [drive.db](hyperdrive.md#drive.db) + * [drive.core](hyperdrive.md#drive.core) + * [drive.id](hyperdrive.md#drive.id) + * [drive.key](hyperdrive.md#drive.key) + * [drive.writable](hyperdrive.md#drive.writable) + * [drive.readable](hyperdrive.md#drive.readable) + * [drive.discoveryKey](hyperdrive.md#drive.discoverykey) + * [drive.contentKey](hyperdrive.md#drive.contentkey) + * [drive.version](hyperdrive.md#drive.version) + * [drive.supportsMetadata](hyperdrive.md#drive.supportsmetadata) * Methods: - * [drive.ready()](hyperdrive.md#await-driveready) - * [drive.close()](hyperdrive.md#await-driveclose) - * [drive.put(path, buffer, \[options\])](hyperdrive.md#await-driveputpath-buffer-options) - * [drive.get(path, \[options\])](hyperdrive.md#const-buffer--await-drivegetpath-options) - * [drive.entry(path, \[options\])](hyperdrive.md#const-entry--await-driveentrypath-options) - * [drive.exists(path)](hyperdrive.md#const-exists--await-driveexistspath) - * [drive.del(path)](hyperdrive.md#await-drivedelpath) - * [drive.compare(entryA, entryB)](hyperdrive.md#const-comparison--drivecompareentrya-entryb) - * [drive.clear(path, \[options\])](hyperdrive.md#const-cleared--await-driveclearpath-options) - * [drive.clearAll(\[options\])](hyperdrive.md#const-cleared--await-driveclearalloptions) - * [drive.purge()](hyperdrive.md#await-drivepurge) - * [drive.symlink(path, linkname)](hyperdrive.md#await-drivesymlinkpath-linkname) - * [drive.batch()](hyperdrive.md#const-batch--drivebatch) - * [batch.flush()](hyperdrive.md#await-batchflush) - * [drive.list(folder, \[options\])](hyperdrive.md#const-stream--drivelistfolder-options) - * [drive.readdir(folder)](hyperdrive.md#const-stream--drivereaddirfolder) - * [drive.entries(\[range\], \[options\])](hyperdrive.md#const-stream--await-driveentriesrange-options) - * [drive.mirror(out, \[options\])](hyperdrive.md#const-mirror--drivemirrorout-options) - * [drive.watch(\[folder\])](hyperdrive.md#const-watcher--drivewatchfolder) - * [drive.createReadStream(path, \[options\])](hyperdrive.md#const-rs--drivecreatereadstreampath-options) - * [drive.createWriteStream(path, \[options\])](hyperdrive.md#const-ws--drivecreatewritestreampath-options) - * [drive.download(folder, \[options\])](hyperdrive.md#await-drivedownloadfolder-options) - * [drive.checkout(version)](hyperdrive.md#const-snapshot--drivecheckoutversion) - * [drive.diff(version, folder, \[options\])](hyperdrive.md#await-drivedownloaddiffversion-folder-options) - * [drive.downloadDiff(version, folder, \[options\])](hyperdrive.md#await-drivedownloaddiffversion-folder-options) - * [drive.downloadRange(dbRanges, 
blobRanges)](hyperdrive.md#await-drivedownloadrangedbranges-blobranges) - * [drive.findingPeers()](hyperdrive.md#const-done--drivefindingpeers) - * [drive.replicate(isInitiatorOrStream)](hyperdrive.md#const-stream--drivereplicateisinitiatororstream) - * [drive.update(\[options\])](hyperdrive.md#const-updated--await-driveupdateoptions) - * [drive.getBlobs()](hyperdrive.md#const-blobs--await-drivegetblobs) + * [drive.ready()](hyperdrive.md#drive.ready) + * [drive.close()](hyperdrive.md#drive.close) + * [drive.put(path, buffer, \[options\])](hyperdrive.md#drive.put) + * [drive.get(path, \[options\])](hyperdrive.md#drive.get) + * [drive.entry(path, \[options\])](hyperdrive.md#drive.entry) + * [drive.exists(path)](hyperdrive.md#drive.exists) + * [drive.del(path)](hyperdrive.md#drive.del) + * [drive.compare(entryA, entryB)](hyperdrive.md#drive.compare) + * [drive.clear(path, \[options\])](hyperdrive.md#drive.clear) + * [drive.clearAll(\[options\])](hyperdrive.md#drive.clearall) + * [drive.purge()](hyperdrive.md#drive.purge) + * [drive.symlink(path, linkname)](hyperdrive.md#drive.symlink) + * [drive.batch()](hyperdrive.md#drive.batch) + * [batch.flush()](hyperdrive.md#drive.flush) + * [drive.list(folder, \[options\])](hyperdrive.md#drive.list) + * [drive.readdir(folder)](hyperdrive.md#drive.readdir) + * [drive.entries(\[range\], \[options\])](hyperdrive.md#drive.entries) + * [drive.mirror(out, \[options\])](hyperdrive.md#drive.mirror) + * [drive.watch(\[folder\])](hyperdrive.md#drive.watch) + * [drive.createReadStream(path, \[options\])](hyperdrive.md#drive.createreadstream) + * [drive.createWriteStream(path, \[options\])](hyperdrive.md#drive.createwritestream) + * [drive.download(folder, \[options\])](hyperdrive.md#drive.download) + * [drive.checkout(version)](hyperdrive.md#drive.checkout) + * [drive.diff(version, folder, \[options\])](hyperdrive.md#drive.diff) + * [drive.downloadDiff(version, folder, \[options\])](hyperdrive.md#drive.downloaddiff) + * [drive.downloadRange(dbRanges, blobRanges)](hyperdrive.md#drive.downloadrange) + * [drive.findingPeers()](hyperdrive.md#drive.findingpeers) + * [drive.replicate(isInitiatorOrStream)](hyperdrive.md#drive.replicate) + * [drive.update(\[options\])](hyperdrive.md#drive.update) + * [drive.getBlobs()](hyperdrive.md#drive.getblobs) ### Installation @@ -75,53 +75,53 @@ By default, it uses the core at `{ name: 'db' }` from `store`, unless the public #### Properties -#### **`drive.corestore`** +#### **`drive.corestore`** {#drive.corestore} The Corestore instance used as storage. -#### **`drive.db`** +#### **`drive.db`** {#drive.db} The underlying Hyperbee backing the drive file structure. -#### **`drive.core`** +#### **`drive.core`** {#drive.core} The Hypercore used for `drive.db`. -#### **`drive.id`** +#### **`drive.id`** {#drive.id} String containing the id (z-base-32 of the public key) identifying this drive. -#### **`drive.key`** +#### **`drive.key`** {#drive.key} The public key of the Hypercore backing the drive. -#### **`drive.writable`** +#### **`drive.writable`** {#drive.writable} Boolean indicating if we can write or delete data in this drive. -#### **`drive.readable`** +#### **`drive.readable`** {#drive.readable} Boolean indicating if we can read from this drive. After closing the drive this will be `false`. -#### **`drive.discoveryKey`** +#### **`drive.discoveryKey`** {#drive.discoverykey} The hash of the public key of the Hypercore backing the drive. It can be used as a `topic` to seed the drive using Hyperswarm.
-#### **`drive.contentKey`** +#### **`drive.contentKey`** {#drive.contentkey} The public key of the [Hyperblobs](https://github.com/holepunchto/hyperblobs) instance holding blobs associated with entries in the drive. -#### **`drive.version`** +#### **`drive.version`** {#drive.version} The number that indicates how many modifications were made, it is useful as a version identifier. -#### **`drive.supportsMetadata`** +#### **`drive.supportsMetadata`** {#drive.supportsmetadata} Boolean indicating if the drive handles or not metadata. Always `true`. #### Methods -#### **`await drive.ready()`** +#### **`await drive.ready()`** {#drive.ready} Waits until the internal state is loaded. @@ -129,15 +129,15 @@ Use it once before reading synchronous properties like `drive.discoveryKey`. If any of the other APIs are called first they will wait for readiness so this is only needed to lookup synchronous properties before any API call. -#### **`await drive.close()`** +#### **`await drive.close()`** {#drive.close} Fully close this drive, including its underlying Hypercore backed data structures. -#### **`await drive.put(path, buffer, [options])`** +#### **`await drive.put(path, buffer, [options])`** {#drive.put} Creates a file at `path` in the drive. `options` are the same as in `createWriteStream`. -#### **`const buffer = await drive.get(path, [options])`** +#### **`const buffer = await drive.get(path, [options])`** {#drive.get} Returns the blob at `path` in the drive. If no blob exists, returns `null`. @@ -153,7 +153,7 @@ It also returns `null` for symbolic links. } ``` -#### **`const entry = await drive.entry(path, [options])`** +#### **`const entry = await drive.entry(path, [options])`** {#drive.entry} Returns the entry at `path` in the drive. It looks like this: @@ -185,21 +185,21 @@ Returns the entry at `path` in the drive. It looks like this: } ``` -#### `const exists = await drive.exists(path)` +#### `const exists = await drive.exists(path)` {#drive.exists} Returns `true` if the entry at `path` does exists, otherwise `false`. -#### **`await drive.del(path)`** +#### **`await drive.del(path)`** {#drive.del} Deletes the file at `path` from the drive. > ℹ️ The underlying blob is not deleted, only the reference in the file structure. -#### **`const comparison = drive.compare(entryA, entryB)`** +#### **`const comparison = drive.compare(entryA, entryB)`** {#drive.compare} Returns `0` if entries are the same, `1` if `entryA` is older, and `-1` if `entryB` is older. -#### **`const cleared = await drive.clear(path, [options])`** +#### **`const cleared = await drive.clear(path, [options])`** {#drive.clear} Deletes the blob from storage to free up space, but the file structure reference is kept. @@ -209,7 +209,7 @@ Deletes the blob from storage to free up space, but the file structure reference | ----------------- | --------------------------------------------------------------------- | ------- | ------- | | **`diff`** | Returned `cleared` bytes object is null unless enabled | Boolean | `false` | -#### `const cleared = await drive.clearAll([options])` +#### `const cleared = await drive.clearAll([options])` {#drive.clearall} Deletes all the blobs from storage to free up space, similar to how `drive.clear()` works. 
@@ -219,25 +219,25 @@ Deletes all the blobs from storage to free up space, similar to how `drive.clear | ----------------- | --------------------------------------------------------------------- | ------- | ------- | | **`diff`** | Returned `cleared` bytes object is null unless enabled | Boolean | `false` | -#### `await drive.purge()` +#### `await drive.purge()` {#drive.purge} Purges both cores (db and blobs) from storage, completely removing all the drive's data. -#### **`await drive.symlink(path, linkname)`** +#### **`await drive.symlink(path, linkname)`** {#drive.symlink} Creates an entry in drive at `path` that points to the entry at `linkname`. If a blob entry currently exists at `path` then it will get overwritten and `drive.get(key)` will return `null`, while `drive.entry(key)` will return the entry with symlink information. -#### **`const batch = drive.batch()`** +#### **`const batch = drive.batch()`** {#drive.batch} Useful for atomically mutating the drive, has the same interface as Hyperdrive. -#### **`await batch.flush()`** +#### **`await batch.flush()`** {#drive.flush} Commit a batch of mutations to the underlying drive. -#### **`const stream = drive.list(folder, [options])`** +#### **`const stream = drive.list(folder, [options])`** {#drive.list} Returns a stream of all entries in the drive at paths prefixed with `folder`. @@ -247,23 +247,23 @@ Returns a stream of all entries in the drive at paths prefixed with `folder`. | --------------- | --------------------------------------------- | ------- | ------- | | **`recursive`** | whether to descend into all subfolders or not | Boolean | `true` | -#### **`const stream = drive.readdir(folder)`** +#### **`const stream = drive.readdir(folder)`** {#drive.readdir} Returns a stream of all subpaths of entries in the drive stored at paths prefixed by `folder`. -#### **`const stream = await drive.entries([range], [options])`** +#### **`const stream = await drive.entries([range], [options])`** {#drive.entries} Returns a read stream of entries in the drive. `options` are the same as `Hyperbee().createReadStream([range], [options])`. -#### **`const mirror = drive.mirror(out, [options])`** +#### **`const mirror = drive.mirror(out, [options])`** {#drive.mirror} Mirrors this drive into another. Returns a [`MirrorDrive`](../helpers/mirrordrive.md) instance constructed with `options`. Call `await mirror.done()` to wait for the mirroring to finish. -#### **`const watcher = drive.watch([folder])`** +#### **`const watcher = drive.watch([folder])`** {#drive.watch} Returns an iterator that listens on `folder` to yield changes, by default on `/`. @@ -291,7 +291,7 @@ Waits until the watcher is loaded and detecting changes. Stops the watcher. I can also be stopped by using `break` in the `for await` loop. -#### **`const rs = drive.createReadStream(path, [options])`** +#### **`const rs = drive.createReadStream(path, [options])`** {#drive.createreadstream} Returns a stream to read out the blob stored in the drive at `path`. @@ -307,7 +307,7 @@ Returns a stream to read out the blob stored in the drive at `path`. } ``` -#### **`const ws = drive.createWriteStream(path, [options])`** +#### **`const ws = drive.createWriteStream(path, [options])`** {#drive.createwritestream} Stream a blob into the drive at `path`. @@ -318,17 +318,17 @@ Stream a blob into the drive at `path`. 
-#### **`const rs = drive.createReadStream(path, [options])`**
+#### **`const rs = drive.createReadStream(path, [options])`** {#drive.createreadstream}

Returns a stream to read out the blob stored in the drive at `path`.

@@ -307,7 +307,7 @@ Returns a stream to read out the blob stored in the drive at `path`.

}
```

-#### **`const ws = drive.createWriteStream(path, [options])`**
+#### **`const ws = drive.createWriteStream(path, [options])`** {#drive.createwritestream}

Stream a blob into the drive at `path`.

@@ -318,17 +318,17 @@

| **`executable`** | whether the blob is executable or not                     | Boolean | `true`  |
| **`metadata`**   | Extended file information, i.e., an arbitrary JSON value  | Object  | `null`  |

-#### **`await drive.download(folder, [options])`**
+#### **`await drive.download(folder, [options])`** {#drive.download}

Downloads the blobs corresponding to all entries in the drive at paths prefixed with `folder`.

`options` are the same as those for `drive.list(folder, [options])`.

-#### **`const snapshot = drive.checkout(version)`**
+#### **`const snapshot = drive.checkout(version)`** {#drive.checkout}

Gets a read-only snapshot of a previous version.

-#### **`const stream = drive.diff(version, folder, [options])`**
+#### **`const stream = drive.diff(version, folder, [options])`** {#drive.diff}

Creates a stream of shallow changes to `folder` between `version` and `drive.version`.

@@ -343,23 +343,23 @@ Each entry is sorted by key and looks like this:

> ℹ️ If an entry exists in `drive.version` of the `folder` but not in `version`, then `left` is set and `right` will be `null`, and vice versa.

-#### **`await drive.downloadDiff(version, folder, [options])`**
+#### **`await drive.downloadDiff(version, folder, [options])`** {#drive.downloaddiff}

Downloads all the blobs in `folder` corresponding to entries in `drive.checkout(version)` that are not in `drive.version`.

In other words, downloads all the blobs added to `folder` up to `version` of the drive.

-#### **`await drive.downloadRange(dbRanges, blobRanges)`**
+#### **`await drive.downloadRange(dbRanges, blobRanges)`** {#drive.downloadrange}

Downloads the entries and blobs stored in the [ranges](https://github.com/holepunchto/hypercore#const-range--coredownloadrange) `dbRanges` and `blobRanges`.

-#### **`const done = drive.findingPeers()`**
+#### **`const done = drive.findingPeers()`** {#drive.findingpeers}

Indicates to Hyperdrive that users are finding peers in the background; requests will be held until this is done.

Call `done()` when the current discovery iteration is done, i.e., after `swarm.flush()` finishes.

-#### **`const stream = drive.replicate(isInitiatorOrStream)`**
+#### **`const stream = drive.replicate(isInitiatorOrStream)`** {#drive.replicate}

Usage example:

@@ -373,7 +373,7 @@ swarm.flush().then(done, done)

Learn more about how replicate works at [corestore.replicate](https://github.com/holepunchto/corestore#const-stream--storereplicateoptsorstream).

-#### **`const updated = await drive.update([options])`**
+#### **`const updated = await drive.update([options])`** {#drive.update}

Waits for initial proof of the new drive version until all `findingPeers` are done.

@@ -387,7 +387,7 @@ Waits for initial proof of the new drive version until all `findingPeers` are do

Use `drive.findingPeers()` or `{ wait: true }` to make `await drive.update()` blocking.

-#### **`const blobs = await drive.getBlobs()`**
+#### **`const blobs = await drive.getBlobs()`** {#drive.getblobs}

Returns the [Hyperblobs](https://github.com/holepunchto/hyperblobs) instance storing the blobs indexed by drive entries.
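The `findingPeers`, `replicate`, and `update` entries above interact: `update()` waits until every registered `findingPeers()` call has been marked done. A sketch of that flow, assuming the `drive` from the earlier sketches and a Hyperswarm instance:

```javascript
import Hyperswarm from 'hyperswarm'

const swarm = new Hyperswarm()

// Tell the drive that peer discovery is running in the background,
// so requests (and drive.update()) wait instead of resolving early.
const done = drive.findingPeers()

swarm.on('connection', (socket) => drive.replicate(socket))
swarm.join(drive.discoveryKey)
swarm.flush().then(done, done)   // mark discovery as done once the swarm has flushed

const updated = await drive.update()
console.log('newer version found?', updated, '- now at version', drive.version)
```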
diff --git a/building-blocks/hyperswarm.md b/building-blocks/hyperswarm.md
index 4973abe..350da78 100644
--- a/building-blocks/hyperswarm.md
+++ b/building-blocks/hyperswarm.md
@@ -16,35 +16,35 @@ Notable features include:

* [Create a new instance](hyperswarm.md#installation)
* Basic:
  * Properties:
-    * [swarm.connecting](hyperswarm.md#swarmconnecting)
-    * [swarm.connections](hyperswarm.md#swarmconnections)
-    * [swarm.peers](hyperswarm.md#swarmpeers)
-    * [swarm.dht](hyperswarm.md#swarmdht)
+    * [swarm.connecting](hyperswarm.md#swarm.connecting)
+    * [swarm.connections](hyperswarm.md#swarm.connections)
+    * [swarm.peers](hyperswarm.md#swarm.peers)
+    * [swarm.dht](hyperswarm.md#swarm.dht)
  * Methods:
    * [swarm.join(topic, [options])](hyperswarm.md#const-discovery--swarmjointopic-options)
  * Events:
-    * [connection](hyperswarm.md#swarmonconnection-socket-peerinfo)
-    * [update](hyperswarm.md#swarmonupdate)
+    * [connection](hyperswarm.md#swarm.onconnection)
+    * [update](hyperswarm.md#swarm.onupdate)
* [Clients and Servers:](hyperswarm.md#clients-and-servers)
  * Methods:
-    * [swarm.leave(topic)](hyperswarm.md#await-swarmleavetopic)
-    * [swarm.joinPeer(noisePublicKey)](hyperswarm.md#swarmjoinpeernoisepublickey)
-    * [swarm.leavePeer(noisePublicKey)](hyperswarm.md#swarmleavepeernoisepublickey)
-    * [swarm.status(topic)](hyperswarm.md#const-discovery--swarmstatustopic)
-    * [swarm.listen()](hyperswarm.md#await-swarmlisten)
-    * [swarm.flush()](hyperswarm.md#await-swarmflush)
+    * [swarm.leave(topic)](hyperswarm.md#swarm.leave)
+    * [swarm.joinPeer(noisePublicKey)](hyperswarm.md#swarm.joinpeer)
+    * [swarm.leavePeer(noisePublicKey)](hyperswarm.md#swarm.leavepeer)
+    * [swarm.status(topic)](hyperswarm.md#swarm.status)
+    * [swarm.listen()](hyperswarm.md#swarm.listen)
+    * [swarm.flush()](hyperswarm.md#swarm.flush)
* [Peer info:](hyperswarm.md#peerinfo)
  * Properties:
-    * [peerInfo.publicKey](hyperswarm.md#peerinfopublickey)
-    * [peerInfo.topics](hyperswarm.md#peerinfotopics)
-    * [peerInfo.prioritized](hyperswarm.md#peerinfoprioritized)
+    * [peerInfo.publicKey](hyperswarm.md#peerinfo.publickey)
+    * [peerInfo.topics](hyperswarm.md#peerinfo.topics)
+    * [peerInfo.prioritized](hyperswarm.md#peerinfo.prioritized)
  * Methods:
-    * [peerInfo.ban(banStatus = false)](hyperswarm.md#peerinfobanbanstatus--false)
+    * [peerInfo.ban(banStatus = false)](hyperswarm.md#peerinfo.ban)
* [Peer Discovery:](hyperswarm.md#peer-discovery)
  * Methods:
-    * [discovery.flushed()](hyperswarm.md#await-discoveryflushed)
-    * [discovery.refresh({ client, server })](hyperswarm.md#await-discoveryrefresh-client-server)
-    * [discovery.destroy()](hyperswarm.md#await-discoverydestroy)
+    * [discovery.flushed()](hyperswarm.md#discovery.flushed)
+    * [discovery.refresh({ client, server })](hyperswarm.md#discovery.refresh)
+    * [discovery.destroy()](hyperswarm.md#discovery.destroy)

### Installation

@@ -72,27 +72,27 @@ The following table describes the properties of the optional `options` object.

#### **Properties:**

-#### **`swarm.connecting`**
+#### **`swarm.connecting`** {#swarm.connecting}

A number that indicates how many connections are currently in progress.

-#### **`swarm.connections`**
+#### **`swarm.connections`** {#swarm.connections}

A set of all active client/server connections.

-#### **`swarm.peers`**
+#### **`swarm.peers`** {#swarm.peers}

A Map containing all connected peers, of the form: `(Noise public key hex string) -> PeerInfo object`

See the [`PeerInfo`](hyperswarm.md#peerinfo) API for more details.
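These properties are easiest to observe from the `update` event documented below. A tiny sketch, assuming an existing `swarm` instance:

```javascript
swarm.on('update', () => {
  console.log('connections in progress:', swarm.connecting)
  console.log('open connections:', swarm.connections.size)
  for (const [publicKeyHex, peerInfo] of swarm.peers) {
    console.log('peer', publicKeyHex, 'prioritized?', peerInfo.prioritized)
  }
})
```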
-#### **`swarm.dht`**
+#### **`swarm.dht`** {#swarm.dht}

A [`HyperDHT`](./hyperdht.md) instance. Useful for lower-level control over Hyperswarm's networking.

#### Methods

-#### **`const discovery = swarm.join(topic, [options])`**
+#### **`const discovery = swarm.join(topic, [options])`** {#swarm.join}

Returns a [`PeerDiscovery`](hyperswarm.md#peer-discovery) object.

@@ -111,7 +111,7 @@ Start discovering and connecting to peers sharing a common topic. As new peers a

#### Events

-#### **`swarm.on('connection', (socket, peerInfo) => {})`**
+#### **`swarm.on('connection', (socket, peerInfo) => {})`** {#swarm.onconnection}

Emitted whenever the swarm connects to a new peer.

@@ -119,13 +119,13 @@ Emitted whenever the swarm connects to a new peer.

`peerInfo` is a [`PeerInfo`](hyperswarm.md#peerinfo) instance.

-#### `swarm.on('update', () => {})`
+#### `swarm.on('update', () => {})` {#swarm.onupdate}

Emitted when internal values change; useful for user interfaces.

> For instance, the 'update' event is emitted when `swarm.connecting` or `swarm.connections` changes.

-### **Clients and Servers**
+### **Clients and Servers** {#clients-and-servers}

In Hyperswarm, there are two ways for peers to join the swarm: client mode and server mode. Previously in Hyperswarm v2, these were called 'lookup' and 'announce', but we now think 'client' and 'server' are more descriptive.

@@ -135,7 +135,7 @@ When user joins a topic as a client, the swarm will do a query to discover avail

#### Methods

-#### **`await swarm.leave(topic)`**
+#### **`await swarm.leave(topic)`** {#swarm.leave}

Stop discovering peers for the given topic.

@@ -147,7 +147,7 @@ Stop discovering peers for the given topic.

`leave` will **not** close any existing connections.

-#### **`swarm.joinPeer(noisePublicKey)`**
+#### **`swarm.joinPeer(noisePublicKey)`** {#swarm.joinpeer}

Establish a direct connection to a known peer.

@@ -155,7 +155,7 @@ Establish a direct connection to a known peer.

As with the standard `join` method, `joinPeer` will ensure that peer connections are reestablished in the event of failures.

-#### **`swarm.leavePeer(noisePublicKey)`**
+#### **`swarm.leavePeer(noisePublicKey)`** {#swarm.leavepeer}

Stops attempting direct connections to a known peer.

@@ -163,15 +163,15 @@ Stops attempting direct connections to a known peer.

> If a direct connection is already established, that connection will **not** be destroyed by `leavePeer`.

-#### **`const discovery = swarm.status(topic)`**
+#### **`const discovery = swarm.status(topic)`** {#swarm.status}

Gets the `PeerDiscovery` object associated with the topic, if it exists.

-#### **`await swarm.listen()`**
+#### **`await swarm.listen()`** {#swarm.listen}

Explicitly starts listening for incoming connections. This will be called internally after the first `join`, so it rarely needs to be called manually.

-#### **`await swarm.flush()`**
+#### **`await swarm.flush()`** {#swarm.flush}

Waits for any pending DHT announcements, and for the swarm to connect to any pending peers (peers that have been discovered, but are still in the queue awaiting processing).

@@ -179,7 +179,7 @@ Once a `flush()` has completed, the swarm will have connected to every peer it c

> `flush()` is not topic-specific, so it will wait for every pending DHT operation and connection to be processed -- it's quite heavyweight, so it could take a while. In most cases, it's not necessary, as connections are emitted by `swarm.on('connection')` immediately after they're opened.
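A compact sketch of the client/server flow described above, assuming a fixed 32-byte topic (the topic contents here are illustrative only):

```javascript
import Hyperswarm from 'hyperswarm'

const swarm = new Hyperswarm()

swarm.on('connection', (socket, peerInfo) => {
  // socket is an end-to-end encrypted Duplex stream to the remote peer
  console.log('connected to', peerInfo.publicKey.toString('hex'))
  socket.write('hello')
  socket.on('data', (data) => console.log('received', data.toString()))
})

const topic = Buffer.alloc(32).fill('example-topic')   // topics must be 32 bytes

// Join in both client and server mode, then wait for pending connections.
const discovery = swarm.join(topic, { client: true, server: true })
await swarm.flush()
```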
-### PeerInfo
+### PeerInfo {#peerinfo}

`swarm.on('connection', ...)` emits a `PeerInfo` instance whenever a new connection is established.

@@ -187,21 +187,21 @@ There is a one-to-one relationship between connections and `PeerInfo` objects --

#### **Properties:**

-#### **`peerInfo.publicKey`**
+#### **`peerInfo.publicKey`** {#peerinfo.publickey}

The peer's Noise public key.

-#### **`peerInfo.topics`**
+#### **`peerInfo.topics`** {#peerinfo.topics}

An Array of topics that this peer is associated with -- `topics` will only be updated when the peer is in client mode.

-#### **`peerInfo.prioritized`**
+#### **`peerInfo.prioritized`** {#peerinfo.prioritized}

If true, the swarm will rapidly attempt to reconnect to this peer.

#### **Methods:**

-#### **`peerInfo.ban(banStatus = false)`**
+#### **`peerInfo.ban(banStatus = false)`** {#peerinfo.ban}

Ban or unban the peer. Banning will prevent any future reconnection attempts, but it will **not** close any existing connections.

@@ -211,15 +211,15 @@ Ban or unban the peer. Banning will prevent any future reconnection attempts, bu

#### Methods

-#### **`await discovery.flushed()`**
+#### **`await discovery.flushed()`** {#discovery.flushed}

Waits until the topic has been fully announced to the DHT. This method is only relevant in server mode. When `flushed()` has completed, the server will be available to the network.

-#### **`await discovery.refresh({ client, server })`**
+#### **`await discovery.refresh({ client, server })`** {#discovery.refresh}

Updates the `PeerDiscovery` configuration, optionally toggling client and server modes. This will also trigger an immediate re-announce of the topic when the `PeerDiscovery` is in server mode.

-#### **`await discovery.destroy()`**
+#### **`await discovery.destroy()`** {#discovery.destroy}

Stops discovering peers for the given topic.
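A final sketch tying `PeerInfo` and `PeerDiscovery` together; `swarm` and `topic` are assumed from the previous sketch, and `isMisbehaving` is a hypothetical application-level check:

```javascript
swarm.on('connection', (socket, peerInfo) => {
  // Banning blocks future reconnection attempts but leaves the current
  // connection open, so close the socket explicitly as well.
  if (isMisbehaving(peerInfo)) {   // hypothetical check, not part of Hyperswarm
    peerInfo.ban(true)
    socket.destroy()
  }
})

const discovery = swarm.join(topic, { server: true, client: false })

await discovery.flushed()                                 // topic fully announced to the DHT
await discovery.refresh({ server: true, client: true })   // toggle modes and re-announce
await discovery.destroy()                                 // stop discovering peers for this topic
```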
diff --git a/lib/docs-viewer.js b/lib/docs-viewer.js
index 78cec9e..0265e26 100644
--- a/lib/docs-viewer.js
+++ b/lib/docs-viewer.js
@@ -8,6 +8,26 @@
[vendored, minified marked.js bundle (https://github.com/markedjs/marked); minified body omitted]

<p>An error occurred:</p><pre>' + D(e.message + '', !0) + '</pre>', n ? Promise.resolve(t) : r ? void r(null, t) : t; if (n) return Promise.reject(e); if (!r) throw e; r(e) } } const R = new d(r.defaults); function I (e, t, u) { return R.parse(e, t, u) }I.options = I.setOptions = function (e) { return R.setOptions(e), n(I.defaults = R.defaults), I }, I.getDefaults = u, I.defaults = r.defaults, I.use = function () { return R.use.apply(R, arguments), n(I.defaults = R.defaults), I }, I.walkTokens = function (e, t) { return R.walkTokens(e, t) }, I.parseInline = R.parseInline, I.Parser = z, I.parser = z.parse, I.Renderer = y, I.TextRenderer = v, I.Lexer = B, I.lexer = B.lex, I.Tokenizer = E, I.Slugger = _, I.Hooks = $; const V = (I.parse = I).options; const J = I.setOptions; const K = I.use; const W = I.walkTokens; const Y = I.parseInline; const ee = I; const te = z.parse; const ue = B.lex; r.Hooks = $, r.Lexer = B, r.Marked = d, r.Parser = z, r.Renderer = y, r.Slugger = _, r.TextRenderer = v, r.Tokenizer = E, r.getDefaults = u, r.lexer = ue, r.marked = I, r.options = V, r.parse = ee, r.parseInline = Y, r.parser = te, r.setOptions = J, r.use = K, r.walkTokens = W })) /* eslint-enable */
+
+// from https://github.com/markedjs/marked-custom-heading-id
+
+function customHeadingId () {
+  return {
+    renderer: {
+      heading (text, level, raw, slugger) {
+        const headingIdRegex = /(?: +|^)\{#([a-z][\w-.]*)\}(?: +|$)/i
+        const hasId = text.match(headingIdRegex)
+        if (!hasId) {
+          // no explicit {#id} marker: fall back to marked's default heading renderer
+          return false
+        }
+        // strip the {#id} marker from the heading text and emit the id explicitly
+        return `<h${level} id="${hasId[1]}">${text.replace(headingIdRegex, '')}</h${level}>\n`
+      }
+    }
+  }
+}
+
+marked.use(customHeadingId())
+
 customElements.define('docs-viewer', class extends HTMLElement {
   router = null
   connectedCallback () {
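+    // Illustrative note (added for clarity; not part of the original file):
+    // with customHeadingId() registered above, a markdown heading such as
+    //   #### **`base.inputs`** {#base.inputs}
+    // renders as roughly <h4 id="base.inputs">...</h4>, so table-of-contents
+    // links of the form autobase.md#base.inputs resolve to the intended heading.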