From 6e56fce460f539a0058b0d6415e97120ca6c8496 Mon Sep 17 00:00:00 2001 From: Julia Browne Date: Fri, 15 Mar 2024 21:04:40 -0700 Subject: [PATCH 1/8] Added Jon's new files to this new PR --- ...venting-handler-advancedMutateInArray.adoc | 84 ++++++++ ...venting-handler-advancedMutateInField.adoc | 91 ++++++++ ...venting-handler-advancedSelfRecursion.adoc | 199 ++++++++++++++++++ .../eventing-handler-advancedTouchOp.adoc | 119 +++++++++++ 4 files changed, 493 insertions(+) create mode 100644 modules/eventing/pages/eventing-handler-advancedMutateInArray.adoc create mode 100644 modules/eventing/pages/eventing-handler-advancedMutateInField.adoc create mode 100644 modules/eventing/pages/eventing-handler-advancedSelfRecursion.adoc create mode 100644 modules/eventing/pages/eventing-handler-advancedTouchOp.adoc diff --git a/modules/eventing/pages/eventing-handler-advancedMutateInArray.adoc b/modules/eventing/pages/eventing-handler-advancedMutateInArray.adoc new file mode 100644 index 00000000..17c847be --- /dev/null +++ b/modules/eventing/pages/eventing-handler-advancedMutateInArray.adoc @@ -0,0 +1,84 @@ += Function: Advanced Sub-Document MUTATEIN Array Operation +:description: pass:q[Perform the Advanced Sub-Document MUTATEIN operation on an array where Eventing interacts with the Data Service.] +:page-edition: Enterprise Edition +:tabs: + +{description} + +The `advancedMutateInArray` function: + +* Demonstrates the CAS-free Sub-Document MUTATEIN operation on a document array field +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation where the `meta.id` or KEY is `combine_landmark_names` + +For example, you can generate an input document with the KEY `combine_landmark_names` and the DATA `{ "id": "combine_landmark_names", "landmark_names": [] }`, then set the number of workers in the Eventing Function's setting to 18. +Running the Function adds 4,495 landmark names to an array without conflict and in no particular order. + +For more information about the Advanced Self-Recursion Parameter, see xref:eventing-advanced-keyspace-accessors.adoc#sub-document-mutatein-operation[Sub-Document MUTATEIN Operation]. + +[{tabs}] +==== +advancedMutateInArray:: ++ +-- +[source,javascript] +---- +// Configure the settings for the advancedMutateInArray function as follows: +// +// Version 7.6+ +// "Function Scope" +// *.* (or try bulk.data if non-privileged) +// "Listen to Location" +// travel-sample.inventory.landmark +// "Eventing Storage" +// rr100.eventing.metadata +// Binding(s) +// 1. 
"binding type", "alias name...", "bucket.scope.collection", "Access" +// "bucket alias", "dst_col", "bulk.data.source", "read and write" + +function OnUpdate(doc, meta) { + var accum_meta = {"id": "combine_landmark_names" }; + couchbase.mutateIn(dst_col, accum_meta, [ + couchbase.MutateInSpec.arrayAppend("landmark_names", doc.name), + ]); +} +---- +-- + +Input data before deployment:: ++ +-- +[source,json] +---- +INPUT: KEY combine_landmark_names + +{ + "id": "combine_landmark_names", + "landmark_names": [] +} +---- +-- + +Output data after deployment:: ++ +-- +[source,json] +---- +OUTPUT: KEY combine_landmark_names + +{ + "id": "combine_landmark_names", + "landmark_names": [ + "Gabriel's Wharf", + "Blue Bear Performance Hall", + "Circle Bar", + *** 4490 lines removed *** + "Quarry Bank Mill & Styal Estate", + "Mad Cat Brewery", + "Casbah Café" + ] +} +---- +-- +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedMutateInField.adoc b/modules/eventing/pages/eventing-handler-advancedMutateInField.adoc new file mode 100644 index 00000000..bd98eea2 --- /dev/null +++ b/modules/eventing/pages/eventing-handler-advancedMutateInField.adoc @@ -0,0 +1,91 @@ += Function: Advanced Sub-Document MUTATEIN Operation +:description: pass:q[Perform the Advanced Sub-Document MUTATEIN operation on a field where Eventing interacts with the Data Service.] +:page-edition: Enterprise Edition +:tabs: + +{description} + +The `advancedMutateInField` function: + +* Demonstrates the CAS-free Sub-Document MUTATEIN operation on a document field +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation where the `meta.id` or KEY starts with `mutateinfield:` + +For more information about the Advanced Self-Recursion Parameter, see xref:eventing-advanced-keyspace-accessors.adoc#sub-document-mutatein-operation[Sub-Document MUTATEIN Operation]. + +[{tabs}] +==== +advancedMutateInField:: ++ +-- +[source,javascript] +---- +// Configure the settings for the advancedMutateInField function as follows: +// +// Version 7.6+ +// "Function Scope" +// *.* (or try bulk.data if non-privileged) +// "Listen to Location" +// bulk.data.source +// "Eventing Storage" +// rr100.eventing.metadata +// Binding(s) +// 1. 
"binding type", "alias name...", "bucket.scope.collection", "Access" +// "bucket alias", "src_col", "bulk.data.source", "read and write" + +function OnUpdate(doc, meta) { + if (meta.id.startsWith("mutateinfield:") === false) return; + + var meta = { "id": meta.id }; + var res; + var opcnt = 1; + + res = + couchbase.mutateIn(src_col, meta, [ + couchbase.MutateInSpec.insert("testField", "insert") + ]); + log(opcnt++,res); + + res = + couchbase.mutateIn(src_col, meta, [ + couchbase.MutateInSpec.replace("testField", "replace") + ]); + log(opcnt++,res); + + res = + couchbase.mutateIn(src_col, meta, [ + couchbase.MutateInSpec.remove("testField") + ]); + log(opcnt++,res); +} +---- +-- + +Input data:: ++ +-- +[source,json] +---- +INPUT: KEY mutateinfield:001 + +{ + "id": "mutateinfield:001", +} + +---- +-- + +Output data:: ++ +-- +[source,json] +---- +2024-03-15T14:42:53.314-07:00 [INFO] 1 {"meta":{"id":"mutateinfield:001","cas":"1710538973313433600"},"success":true} + +2024-03-15T14:42:53.316-07:00 [INFO] 2 {"meta":{"id":"mutateinfield:001","cas":"1710538973315596288"},"success":true} + +2024-03-15T14:42:53.317-07:00 [INFO] 3 {"meta":{"id":"mutateinfield:001","cas":"1710538973316841472"},"success":true} +---- +-- +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedSelfRecursion.adoc b/modules/eventing/pages/eventing-handler-advancedSelfRecursion.adoc new file mode 100644 index 00000000..613427e5 --- /dev/null +++ b/modules/eventing/pages/eventing-handler-advancedSelfRecursion.adoc @@ -0,0 +1,199 @@ += Function: Advanced Self-Recursion Parameter +:description: pass:q[Perform the Advanced Self-Recursion parameter where Eventing interacts with the Data Service.] +:page-edition: Enterprise Edition +:tabs: + +{description} + +The `advancedSelfRecursion` function: + +* Performs the Advanced Self-Recursion parameter +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation where the `meta.id` or KEY starts with `doquery:` + +For more information about the Advanced Self-Recursion Parameter, see xref:eventing-advanced-keyspace-accessors.adoc#optional-params-recursion[Optional { "self_recursion": true }` Parameter]. + +The following example shows you how to stop and restart a long-running process like a N1QL query. +It counts the number of hotels that start with a particular letter. + +[{tabs}] +==== +advancedSelfRecursion:: ++ +-- +[source,javascript] +---- +// Configure the settings for the advancedSelfRecursion function as follows: +// +// Version 7.6+ +// "Function Scope" +// *.* (or try bulk.data if non-privileged) +// "Listen to Location" +// bulk.data.source +// "Eventing Storage" +// rr100.eventing.metadata +// Binding(s) +// 1. "binding type", "alias name...", "bucket.scope.collection", "Access" +// "bucket alias", "src_col", "bulk.data.source", "read and write" +// +// You must have the sample dataset travel-sample installed + +function OnUpdate(doc, meta) { + if ( meta.id.startsWith("doquery:") === false ) return; + if (doc.done && doc.done === true) return; + + if (! 
doc.continue) { + doc.queryBeg = new Date(); + doc.queryCnt = 0; + doc.currentQueryOffset = 0; + doc.namesProcessed = 0; + doc.letterHash = {}; + log(meta.id,'Query initialized at ' + doc.queryBeg); + } + + var offset = doc.currentQueryOffset; + var results = + SELECT name + FROM `travel-sample`.`inventory`.`hotel` + LIMIT 100 + OFFSET $offset; + + doc.queryCnt++; + doc.currentQueryOffset = doc.currentQueryOffset + 100; + + var loopCnt = 0; + for (var item of results) { + loopCnt++; + doc.namesProcessed++; + var name = item.name; + if (name && name.length > 0) { + // Extract the first character and convert it to lowercase + var firstChar = name[0].toLowerCase(); + + // If the letter exists in the hash, increment its count. Otherwise initialize it to 1. + if (doc.letterHash[firstChar]) { + doc.letterHash[firstChar]++; + } else { + doc.letterHash[firstChar] = 1; + } + } + } + results.close(); + + if (loopCnt < 100) { + // we are done + if (doc.continue) delete doc.continue + doc.done = true; + doc.queryEnd = new Date(); + log(meta.id,'Query cnt complete mutations ' + doc.queryCnt + ' namesProcessed ' + doc.namesProcessed ); + log(meta.id,'Query completed at ' + doc.queryEnd); + log(meta.id,'Result hotels starting with "a" ' + doc.letterHash['a'] + ', hotels starting with "b" ' + doc.letterHash['b'] + ', ...'); + // no self recursion + src_col[meta.id] = doc; + } else { + // we are not done + doc.continue = true; + log(meta.id,'Query cnt in progress mutations ' + doc.queryCnt + ' namesProcessed ' + doc.namesProcessed ); + // using self recursion results in a continuation of the query + couchbase.upsert(src_col, meta, doc, { "self_recursion": true }); + } +} +---- +-- + +Input data:: ++ +-- +[source,json] +---- +INPUT: KEY doquery:001 + +{ + "id": "doquery:001" +} + +---- +-- + +Output data:: ++ +-- +[source,json] +---- +OUTPUT: KEY doquery:001 +{ + "id": "doquery:001", + "queryBeg": "2024-03-15T21:07:38.114Z", + "queryCnt": 10, + "currentQueryOffset": 1000, + "namesProcessed": 917, + "letterHash": { + "1": 1, + "5": 2, + "8": 1, + "m": 58, + "t": 127, + "l": 41, + "g": 25, + "w": 27, + "a": 33, + "b": 48, + "r": 35, + "h": 168, + "n": 19, + "o": 15, + "p": 41, + "s": 64, + "c": 84, + "i": 23, + "u": 8, + "k": 15, + "j": 7, + "'": 1, + "e": 16, + "d": 21, + "q": 4, + "f": 16, + "y": 5, + "v": 12 + }, + "done": true, + "queryEnd": "2024-03-15T21:07:38.425Z" +} +---- +-- + +Output log:: ++ +-- +[source,json] +---- +2024-03-15T14:07:38.116-07:00 [INFO] "doquery:001" "Query initialized at Fri Mar 15 2024 14:07:38 GMT-0700 (Pacific Daylight Time)" + +2024-03-15T14:07:38.159-07:00 [INFO] "doquery:001" "Query cnt in progress mutations 1 namesProcessed 100" + +2024-03-15T14:07:38.175-07:00 [INFO] "doquery:001" "Query cnt in progress mutations 2 namesProcessed 200" + +2024-03-15T14:07:38.191-07:00 [INFO] "doquery:001" "Query cnt in progress mutations 3 namesProcessed 300" + +2024-03-15T14:07:38.204-07:00 [INFO] "doquery:001" "Query cnt in progress mutations 4 namesProcessed 400" + +2024-03-15T14:07:38.217-07:00 [INFO] "doquery:001" "Query cnt in progress mutations 5 namesProcessed 500" + +2024-03-15T14:07:38.351-07:00 [INFO] "doquery:001" "Query cnt in progress mutations 6 namesProcessed 600" + +2024-03-15T14:07:38.376-07:00 [INFO] "doquery:001" "Query cnt in progress mutations 7 namesProcessed 700" + +2024-03-15T14:07:38.396-07:00 [INFO] "doquery:001" "Query cnt in progress mutations 8 namesProcessed 800" + +2024-03-15T14:07:38.413-07:00 [INFO] "doquery:001" "Query cnt in progress mutations 9 
namesProcessed 900" + +2024-03-15T14:07:38.425-07:00 [INFO] "doquery:001" "Query cnt complete mutations 10 namesProcessed 917" + +2024-03-15T14:07:38.425-07:00 [INFO] "doquery:001" "Query completed at Fri Mar 15 2024 14:07:38 GMT-0700 (Pacific Daylight Time)" + +2024-03-15T14:07:38.425-07:00 [INFO] "doquery:001" "Result hotels starting with \"a\" 33, hotels starting with \"b\" 48, ..." +---- +-- +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedTouchOp.adoc b/modules/eventing/pages/eventing-handler-advancedTouchOp.adoc new file mode 100644 index 00000000..06f82849 --- /dev/null +++ b/modules/eventing/pages/eventing-handler-advancedTouchOp.adoc @@ -0,0 +1,119 @@ += Function: Advanced TOUCH Operation +:description: pass:q[Perform the Advanced TOUCH operation where Eventing interacts with the Data Service.] +:page-edition: Enterprise Edition +:tabs: + +{description} + +The `advancedTouchOp` function: + +* Performs the Advanced TOUCH operation +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation where the `meta.id` or KEY starts with `ten_seconds:` +* Does not require that you send the document back to the Data Service to update the TTL + +For more information about the Advanced TOUCH operation, see xref:eventing-advanced-keyspace-accessors.adoc#advanced-touch-op[Advanced Keyspace Accessors]. + +[{tabs}] +==== +advancedTouchOp:: ++ +-- +[source,javascript] +---- +// Configure the settings for the advancedTouchOp function as follows: +// +// Version 7.6+ +// "Function Scope" +// *.* (or try bulk.data if non-privileged) +// "Listen to Location" +// bulk.data.source +// "Eventing Storage" +// rr100.eventing.metadata +// Binding(s) +// 1. "binding type", "alias name...", "bucket.scope.collection", "Access" +// "bucket alias", "src_col", "bulk.data.source", "read and write" + +function OnUpdate(doc, meta) { + if (! meta.id.startsWith("ten_seconds:") ) return; + + log('input meta', meta); + log('input doc', doc); + + var expiry = new Date(); + expiry.setSeconds(expiry.getSeconds() + 10); + + var req = {"id": meta.id, "expiry_date": expiry}; + var result = couchbase.touch(src_col, req); + if (result.success) { + log('success adv. touch: result', result); + } else { + log('failure adv. touch: id', req.id, 'result', result); + } +} +---- +-- + +Input Data:: ++ +-- +[source,json] +---- +INPUT: KEY ten_seconds:001 + +{ + "id": "ten_seconds:001", + "type": "Auto-deletes in 10 seconds. Keep refreshing to retrieve documents." +} + +---- +-- + +Output Data:: ++ +-- +[source,json] +---- + +2024-03-15T11:57:51.103-07:00 [INFO] "input doc" +{ + "id": "ten_seconds:001", + "type": "Auto-deletes in 10 seconds. Keep refreshing to retrieve documents." +} + +2024-03-15T11:57:51.103-07:00 [INFO] "input meta" +{ + "cas": "1710529071079817216", + "id": "ten_seconds:001", + "expiration": 0, + "flags": 33554438, + "vb": 679, + "seq": 102, + "datatype": "json", + "keyspace": + { + "bucket_name": "travel-sample", + "scope_name": "tenant_agent_00", + "collection_name": "bookings" + }, + "cid": 18 +} + +2024-03-15T11:57:51.108-07:00 [INFO] "success adv. 
touch: result" +{ + "meta": + { + "id": "ten_seconds:001", + "cas": "1710529071107276800" + }, + "success": true +} + +2024-03-15T11:58:03.302-07:00 [INFO] "Doc deleted/expired" "ten_seconds:001" +{ + "expired": true +} +---- +-- +==== \ No newline at end of file From b4faead76150263c39babf1bbb58a7cd6cfb7dfc Mon Sep 17 00:00:00 2001 From: Julia Browne Date: Fri, 15 Mar 2024 22:21:46 -0700 Subject: [PATCH 2/8] Edited existing pages to match style and format of new pages --- ...dler-advanced-docControlledSelfExpiry.adoc | 85 +++++++++--------- .../eventing-handler-advanced-keepLastN.adoc | 88 +++++++++---------- .../eventing-handler-advancedDecrementOp.adoc | 34 +++---- .../eventing-handler-advancedDeleteOp.adoc | 45 +++++----- .../pages/eventing-handler-advancedGetOp.adoc | 27 +++--- ...enting-handler-advancedGetOpWithCache.adoc | 31 +++---- .../eventing-handler-advancedIncrementOp.adoc | 34 +++---- .../eventing-handler-advancedInsertOp.adoc | 36 ++++---- .../eventing-handler-advancedReplaceOp.adoc | 48 +++++----- .../eventing-handler-advancedUpsertOp.adoc | 39 ++++---- ...nting-handler-multiCollectionEventing.adoc | 51 ++++++----- 11 files changed, 277 insertions(+), 241 deletions(-) diff --git a/modules/eventing/pages/eventing-handler-advanced-docControlledSelfExpiry.adoc b/modules/eventing/pages/eventing-handler-advanced-docControlledSelfExpiry.adoc index 610b50ad..6ec3d26f 100644 --- a/modules/eventing/pages/eventing-handler-advanced-docControlledSelfExpiry.adoc +++ b/modules/eventing/pages/eventing-handler-advanced-docControlledSelfExpiry.adoc @@ -1,43 +1,46 @@ = Function: Advanced Document Controlled Expiry -:description: pass:q[Purge a document automatically based on self-contained start and duration fields.] +:description: pass:q[Purge a document automatically based on the document's self-contained start and duration fields.] :page-edition: Enterprise Edition :tabs: -*Goal*: {description} - -* This function *advancedDocControlledSelfExpiry* demonstrates self-expiry of a document; for example, a user trial. -* Requires Eventing Storage (or metadata collection) and a "source" collection. -* Needs a Binding of type "bucket alias" (as documented in the Scriptlet). -* When documents are created, they will have no expiration value. This function processes the initial mutation to calculate and set the proper TTL. -* In Couchbase, when using a simple integer expiry value (as opposed to a proper date or time object), the expiration can be specified in two ways: -** As an offset from the current time. If the absolute value of the expiry is less than 30 days (60 * 60 * 24 * 30 seconds), it is considered an offset. -** As an absolute Unix time stamp. If the value is greater than 30 days (60 * 60 * 24 * 30 seconds), it is considered an absolute time stamp. -** As described in xref:learn:data/expiration.adoc[Expiration], if a "Bucket Max Time-To-Live" is set (specified in seconds), it is an enforced hard upper limit. As such, any subsequent document mutation (by {sqlpp}, Eventing, or any Couchbase SDK) will result in the document having its expiration adjusted and set to the bucket’s maximum TTL if the operation has: -*** No TTL. -*** A TTL of zero. -*** A TTL greater than the bucket TTL. -* As we are using Advanced Bucket Accessors setting document expirations (or TTLs) we use a JavaScript Data object. -* Will operate on any document with type == "trial_customers". 
-* Will ignore any doc with a non-zero TTL -* This is different than setting a TTL on a bucket or a collection which will typically update (or extend) the TTL of a document on each mutation. +{description} + +The `advancedDocControlledSelfExpiry` function: + +* Demonstrates the self-expiry of a document (for example, a user trial) +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Processes the initial mutation to calculate and set the TTL of a newly-created document +* Uses a JavaScript data object to set document expiration +* Operates on any document where `type == "trial_custoimers"` +* Ignores any document with a TTL that is not zero + +When you use a simple integer instead of a proper date or time object for your document's expiration value, the expiration value is specified in one of the following ways: + +* As an offset from the current time if the value is less than 30 days (60 * 60 * 24 * 30 seconds). +* As an absolute Unix time stamp if the value is greater than 30 days (60 * 60 * 24 * 30 seconds). + +If a `Bucket Max Time-to-Live` is set and specified in seconds, it's enforced as a hard upper limit. +Any subsequent document mutation, whether by {sqlpp}, Eventing, or a Couchbase SDK, results in the document having its expiration adjusted and set to the bucket's maximum TTL if the operation has: + +* No TTL +* A TTL of zero +* A TTL greater than the bucket's TTL [{tabs}] ==== advancedDocControlledSelfExpiry:: + -- -Two variants of this function are available - a 6.6 version that relies on {sqlpp} and a 6.6.1+/7.0.0+ version (*this Function*) that directly sets the expiration. -You can completely avoid _N1QL(...)_ and use _couchbase.replace(bucket_binding, meta, doc)_ as the advancedDocControlledSelfExpiry variant is much faster. +There are two variants of this function available: a xref:eventing-handler-docControlledSelfExpiry.adoc[Couchbase Server version 6.6 that relies on {sqlpp}], and a Couchbase Server version 6.6.1+/7.0.0+ that directly sets the expiration. -* xref:eventing-handler-docControlledSelfExpiry.adoc[docControlledSelfExpiry (indirect TTL via {sqlpp})] -* <> +You can improve your function's performance by avoiding N1QL() and using `couchbase.replace(bucket_binding, meta, doc)` instead. -[#advancedDocControlledSelfExpiry] -advancedDocControlledSelfExpiry (direct TTL) +The following example directly sets the expiration. [source,javascript] ---- -// To run configure the settings for this Function, advancedDocControlledSelfExpiry, as follows: +// Configure the settings for the advancedDocControlledSelfExpiry function as follows: // // Version 7.1+ // "Function Scope" @@ -61,36 +64,36 @@ advancedDocControlledSelfExpiry (direct TTL) // "bucket alias", "src_col", "source", "read and write" function OnUpdate(doc, meta) { - // Filter items that don't have been updated + // Filter items that have not been updated if (meta.expiration !== 0) { log(meta.id, "IGNORE expiration "+meta.expiration+" !== 0 or "+ new Date(meta.expiration).toString()); return; } - // Optional filter to a specic field like 'type' + // Optional filter to a specific field like 'type' if (doc.type !== 'trial_customers') return; - // Our expiry is based on a JavaScript date parsable field, it must exist + // The expiry is based on a JavaScript date parsable field if (!doc.trialStartDate || !doc.trialDurationDays) return; - // Convert the doc's field timeStamp and convert to unix epoch time (in ms.). 
+ // Convert the doc field timeStamp to Unix epoch time in milliseconds var docTimeStampMs = Date.parse(doc.trialStartDate); var keepDocForMs = doc.trialDurationDays * 1000 * 60 * 60 * 24 ; - var nowMs = Date.now(); // get current unix time (in ms.). + var nowMs = Date.now(); // Get current Unix time in milliseconds - // Archive if we have kept it for too long no need to set the expiration + // Archive if it has been kept for too long; you do not need to set an expiration if( nowMs >= (docTimeStampMs + keepDocForMs) ) { - // Delete the document form the source collection via the map alias + // Delete the document from the source collection through the map alias delete src_col[meta.id]; log(meta.id, "DELETE from src_col to dst_bkt alias as our expiration " + new Date(docTimeStampMs + keepDocForMs).toString()) + " is already past"; } else { var key = meta.id; - //set the meta.expiration=ttlMs + // Set the meta.expiration=ttlMs var ttlMs = docTimeStampMs + keepDocForMs; if (ttlMs !== 0) { @@ -98,7 +101,7 @@ function OnUpdate(doc, meta) { ttlMs+" or " + new Date(ttlMs).toString()); // Advanced Bucket Accessors use JavaScript Date objects var expiryDate = new Date(ttlMs); - // This is 4X to 5X faster than using N1QL(...) and no need to worry about recursion. + // This is 4X to 5X faster than using N1QL(...) and you do not need to worry about recursion var res = couchbase.replace(src_col,{"id":meta.id,"expiry_date":expiryDate},doc); if (!res.success) { log(meta.id,'Setting TTL to',expiryDate,'failed',res); @@ -109,13 +112,13 @@ function OnUpdate(doc, meta) { ---- -- -Input Data/Mutation:: +Input data:: + -- +Create a test set of 4 documents using the Query Editor to insert the data items. +You do not need an Index. -We want to create a test set of four (4) documents, use the Query Editor to insert the the data items (you do not need an index). - -Note, if the today is past 08-25-2021 (MM-DD-YYYY) just change the `trialStartDate` for the last two records to at least 90 days from now. +If today's date is past 08-25-2021 (MM-DD-YYYY), you can change the `trialStartDate` for the last two records to at least 90 days from today. [source,sqlpp] ---- @@ -154,7 +157,7 @@ Note, if the today is past 08-25-2021 (MM-DD-YYYY) just change the `trialStartDa ---- -- -Output Data/Mutation:: +Output data:: + -- [source,json] @@ -179,7 +182,7 @@ NEW/OUTPUT: KEY trial_customers::3 "type": "trial_customers" } -We end up with two (2) of the four documents (obviously you may need to adjust the {sqlpp} INSERT in a few months as all the document would be immediately deleted). +Returns 2 of the 4 documnents. * "trial_customers::0" was deleted * "trial_customers::1" was deleted @@ -188,4 +191,4 @@ We end up with two (2) of the four documents (obviously you may need to adjust t ---- -- -==== +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advanced-keepLastN.adoc b/modules/eventing/pages/eventing-handler-advanced-keepLastN.adoc index 4c95ccd0..b05bfac2 100644 --- a/modules/eventing/pages/eventing-handler-advanced-keepLastN.adoc +++ b/modules/eventing/pages/eventing-handler-advanced-keepLastN.adoc @@ -1,34 +1,35 @@ = Function: Advanced Keep the Last N User Items -:description: pass:q[Keep the last N user notifications seen related to a user ID (these could be any documents).] +:description: pass:q[Keep the last N user notifications related to a user ID.] 
:page-edition: Enterprise Edition :tabs: -*Goal*: {description} +{description} -* This function *advancedKeepLastN* demonstrates how to keep a user record with the last N activities. -* Requires Eventing Storage (or metadata collection) and a "source" collection. -* Needs a Binding of type "bucket alias" (as documented in the Scriptlet). -* Will operate on any mutation with a key starting with "nu:" of the form "nu:#:#". -* The key "nu:#:#" contains two numbers. The first # is an increasing notification number, the second # is the user ID. -* Anytime we insert a new record we want to remove the earliest notification record for the user so we only have at most N records for each user. -We assume that nid always increases across time as such we ignore duplicates. -* For our test we will keep just the three (3) most recent notifications per user ID. +The `advancedKeepLastN` function: + +* Demonstrates how to keep a user record with the last N activities +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation where the KEY starts with `nu:` in the form `nu:#:#` +** The KEY `nu:#:#` has 2 numbers. The first is an increasing notification number and the second is the user ID. +* Only keeps N records for each user +* Removes the earliest notification record for a user whenever a new record is inserted for that user + +The following example assumes that N always increases across time and ignores any duplicates. +It keeps only the 3 most recent notifications for each user ID. [{tabs}] ==== advancedKeepLastN:: + -- -Two variants of this function are available - a 6.6 version that implements userspace CAS and a 6.6.1+/7.0.0+ version (*this Function*) which uses true CAS. +There are two variants of this function available: a xref:eventing-handler-keepLastN.adoc[Couchbase Server version 6.6 that implements userspace CAS], and a Couchbase Server version 6.6.1+/7.0.0+ that uses true CAS. -* xref:eventing-handler-keepLastN.adoc[keepLastN (userspace CAS)] -* <> +The following example uses true CAS. -[#advancedKeepLastN] -advancedKeepLastN (true CAS) [source,javascript] ---- -// To run configure the settings for this Function, advancedKeepLastN, as follows: +// Configure the settings for the advancedKeepLastN function as follows: // // Version 7.1+ // "Function Scope" @@ -52,53 +53,52 @@ advancedKeepLastN (true CAS) // "bucket alias", "src_col", "source", "read and write" /* - * Process all mutations, however updateNotifyArrayInKV(...) will only - * data with KEYS like nu:#:# + * Process all mutations; updateNotifyArrayInKV(...) 
only processes data with KEYS like nu:#:# */ function OnUpdate(doc, meta) { - const MAX_ARRAY = 3; // alter to keep 'N' items - const DEBUG = false; // if true very verbose + const MAX_ARRAY = 3; // Keep 'N' items + const DEBUG = false; // If true, the debug log can be too long updateNotifyArrayInKV(doc, meta, MAX_ARRAY, DEBUG); } /* - * manipulates the in memory document to only keep 'MAX_ARRAY' items + * Manipulate the in-memory document to only keep 'MAX_ARRAY' items */ function addToNtfyArray(user_doc, user_id, insert_json, MAX_ARRAY, DEBUG) { var ntfy_id = insert_json.nid; if (user_doc.notifications && user_doc.notifications[0] && user_doc.notifications[0].nid >= ntfy_id && user_doc.notifications.length === MAX_ARRAY) { - // do nothing this is older data, we assume that nid always increases + // Do nothing; this is older data return null; } else { - // find insert position + // Find the insert position for (var i = 0; i <= user_doc.notifications.length + 1; i++) { if (i < user_doc.notifications.length && user_doc.notifications[i].nid === ntfy_id) { - // do nothing this is duplicate data we already have it, assume no updates to notifys + // Do nothing; this is duplicated data if (DEBUG) log('Ignore DUP ntfy_id', ntfy_id, 'user_id', user_id, 'insert_json', insert_json); return null; } if (i == user_doc.notifications.length || user_doc.notifications[i].nid > ntfy_id) { - // add to array middle or end + // Add to middle or end of array user_doc.notifications.splice(i, 0, insert_json); break; } } } while (user_doc.notifications.length > MAX_ARRAY) { - // ensure proper size + // Ensure proper size user_doc.notifications.shift(); } return user_doc; } /* - * creates, gets, and updates (via replace) the KV tracking array document + * Creates, gets, and updates (via replace) the KV tracking array document */ function updateNotifyArrayInKV(doc, meta, MAX_ARRAY, DEBUG) { - // will process ALL data like nu:#:# + // Process ALL data like nu:#:# var parts = meta.id.split(':'); if (!parts || parts.length != 3 || parts[0] != "nu") return; var ntfy_id = parseInt(parts[1]); @@ -109,7 +109,7 @@ function updateNotifyArrayInKV(doc, meta, MAX_ARRAY, DEBUG) { "nid": ntfy_id, doc }; - // In version 6.6.1 we can use CAS in Eventing to avoid race conditions + // In version 6.6.1, use CAS in Eventing to avoid race conditions var res = null; var req_id = "user_plus_ntfys:" + user_id; var req_meta = { @@ -127,11 +127,11 @@ function updateNotifyArrayInKV(doc, meta, MAX_ARRAY, DEBUG) { user_meta = res.meta; } else { if (!res.error.key_not_found) { - // do nothing this is a big error + // Do nothing; this is a big error log("FAILED to insert id: " + meta.id, doc, 'res', res) return; } - // here we just need to create our document or initialize it. + // Create the document and initialize it user_doc = { "type": "user_plus_ntfys", "id": user_id, @@ -139,25 +139,25 @@ function updateNotifyArrayInKV(doc, meta, MAX_ARRAY, DEBUG) { }; res = couchbase.insert(src_col, req_meta, user_doc); if (DEBUG) log('couchbase.insert(src_col,', req_meta, user_doc, ') success==' + res.success, res); - // redo loop just force a couchbase.get (try again or read our insert) + // Redo the loop to force couchbase.get res = null; } if (res !== null) { - // here we had a successful couchbase.get(...) so both 'user_doc' and 'user_meta' - // must be valid so we manipulate our copy of the user_doc to keep only MAX_ARRAY + // Successful couchbase.get(...) 
for both user_doc and user_meta + // Manipulate the copy of the user_doc to keep only MAX_ARRAY var new_doc = addToNtfyArray(user_doc, user_id, insert_json, MAX_ARRAY, DEBUG); if (new_doc == null) { - // duplicate data we already have it, just ignore/skip + // Ignore or skip duplicated data break; } - // now try to replace the user_doc with new_doc but pass CAS to test for race conditions + // Try to replace the user_doc with new_doc; pass CAS to test for race conditions res = couchbase.replace(src_col, user_meta, new_doc); if (DEBUG) log('couchbase.replace(src_col,', user_meta, new_doc, ') success==' + res.success, res); if (res.success) { - // CAS matched so we are done. + // CAS matches and operation is successful break; } else { - // redo loop try again + // Redo loop and try again res = null; } } @@ -166,11 +166,12 @@ function updateNotifyArrayInKV(doc, meta, MAX_ARRAY, DEBUG) { ---- -- -Input Data/Mutation:: +Input data:: + -- -We want to create a test doc set +Create a new test document set using the Query Editor to insert the data items. +You do not need an Index. [cols="1,3",width=50%,frame=all] |=== @@ -189,9 +190,6 @@ We want to create a test doc set |=== - -Use the Query Editor to insert the above data items (you do not need an Index) - [source,sqlpp] ---- UPSERT INTO `bulk`.`data`.`source` (KEY,VALUE) @@ -208,7 +206,7 @@ Use the Query Editor to insert the above data items (you do not need an Index) ---- -- -Output Data/Mutation:: +Output data:: + -- [source,json] @@ -261,4 +259,4 @@ NEW/OUTPUT: KEY user_plus_ntfys:2 } ---- -- -==== +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedDecrementOp.adoc b/modules/eventing/pages/eventing-handler-advancedDecrementOp.adoc index 7ee9ebde..6fb8e1aa 100644 --- a/modules/eventing/pages/eventing-handler-advancedDecrementOp.adoc +++ b/modules/eventing/pages/eventing-handler-advancedDecrementOp.adoc @@ -1,15 +1,19 @@ -= Function: Advanced DECREMENT operation -:description: pass:q[Perform the Advanced DECREMENT operation where Eventing interacts with the Data service.] += Function: Advanced DECREMENT Operation +:description: pass:q[Perform the Advanced DECREMENT operation where Eventing interacts with the Data Service.] :page-edition: Enterprise Edition :tabs: -*Goal*: {description} +{description} -* This function *advancedDecrementOp* merely demonstrates the Advanced DECREMENT operation. -* Requires Eventing Storage (or metadata collection) and a "source" collection. -* Needs a Binding of type "bucket alias" (as documented in the Scriptlet). -* Will operate on any mutation and count down (decrement) the mutations subject to DCP dedup. -* For more information refer to xref:eventing-advanced-keyspace-accessors.adoc#advanced-decrement-op[Advanced DECREMENT operation] in the detailed documentation. +The `advancedDecrementOp` function: + +* Performs the Advanced DECREMENT operation +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation +* Counts down the mutations subject to DCP deduplication + +For more information about the Advanced Self-Recursion Parameter, see xref:eventing-advanced-keyspace-accessors.adoc#advanced-decrement-op[Advanced DECREMENT Operation]. 
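+
+At its heart the scriptlet is a single `couchbase.decrement` call against a counter document.
+The following minimal sketch assumes the same bucket alias (`src_col`) and counter KEY (`my_atomic_counter:1`) as the complete, configured function in the tab below.
+
+[source,javascript]
+----
+function OnUpdate(doc, meta) {
+    // The counter document is created on first use; each call subtracts 1 from its "count" field
+    var result = couchbase.decrement(src_col, {"id": "my_atomic_counter:1"});
+    if (result.success) {
+        // result.doc.count holds the new value, for example -1, -2, -3, ...
+        log('mutations counted down to', result.doc.count);
+    } else {
+        log('failure adv. decrement: result', result);
+    }
+}
+----
+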
[{tabs}] ==== @@ -18,7 +22,7 @@ advancedDecrementOp:: -- [source,javascript] ---- -// To run configure the settings for this Function, advancedDecrementOp, as follows: +// Configure the settings for the advancedDecrementOp function as follows: // // Version 7.1+ // "Function Scope" @@ -46,7 +50,7 @@ function OnUpdate(doc, meta) { // log('input meta', meta); // log('input doc ', doc); - // if doc.count doesn't exist it will be created + // Creates doc.count if it does not already exist var ctr_meta = {"id": "my_atomic_counter:1" }; var result = couchbase.decrement(src_col,ctr_meta); if (result.success) { @@ -57,7 +61,7 @@ function OnUpdate(doc, meta) { } ---- -- -Input Data/Mutation:: +Input data:: + -- [source,json] @@ -76,9 +80,9 @@ UPSERT INTO `bulk`.`data`.`source` (KEY,VALUE) ---- -- + -Output Data:: +Output data:: + -We insert ten (10) documents and count the mutations via decrementing +Insert 10 documents and count the mutations through decrementing. + -- [source,json] @@ -91,7 +95,7 @@ KEY: my_atomic_counter:1 ---- -- -Output Log:: +Output log:: + -- [source,json] @@ -127,4 +131,4 @@ Output Log:: {"doc":{"count":-10},"meta":{"id":"my_atomic_counter:1","cas":"1610137859948412928"},"success":true} ---- -- -==== +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedDeleteOp.adoc b/modules/eventing/pages/eventing-handler-advancedDeleteOp.adoc index 7970b4e7..3945f9ef 100644 --- a/modules/eventing/pages/eventing-handler-advancedDeleteOp.adoc +++ b/modules/eventing/pages/eventing-handler-advancedDeleteOp.adoc @@ -1,17 +1,20 @@ -= Function: Advanced DELETE operation -:description: pass:q[Perform the Advanced DELETE operation where Eventing interacts with the Data service.] += Function: Advanced DELETE Operation +:description: pass:q[Perform the Advanced DELETE operation where Eventing interacts with the Data Service.] :page-edition: Enterprise Edition :tabs: -*Goal*: {description} +{description} -* This function *advancedDeleteOp* merely demonstrates the Advanced DELETE operation. -* Requires Eventing Storage (or metadata collection) and a "source" collection. -* Needs a Binding of type "bucket alias" (as documented in the Scriptlet). -* Will operate on any mutation where doc.type === "control_adv_delete". -* Always tries to insert the test document, any insert error will be ignored. -* There are 4 modes of operation: no_cas, bad_cas, no_key, and good_cas. -* For more information refer to xref:eventing-advanced-keyspace-accessors.adoc#advanced-delete-op[Advanced DELETE operation] in the detailed documentation. +The `advancedDeleteOp` function: + +* Performs the Advanced DELETE operation +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation where `doc.type === "control_adv_delete"` +* Always tries to insert the test document and ignores insert errors +* Has 4 modes of operation: `no_cas`, `bad_cas`, `good_cas`, and `no_key` + +For more information about the Advanced Self-Recursion Parameter, see xref:eventing-advanced-keyspace-accessors.adoc#advanced-delete-op[Advanced DELETE Operation]. 
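+
+As a quick reference before the complete scriptlet in the tab below, the following minimal sketch shows only the `good_cas` mode: it reads the current CAS with `couchbase.get` and then passes that CAS to `couchbase.delete`.
+The bucket alias `src_col` and the KEY `test_adv_delete:1` match the names used in the full example.
+
+[source,javascript]
+----
+function OnUpdate(doc, meta) {
+    // Read the current CAS of the document to delete
+    var res = couchbase.get(src_col, {"id": "test_adv_delete:1"});
+    if (!res.success) return;
+    // Pass the matching CAS so the delete fails if the document changed in the meantime
+    var del = couchbase.delete(src_col, {"id": "test_adv_delete:1", "cas": res.meta.cas});
+    log('delete with matching CAS, success==' + del.success, del);
+}
+----
+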
[{tabs}] ==== @@ -20,7 +23,7 @@ advancedDeleteOp:: -- [source,javascript] ---- -// To run configure the settings for this Function, advancedDeleteOp, as follows: +// Configure the settings for the advancedDeleteOp function as follows: // // Version 7.1+ // "Function Scope" @@ -49,23 +52,23 @@ function OnUpdate(doc, meta) { log('input meta', meta); log('input doc ', doc); - // Setup, make sure we have our doc to "delete", ignore any errors + // Setup, make sure there is a doc to "delete", ignore any errors couchbase.insert(src_col,{"id":"test_adv_delete:" + doc.ins_id},{"a:": 1}); var new_meta; if (doc.mode && doc.mode === "no_cas") { - // Here we pass no CAS it will always succeed + // No CAS is passed - it always succeeds new_meta = {"id":"test_adv_delete:" + doc.ins_id}; } if (doc.mode && doc.mode === "bad_cas") { - // Here we pass a non-matching CAS it will always fail + // Pass a non-matching CAS - it always fails new_meta = {"id":"test_adv_delete:" + doc.ins_id, "cas":"1111111111111111111"}; } if (doc.mode && doc.mode === "good_cas") { - // Here we will pass the matching or current CAS it will succeed + // Pass the matching or current CAS - it succeeds var tmp_r = couchbase.get(src_col,{"id":"test_adv_delete:" + doc.ins_id}); if (tmp_r.success) { - // Here we use the current CAS just read via couchbase.get(...) + // Use the current CAS to read via couchbase.get(...) new_meta = {"id":"test_adv_delete:" + doc.ins_id, "cas": tmp_r.meta.cas}; } else { log('Cannot delete due to no such key',"test_adv_delete:" + doc.ins_id); @@ -86,7 +89,7 @@ function OnUpdate(doc, meta) { } ---- -- -Input Data/Mutation:: +Input data:: + -- [source,json] @@ -137,9 +140,11 @@ INPUT: KEY control_adv_delete::4 ---- -- + -Output Data/Log:: +Output data:: + -We do four (4) deletion attempts the second fails due to a CAS missmatch and the third fails due to no such key. +Perform 4 deletion attempts. +The second attempt fails because of a CAS mismatch. +The third attempt fails because the document key does not exist. + -- [source,json] @@ -257,4 +262,4 @@ Logs from Mutation #4 } ---- -- -==== +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedGetOp.adoc b/modules/eventing/pages/eventing-handler-advancedGetOp.adoc index 728cf6e3..f890d1c7 100644 --- a/modules/eventing/pages/eventing-handler-advancedGetOp.adoc +++ b/modules/eventing/pages/eventing-handler-advancedGetOp.adoc @@ -1,15 +1,18 @@ -= Function: Advanced GET operation -:description: pass:q[Perform the Advanced GET operation where Eventing interacts with the Data service.] += Function: Advanced GET Operation +:description: pass:q[Perform the Advanced GET operation where Eventing interacts with the Data Service.] :page-edition: Enterprise Edition :tabs: -*Goal*: {description} +{description} -* This function *advancedGetOp* merely demonstrates the Advanced GET operation. -* Requires Eventing Storage (or metadata collection) and a "source" collection. -* Needs a Binding of type "bucket alias" (as documented in the Scriptlet). -* Will operate on any mutation where doc.type === "test_adv_get". -* For more information refer to xref:eventing-advanced-keyspace-accessors.adoc#advanced-get-op[Advanced GET operation] in the detailed documentation. 
+The `advancedGetOp` function: + +* Performs the Advanced GET operation +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation where `doc.type === "test_adv_get"` + +For more information about the Advanced Self-Recursion Parameter, see xref:eventing-advanced-keyspace-accessors.adoc#advanced-get-op[Advanced GET Operation]. [{tabs}] ==== @@ -18,7 +21,7 @@ advancedGetOp:: -- [source,javascript] ---- -// To run configure the settings for this Function, advancedGetOp, as follows: +// Configure the settings for the advancedGetOp function as follows: // // Version 7.1+ // "Function Scope" @@ -60,7 +63,7 @@ function OnUpdate(doc, meta) { ---- -- -Input Data/Mutation:: +Input data:: + -- [source,json] @@ -75,7 +78,7 @@ INPUT: KEY test_adv_get::1 ---- -- -Output Data/Logged:: +Output data:: + -- [source,json] @@ -120,4 +123,4 @@ Output Data/Logged:: ---- -- -==== +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedGetOpWithCache.adoc b/modules/eventing/pages/eventing-handler-advancedGetOpWithCache.adoc index 9f763894..80783c6d 100644 --- a/modules/eventing/pages/eventing-handler-advancedGetOpWithCache.adoc +++ b/modules/eventing/pages/eventing-handler-advancedGetOpWithCache.adoc @@ -1,18 +1,19 @@ -= Function: Advanced GET operation (with cache) -:description: pass:q[Perform the Advanced GET operation where Eventing interacts with the Data service.] += Function: Advanced GET Operation with Cache +:description: pass:q[Perform the Advanced GET operation with cache where Eventing interacts with the Data Service.] :page-edition: Enterprise Edition :tabs: -[.status]#Couchbase Server 7.0.2# -*Goal*: {description} +{description} -* This function *advancedGetOpWithCache* merely demonstrates the Advanced GET operation with Bucket Backed Cache enabled. -* Requires Eventing Storage (or metadata collection) and a "source" collection. -* Needs a Binding of type "bucket alias" (as documented in the Scriptlet). -* Will operate on any mutation where doc.type === "test_adv_get". -* The optional third parameter to couchbase.get of *{"cache": true}* enables caching of documents for up to 1 second. -* This RYOW caching is 18X-25X faster than reading near static data directly from the Data Service (or KV). -* For more information refer to xref:eventing-advanced-keyspace-accessors.adoc#advanced-get-op[Advanced GET operation] in the detailed documentation. +The `advancedGetOpWithCache` function: + +* Performs the Advanced GET operation with an enabled bucket-backed cache +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation where `doc.type === "test_adv_get"` +* Has an optional parameter to `couchbase.get` called `{ "cache": true }`, which enables caching of documents for up to 1 second + +For more information about the Advanced Self-Recursion Parameter, see xref:eventing-advanced-keyspace-accessors.adoc#optional-cache-true-parameter[Optional `{ "cache": true }` Parameter]. 
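+
+The only difference from a plain Advanced GET is the optional third argument to `couchbase.get`.
+The following minimal sketch, which assumes the same bucket alias (`src_col`) as the complete function in the tab below, contrasts the two calls.
+
+[source,javascript]
+----
+function OnUpdate(doc, meta) {
+    if (doc.type !== "test_adv_get") return;
+    // Plain read: always fetches the document from the Data Service
+    var res1 = couchbase.get(src_col, meta);
+    // Cached read: the same call with {"cache": true}; the result can be served
+    // from the bucket-backed cache for up to 1 second
+    var res2 = couchbase.get(src_col, meta, {"cache": true});
+    log('uncached success', res1.success, 'cached success', res2.success);
+}
+----
+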
[{tabs}] ==== @@ -21,7 +22,7 @@ advancedGetOpWithCache:: -- [source,javascript] ---- -// To run configure the settings for this Function, advancedGetOpWithCache, as follows: +// Configure the settings for the advancedGetOpWithCache function as follows: // // Version 7.1+ // "Function Scope" @@ -54,7 +55,7 @@ function OnUpdate(doc, meta) { ---- -- -Input Data/Mutation:: +Input data:: + -- [source,json] @@ -69,7 +70,7 @@ INPUT: KEY test_adv_get::1 ---- -- -Output Data/Logged:: +Output data:: + -- [source,json] @@ -111,4 +112,4 @@ Output Data/Logged:: ---- -- -==== +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedIncrementOp.adoc b/modules/eventing/pages/eventing-handler-advancedIncrementOp.adoc index f1bfe773..c83cb1ca 100644 --- a/modules/eventing/pages/eventing-handler-advancedIncrementOp.adoc +++ b/modules/eventing/pages/eventing-handler-advancedIncrementOp.adoc @@ -1,15 +1,19 @@ -= Function: Advanced INCREMENT operation -:description: pass:q[Perform the Advanced INCREMENT operation where Eventing interacts with the Data service.] += Function: Advanced INCREMENT Operation +:description: pass:q[Perform the Advanced INCREMENT operation where Eventing interacts with the Data Service.] :page-edition: Enterprise Edition :tabs: -*Goal*: {description} +{description} -* This function *advancedIncrementOp* merely demonstrates the Advanced INCREMENT operation. -* Requires Eventing Storage (or metadata collection) and a "source" collection. -* Needs a Binding of type "bucket alias" (as documented in the Scriptlet). -* Will operate on any mutation and count the mutations subject to DCP dedup. -* For more information refer to xref:eventing-advanced-keyspace-accessors.adoc#advanced-increment-op[Advanced INCREMENT operation] in the detailed documentation. +The `advancedIncrementOp` function: + +* Performs the Advanced INCREMENT operation +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation +* Counts the mutations subject to DCP deduplication + +For more information about the Advanced Self-Recursion Parameter, see xref:eventing-advanced-keyspace-accessors.adoc#advanced-increment-op[Advanced INCREMENT Operation]. [{tabs}] ==== @@ -18,7 +22,7 @@ advancedIncrementOp:: -- [source,javascript] ---- -// To run configure the settings for this Function, advancedIncrementOp, as follows: +// Configure the settings for the advancedIncrementOp function as follows: // // Version 7.1+ // "Function Scope" @@ -46,7 +50,7 @@ function OnUpdate(doc, meta) { // log('input meta', meta); // log('input doc ', doc); - // if doc.count doesn't exist it will be created + // Creates doc.count if it does not already exist var ctr_meta = {"id": "my_atomic_counter:1" }; var result = couchbase.increment(src_col,ctr_meta); if (result.success) { @@ -57,7 +61,7 @@ function OnUpdate(doc, meta) { } ---- -- -Input Data/Mutation:: +Input data:: + -- [source,json] @@ -76,9 +80,9 @@ UPSERT INTO `bulk`.`data`.`source` (KEY,VALUE) ---- -- -Output Data:: +Output data:: + -We insert ten (10) documents and count the mutations +Insert 10 documents and count the mutations through incrementing. 
+ -- [source,json] @@ -91,7 +95,7 @@ KEY: my_atomic_counter:1 ---- -- -Output Log:: +Output log:: + -- [source,json] @@ -127,4 +131,4 @@ Output Log:: {"doc":{"count":10},"meta":{"id":"my_atomic_counter:1","cas":"1610137859948412928"},"success":true} ---- -- -==== +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedInsertOp.adoc b/modules/eventing/pages/eventing-handler-advancedInsertOp.adoc index e022382c..d0cac006 100644 --- a/modules/eventing/pages/eventing-handler-advancedInsertOp.adoc +++ b/modules/eventing/pages/eventing-handler-advancedInsertOp.adoc @@ -1,15 +1,18 @@ -= Function: Advanced INSERT operation -:description: pass:q[Perform the Advanced INSERT operation where Eventing interacts with the Data service.]] += Function: Advanced INSERT Operation +:description: pass:q[Perform the Advanced INSERT operation where Eventing interacts with the Data Service.]] :page-edition: Enterprise Edition :tabs: -*Goal*: {description} +{description} -* This function *advancedInsertOp* merely demonstrates the Advanced INSERT operation. -* Requires Eventing Storage (or metadata collection) and a "source" collection. -* Needs a Binding of type "bucket alias" (as documented in the Scriptlet). -* Will operate on any mutation where doc.type === "control_adv_insert". -* For more information refer to xref:eventing-advanced-keyspace-accessors.adoc#advanced-insert-op[Advanced INSERT operation] in the detailed documentation. +The `advancedInsertOp` function: + +* Performs the Advanced INSERT operation +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation where `doc.type === "control_adv_insert"` + +For more information about the Advanced Self-Recursion Parameter, see xref:eventing-advanced-keyspace-accessors.adoc#advanced-insert-op[Advanced INSERT Operation]. [{tabs}] ==== @@ -18,7 +21,7 @@ advancedInsertOp:: -- [source,javascript] ---- -// To run configure the settings for this Function, advancedInsertOp, as follows: +// Configure the settings for the advancedInsertOp function as follows: // // Version 7.1+ // "Function Scope" @@ -45,7 +48,7 @@ function OnUpdate(doc, meta) { if (!meta.id.startsWith("control_adv_insert")) return; log('input meta', meta); log('input doc ', doc); - // two modes typical insert or setting a expiration/TTL + // two modes: typical insert and setting an expiration/TTL var new_meta = {"id":"test_adv_insert:"+doc.ins_id}; if (doc.set_expiry && doc.set_expiry === true) { new_meta = {"id":"test_adv_insert:"+doc.ins_id, expiry_date: new Date(Date.now() + 60 * 1000)}; @@ -60,7 +63,7 @@ function OnUpdate(doc, meta) { } ---- -- -Input Data/Mutation:: +Input data:: + -- [source,json] @@ -100,9 +103,12 @@ INPUT: KEY control_adv_insert::3 ---- -- + -Output Data:: +Output data:: + -We try to insert three (3) documents the first two (2) inserts succeed but the second, test_adv_insert:2, will expire in 60 seconds because we set an expiration. The third insert attempt will fail since test_adv_insert:1 already exists. +The output data inserts 3 documents. +The first 2 insertions are successful. +The `test_adv_insert: 2` has an expiration of 60 seconds. +The third insertion attempt fails because `test_adv_insert: 1` already exists. 
+ -- [source,json] @@ -125,7 +131,7 @@ KEY: test_adv_insert:2 ---- -- + -Output Log:: +Output log:: + -- [source,json] @@ -215,4 +221,4 @@ Logs from Mutation #3 } ---- -- -==== +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedReplaceOp.adoc b/modules/eventing/pages/eventing-handler-advancedReplaceOp.adoc index f425eae9..8cdb54e9 100644 --- a/modules/eventing/pages/eventing-handler-advancedReplaceOp.adoc +++ b/modules/eventing/pages/eventing-handler-advancedReplaceOp.adoc @@ -1,17 +1,20 @@ -= Function: Advanced REPLACE operation -:description: pass:q[Perform the Advanced REPLACE operation where Eventing interacts with the Data service.] += Function: Advanced REPLACE Operation +:description: pass:q[Perform the Advanced REPLACE operation where Eventing interacts with the Data Service.] :page-edition: Enterprise Edition :tabs: -*Goal*: {description} +{description} -* This function *advancedReplaceOp* merely demonstrates the Advanced REPLACE operation. -* Requires Eventing Storage (or metadata collection) and a "source" collection. -* Needs a Binding of type "bucket alias" (as documented in the Scriptlet). -* Will operate on any mutation where doc.type === "control_adv_replace". -* Always tries to insert the test document, any insert error will be ignored. -* There are 3 modes of operation: no_cas, bad_cas, and good_cas. -* For more information refer to xref:eventing-advanced-keyspace-accessors.adoc#advanced-replace-op[Advanced REPLACE operation] in the detailed documentation. +The `advancedReplaceOp` function: + +* Performs the Advanced REPLACE operation +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation where `doc.type === "control_adv_replace"` +* Always tries to insert the test document and ignores insert errors +* Has 3 modes of operation: `no_cas`, `bad_cas`, and `good_cas` + +For more information about the Advanced Self-Recursion Parameter, see xref:eventing-advanced-keyspace-accessors.adoc#advanced-replace-op[Advanced REPLACE Operation]. 
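+
+As a quick reference before the complete scriptlet in the tab below, the following minimal sketch shows only the `good_cas` mode: it reads the current CAS with `couchbase.get` and then passes that CAS to `couchbase.replace`.
+The bucket alias `src_col` and the KEY `test_adv_replace:1` match the names used in the full example; the replacement body `{"a": 2}` is a placeholder for this sketch.
+
+[source,javascript]
+----
+function OnUpdate(doc, meta) {
+    // Read the document and its current CAS
+    var res = couchbase.get(src_col, {"id": "test_adv_replace:1"});
+    if (!res.success) return;
+    // The replace succeeds only if the CAS still matches, that is, no concurrent update occurred
+    var rep = couchbase.replace(src_col, {"id": "test_adv_replace:1", "cas": res.meta.cas}, {"a": 2});
+    log('replace with matching CAS, success==' + rep.success, rep);
+}
+----
+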
[{tabs}] ==== @@ -20,7 +23,7 @@ advancedReplaceOp:: -- [source,javascript] ---- -// To run configure the settings for this Function, advancedReplaceOp, as follows: +// Configure the settings for the advancedReplaceOp function as follows: // // Version 7.1+ // "Function Scope" @@ -49,25 +52,25 @@ function OnUpdate(doc, meta) { log('input meta', meta); log('input doc ', doc); - // Setup, make sure we have our doc to "replace", ignore any errors + // Setup, make sure there is a doc to "replace", ignore any errors couchbase.insert(src_col,{"id":"test_adv_replace:" + doc.ins_id},{"a:": 1}); var new_meta; if (doc.mode && doc.mode === "no_cas") { - // Here we pass no CAS it will always succeed + // No CAS is passed - it always succeeds new_meta = {"id":"test_adv_replace:" + doc.ins_id}; - // optional set an expiry 60 seconds in the future + // (Optional) Set an expiry 60 seconds in the future // new_meta.expiry_date = new Date(Date.now() + 60 * 1000); } if (doc.mode && doc.mode === "bad_cas") { - // Here we pass a non-matching CAS it will always fail + // Pass a non-matching CAS - it always fails new_meta = {"id":"test_adv_replace:" + doc.ins_id, "cas":"1111111111111111111"}; } if (doc.mode && doc.mode === "good_cas") { - // Here we will pass the matching or current CAS it will succeed + // Pass the matching or current CAS - it succeeds var tmp_r = couchbase.get(src_col,{"id":"test_adv_replace:" + doc.ins_id}); if (tmp_r.success) { - // Here we use the current CAS just read via couchbase.get(...) + // Use the current CAS to read via couchbase.get(...) new_meta = {"id":"test_adv_replace:" + doc.ins_id, "cas": tmp_r.meta.cas}; } else { log('Cannot replace due to no such key',"test_adv_replace:" + doc.ins_id); @@ -84,7 +87,7 @@ function OnUpdate(doc, meta) { } ---- -- -Input Data/Mutation:: +Input data:: + -- [source,json] @@ -124,9 +127,10 @@ INPUT: KEY control_adv_replace::3 ---- -- + -Output Data:: +Output data:: + -We try to replace three (3) documents the first and last replacements succeed but the second, fails because of a CAS missmatch. +Replace 3 documents. +The first and last replacements are successful; the second replacement fails because of a CAS mismatch. + -- [source,json] @@ -141,7 +145,7 @@ KEY: test_adv_replace:10 ---- -- + -Output Log:: +Output log:: + -- [source,json] @@ -230,4 +234,4 @@ Logs from Mutation #3 } ---- -- -==== +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedUpsertOp.adoc b/modules/eventing/pages/eventing-handler-advancedUpsertOp.adoc index 211e4a92..ba5d5420 100644 --- a/modules/eventing/pages/eventing-handler-advancedUpsertOp.adoc +++ b/modules/eventing/pages/eventing-handler-advancedUpsertOp.adoc @@ -1,15 +1,18 @@ -= Function: Advanced UPSERT operation -:description: pass:q[Perform the Advanced UPSERT operation where Eventing interacts with the Data service.] += Function: Advanced UPSERT Operation +:description: pass:q[Perform the Advanced UPSERT operation where Eventing interacts with the Data Service.] :page-edition: Enterprise Edition :tabs: -*Goal*: {description} +{description} -* This function *advancedUpsertOp* merely demonstrates the Advanced UPSERT operation. -* Requires Eventing Storage (or metadata collection) and a "source" collection. -* Needs a Binding of type "bucket alias" (as documented in the Scriptlet). -* Will operate on any mutation where doc.type === "control_adv_insert". 
-* For more information refer to xref:eventing-advanced-keyspace-accessors.adoc#advanced-upsert-op[Advanced UPSERT operation] in the detailed documentation. +The `advancedUpsertOp` function: + +* Performs the Advanced UPSERT operation +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation where `doc.type === "control_adv_insert"` + +For more information about the Advanced Self-Recursion Parameter, see xref:eventing-advanced-keyspace-accessors.adoc#advanced-upsert-op[Advanced UPSERT Operation]. [{tabs}] ==== @@ -18,7 +21,7 @@ advancedUpsertOp:: -- [source,javascript] ---- -// To run configure the settings for this Function, advancedGetOp, as follows: +// Configure the settings for the advancedUpsertOp function as follows: // // Version 7.1+ // "Function Scope" @@ -45,8 +48,8 @@ function OnUpdate(doc, meta) { if (!meta.id.startsWith("control_adv_upsert")) return; log('input meta', meta); log('input doc ', doc); - // two modes typical upsert or setting a expiration/TTL - // note CAS if supplied will be ignored (use replace for this) + // two modes: typical upsert and setting an expiration/TTL + // note that CAS, if supplied, is ignored. You can use REPLACE instead. var new_meta = {"id":"test_adv_upsert:"+doc.ins_id}; if (doc.set_expiry && doc.set_expiry === true) { new_meta = {"id":"test_adv_upsert:"+doc.ins_id, expiry_date: new Date(Date.now() + 60 * 1000)}; @@ -61,7 +64,7 @@ function OnUpdate(doc, meta) { } ---- -- -Input Data/Mutation:: +Input data:: + -- [source,json] @@ -101,10 +104,12 @@ INPUT: KEY control_adv_upsert::3 ---- -- + -Output Data:: +Output data:: + -We try to upsert three (3) documents all three (2) upserts succeed but the second, test_adv_upsert:2, will expire in 60 seconds because we set an expiration. -Note, third upsert attempt will overwrite test_adv_upsert:1 thus we are left with two documents. +The output data upserts 3 documents. +The first 2 upsertions are successful. +The `test_adv_insert: 2` has an expiration of 60 seconds. +The third upsertion attempt fails because `test_adv_insert: 1` already exists. + -- [source,json] @@ -127,7 +132,7 @@ KEY: test_adv_upsert:2 ---- -- + -Output Log:: +Output log:: + -- [source,json] @@ -215,4 +220,4 @@ Logs from Mutation #3 } ---- -- -==== +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-multiCollectionEventing.adoc b/modules/eventing/pages/eventing-handler-multiCollectionEventing.adoc index 4be311e8..1b3c017c 100644 --- a/modules/eventing/pages/eventing-handler-multiCollectionEventing.adoc +++ b/modules/eventing/pages/eventing-handler-multiCollectionEventing.adoc @@ -1,19 +1,22 @@ = Function: Multi Collection Eventing -:description: pass:q[Show how to access the Data Service when Eventing is listening to multiple collections.] +:description: pass:q[Access the Data Service when Eventing is listening to multiple collections.] :page-edition: Enterprise Edition +{description} -*Goal*: {description} +The `multiCollectionEventing` function: -* This function *multiCollectionEventing* demonstrates how to access the Data Service when using wildcard bindings. -* Requires four (4) keyspaces in two buckets "rr100" and "source" -** rr100.eventing.metadata -** source._default._default -** source.myscope.mycol1 -** source.myscope.mycol2 -* Needs two Bindings of type "bucket alias" (as documented in the Scriptlet). -* Will operate on three test documents. 
Add them one at a time after the function is deployed.
-* Highlights the use of _meta.keyspace_
+* Demonstrates how to access the Data Service when using `{asterisk}` wildcard bindings
+* Requires 2 bindings of type `bucket alias`
+* Requires the following 4 keyspaces in two buckets, `rr100` and `source`:
+** `rr100.eventing.metadata`
+** `source._default._default`
+** `source.myscope.mycol1`
+** `source.myscope.mycol2`
+* Operates on 3 test documents
+* Highlights the use of `meta.keyspace`
+
+For more information about listening to multiple collections, see xref:eventing-advanced-keyspace-accessors.adoc#multiple-collection-functions[Eventing Functions that Listen to Multiple Collections].
[tabs] ====
@@ -22,9 +25,9 @@ multiCollectionEventing:: -- [source, javascript] ----
-// To run configure the settings for this Function, multiCollectionEventing, as follows:
+// Configure the settings for the multiCollectionEventing function as follows:
//
-// Setup four (4) required keyspaces in two buckets "rr100" and "source"
+// Set up four (4) required keyspaces in two buckets "rr100" and "source"
// rr100.eventing.metadata // source._default._default // source.myscope.mycol1 // source.myscope.mycol2
@@ -58,15 +61,15 @@ function OnUpdate(doc, meta) { {"bucket_name": "source","scope_name": "myscope","collection_name": "mycol2"}}); log('>>>B fixed read',"res1", res1);
- // protect against reading from something outside the alias
+ // Protect against reading from something outside the alias
if (meta.keyspace.scope_name == "myscope") {
- // TEST GET with keyspace form meta
+ // TEST GET with keyspace from meta
var res2 = couchbase.get(alias_ro,meta); log('>>>C read using passed meta (must be myscope)',"res2", res2); if (meta.keyspace.collection_name == "mycol2") { // TEST UPSERT with hardcode keyspace
- // add a field to the doc we read in res1
+ // Add a field to the document read in res1
res1.doc.random1 = Math.random(); var res3 = couchbase.upsert(alias_rw,{"id": "doc2", "keyspace": {"bucket_name": "source","scope_name": "myscope","collection_name": "mycol2"}}, res1.doc)
@@ -82,13 +85,13 @@ function OnUpdate(doc, meta) { var res5 = couchbase.get(alias_rw,meta); log('>>>F get (show added fields)',"res5", res5);
- // TEST DELETE with hardcode keyspace (so we can TEST the insert)
+ // TEST DELETE with hardcode keyspace (so the insert can be tested)
var res6 = couchbase.delete(alias_rw,{"id": "doc2", "keyspace": {"bucket_name": "source","scope_name": "myscope","collection_name": "mycol2"}}) log('>>>G delete',"res6", res6); // TEST INSERT with hardcode keyspace
- // now remove the added items put thnigs back
+ // Remove the added fields to put the document back as it was
delete res1.doc.random1; delete res1.doc.random2; var res7 = couchbase.insert(alias_rw,{"id": "doc2", "keyspace":
@@ -100,13 +103,13 @@ ---- --
-Input Data/Mutation::
+Input data::
+ --
+Create a test document set of 3 documents using the Query Editor to insert the data items.
+You do not need an Index.
-We want to create a small test doc set of three documents
-
-Use the Query Editor to insert the above data items (you do not need an Index) add them one at a time and check the Eventing log each time.
+Add one test document at a time and check the Eventing log after each insert.
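Before adding the test documents below, it can help to keep the core pattern of the handler in mind; the following is a minimal illustrative sketch (it assumes the `myscope` collections listed above and is not a substitute for the full handler) of branching on `meta.keyspace`:

[source,javascript]
----
// Illustrative sketch only: with wildcard bindings, check meta.keyspace to
// see which collection produced each mutation.
function OnUpdate(doc, meta) {
    // Ignore mutations from outside the scope the aliases are meant to cover.
    if (meta.keyspace.scope_name !== "myscope") return;
    if (meta.keyspace.collection_name === "mycol1") {
        log("mutation from mycol1", meta.id);
    } else if (meta.keyspace.collection_name === "mycol2") {
        log("mutation from mycol2", meta.id);
    }
}
----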
[source,sqlpp] ---- @@ -118,7 +121,7 @@ Use the Query Editor to insert the above data items (you do not need an Index) a ---- -- -Output Log (reverse order)/Mutation:: +Output log in reverse order:: + -- [source,log] @@ -158,4 +161,4 @@ Function Log - multiCollectionEventing ---- -- -==== +==== \ No newline at end of file From a776a23e769b1c5fd589a88abf83f3e0f370362c Mon Sep 17 00:00:00 2001 From: Julia Browne Date: Fri, 15 Mar 2024 23:57:39 -0700 Subject: [PATCH 3/8] Added new examples to page and updated formatting and language --- modules/eventing/pages/eventing-examples.adoc | 69 +++++++++++++------ 1 file changed, 49 insertions(+), 20 deletions(-) diff --git a/modules/eventing/pages/eventing-examples.adoc b/modules/eventing/pages/eventing-examples.adoc index e6e4de3c..2a355213 100644 --- a/modules/eventing/pages/eventing-examples.adoc +++ b/modules/eventing/pages/eventing-examples.adoc @@ -1,5 +1,5 @@ = Examples: Using the Eventing Service -:description: This page contains examples of how to use the Eventing Service, using the Couchbase Web Console. +:description: This page contains examples of how to use the Eventing Service with the Couchbase Web Console. :page-edition: Enterprise Edition +++ +++ @@ -10,9 +10,11 @@ == Step by Step Examples [#Couchbase-Eventing-Examples] -*Detailed Examples*: These tutorial-like guides are ideal for a novice to learn the basics of the Eventing Service, via complete detailed step by step start-to-finish instructions. +=== Detailed Examples -[cols="1,1,1"] +The following tutorial-like guides have detailed start-to-finish instructions and are ideal for new users to learn the basics of the Eventing Service. + +[cols="1,1,1,1"] |=== | xref:eventing:eventing-example-data-enrichment.adoc[Data Enrichment] | xref:eventing:eventing-examples-cascade-delete.adoc[Cascade Delete] @@ -22,7 +24,10 @@ | xref:eventing:eventing-examples-cancel-overwrite-timer.adoc[Cancel or Overwrite Timer] | xref:eventing:eventing-examples-recurring-timer.adoc[Recurring Timer] | xref:eventing:eventing-examples-rest-via-curl-get.adoc[External REST via cURL GET] -| xref:eventing:eventing-examples-high-risk.adoc[Risk Assessment] +| xref:eventing:eventing-examples-high-risk.adoc[Risk Assessment] +| +| +| |=== [#examples-scriptlets] @@ -30,10 +35,12 @@ [#Couchbase-Eventing-Scriptlets] [#examples-scriptlets-kv] -*Basic KV Eventing Functions*: The following Scriptlets are essentially stand alone Eventing Functions examples, and introduce more use cases. Here we assume the reader has a good understanding of the Eventing System and requires little guidance. +=== Basic KV Eventing Functions + +The following scriptlets are examples of standadlone Eventing Functions. -[#Couchbase-Eventing-Snipets] -[cols="1,1,1"] +[#Couchbase-Eventing-Snippets] +[cols="1,1,1,1"] |=== | xref:eventing:eventing-handler-basicBucketOps.adoc[basicBucketOps] | xref:eventing:eventing-handler-curl-get.adoc[basicCurlGet] @@ -46,23 +53,28 @@ | xref:eventing:eventing-handler-keepLastN.adoc[keepLastN] | xref:eventing:eventing-handler-docControlledSelfExpiry.adoc[docControlledSelfExpiry] | xref:eventing:eventing-handler-shippingNotifier.adoc[shippingNotifier] -| xref:eventing:eventing-handler-ConvertBucketToCollections.adoc[ConvertBucketToCollections] +| xref:eventing:eventing-handler-ConvertBucketToCollections.adoc[convertBucketToCollections] |=== [#examples-scriptlets-n1ql] -*Basic {sqlpp} Eventing Functions*: The following Scriptlets demonstrate using {sqlpp} or the Query Service from within an Eventing Function. 
+=== Basic {sqlpp} Eventing Functions -[cols="1,1,1"] +The following scriptlets demonstrate how to use {sqlpp} or the Query Service with an Eventing Function. + +[cols="1,1,1,1"] |=== | xref:eventing:eventing-handler-basicN1qlSelectStmt.adoc[basicN1qlSelectStmt] | xref:eventing:eventing-handler-basicN1qlPreparedSelectStmt.adoc[basicN1qlPreparedSelectStmt] | +| |=== [#examples-scriptlets-generic] -*Generic Manipulation Eventing Functions* The following Scriptlets are more advanced use cases which focus on mutating documents without knowledge of the document's schema. +=== Generic Manipulation Eventing Functions -[cols="1,1,1"] +The following scriptlets are examples of advanced use cases that focus on mutating a document without knowing that document's schema. + +[cols="1,1,1,1"] |=== | xref:eventing:eventing-handler-dateToEpochConversion.adoc[dateToEpochConversion] | xref:eventing:eventing-handler-deepCloneAndModify.adoc[deepCloneAndModify] @@ -72,13 +84,18 @@ | xref:eventing:eventing-handler-genericFlatten.adoc[genericFlatten] | xref:eventing:eventing-handler-convertXMLtoJSON.adoc[convertXMLtoJSON] | xref:eventing:eventing-handler-convertAdvXMLtoJSON.adoc[convertAdvXMLtoJSON] -| |=== [#examples-scriptlets-advanced-accessors] -*Advanced Accessor Eventing Functions*: The following Scriptlets demonstrate using Advanced Bucket Accessors (introduced in version 6.6.1) which allow the use of CAS, ability to set expirations (or TTLs) and the use of distributed atomic counters to increment or decrement counts. +=== Advanced Accessor Eventing Functions -[cols="1,1,1"] +The following scriptlets demonstrate how to use Advanced Keyspace Accessors, which allow you to: + +* Use CAS +* Set expiry (TTL) dates +* Use distributed atomic counters to increment and decrement counts + +[cols="1,1,1,1"] |=== | xref:eventing:eventing-handler-advancedGetOp.adoc[advancedGetOp] | xref:eventing:eventing-handler-advancedGetOpWithCache.adoc[advancedGetOpWithCache] @@ -88,30 +105,42 @@ | xref:eventing:eventing-handler-advancedDeleteOp.adoc[advancedDeleteOp] | xref:eventing:eventing-handler-advancedIncrementOp.adoc[advancedIncrementOp] | xref:eventing:eventing-handler-advancedDecrementOp.adoc[advancedDecrementOp] +| xref:eventing:eventing-handler-advancedTouchOp.adoc[advancedTouchOp] | xref:eventing:eventing-handler-advanced-keepLastN.adoc[advancedKeepLastN] | xref:eventing:eventing-handler-advanced-docControlledSelfExpiry.adoc[advancedDocControlledSelfExpiry] | xref:eventing:eventing-handler-multiCollectionEventing.adoc[multiCollectionEventing] +| xref:eventing:eventing-handler-advancedSelfRecursion.adoc[advancedSelfRecursion] +| xref:eventing:eventing-handler-advancedMutateInField.adoc[advancedMutateInField] +| xref:eventing:eventing-handler-advancedMutateInArray.adoc[advancedMutateInArray] | |=== [#examples-scriptlets-binary-documents] -*Binary Document Support*: The following Scriptlets demonstrate support for binary documents in Eventing. Only a Function with “language compatibility” of 6.6.2 or above in its settings will pass binary documents to the OnUpdate(doc,meta) handler. +=== Binary Document Support + +The following scriptlets demonstrate support for binary documents in Eventing. + +Your Eventing Function must have a language compatibility setting of Couchbase Server version 6.6.2 or above to pass binary documents in its `OnUpdate(doc,meta)` handler. 
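As a rough illustration of that requirement (a hand-written sketch, not one of the linked scriptlets), an `OnUpdate` handler can branch on `meta.datatype`, which is reported as either `"json"` or `"binary"`:

[source,javascript]
----
// Illustrative sketch: skip JSON-specific processing for binary payloads.
// Binary documents only reach OnUpdate when language compatibility is 6.6.2+.
function OnUpdate(doc, meta) {
    if (meta.datatype === "binary") {
        // doc is a binary value here; log the key and move on.
        log("binary document received", meta.id);
        return;
    }
    // JSON documents arrive as ordinary JavaScript objects.
    log("json document received", meta.id);
}
----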
-[cols="1,1,1"] +[cols="1,1,1,1"] |=== | xref:eventing:eventing-handler-basicBinaryKV.adoc[basicBinaryKV] | xref:eventing:eventing-handler-advancedBinaryKV.adoc[advancedBinaryKV] | +| |=== [#examples-scriptlets-performance] -*Performance Eventing Functions* The following Scriptlets are performance oriented and/or benchmarks. +=== Performance Eventing Functions + +The following scriptlets are examples of performance-oriented or benchmark Eventing Functions. -[cols="1,1,1"] +[cols="1,1,1,1"] |=== | xref:eventing:eventing-handler-fasterToLocalString.adoc[fasterToLocalString] | | +| |=== -+++ +++ ++++ +++ \ No newline at end of file From 52205f81223007191fb4709d458563876e282dd3 Mon Sep 17 00:00:00 2001 From: Julia Browne Date: Sat, 16 Mar 2024 00:08:46 -0700 Subject: [PATCH 4/8] Updated nav file with links to new function files --- modules/eventing/pages/eventing-examples.adoc | 8 ++-- modules/eventing/partials/nav.adoc | 48 ++++++++++++------- 2 files changed, 35 insertions(+), 21 deletions(-) diff --git a/modules/eventing/pages/eventing-examples.adoc b/modules/eventing/pages/eventing-examples.adoc index 2a355213..2bbae677 100644 --- a/modules/eventing/pages/eventing-examples.adoc +++ b/modules/eventing/pages/eventing-examples.adoc @@ -7,7 +7,7 @@ {description} [#examples-step-by-step] -== Step by Step Examples +== Step-by-Step Examples [#Couchbase-Eventing-Examples] === Detailed Examples @@ -31,11 +31,11 @@ The following tutorial-like guides have detailed start-to-finish instructions an |=== [#examples-scriptlets] -== Scriptlets or Terse Examples +== Scriptlets [#Couchbase-Eventing-Scriptlets] [#examples-scriptlets-kv] -=== Basic KV Eventing Functions +=== Basic Accessor Eventing Functions The following scriptlets are examples of standadlone Eventing Functions. 
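For orientation, the smallest shape such a standalone Function can take is just its two entry points; the following bare-bones sketch is illustrative only and is not one of the listed scriptlets:

[source,javascript]
----
// Bare-bones skeleton: OnUpdate fires for creates and updates, OnDelete for
// deletes and expirations (options.expired distinguishes the two cases).
function OnUpdate(doc, meta) {
    log("document created or updated", meta.id);
}

function OnDelete(meta, options) {
    if (options.expired) {
        log("document expired", meta.id);
    } else {
        log("document deleted", meta.id);
    }
}
----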
@@ -108,10 +108,10 @@ The following scriptlets demonstrate how to use Advanced Keyspace Accessors, whi | xref:eventing:eventing-handler-advancedTouchOp.adoc[advancedTouchOp] | xref:eventing:eventing-handler-advanced-keepLastN.adoc[advancedKeepLastN] | xref:eventing:eventing-handler-advanced-docControlledSelfExpiry.adoc[advancedDocControlledSelfExpiry] -| xref:eventing:eventing-handler-multiCollectionEventing.adoc[multiCollectionEventing] | xref:eventing:eventing-handler-advancedSelfRecursion.adoc[advancedSelfRecursion] | xref:eventing:eventing-handler-advancedMutateInField.adoc[advancedMutateInField] | xref:eventing:eventing-handler-advancedMutateInArray.adoc[advancedMutateInArray] +| xref:eventing:eventing-handler-multiCollectionEventing.adoc[multiCollectionEventing] | |=== diff --git a/modules/eventing/partials/nav.adoc b/modules/eventing/partials/nav.adoc index 71bc96ae..001df123 100644 --- a/modules/eventing/partials/nav.adoc +++ b/modules/eventing/partials/nav.adoc @@ -1,24 +1,29 @@ * xref:eventing:eventing-overview.adoc[Eventing] + ** xref:eventing:eventing-Terminologies.adoc[Terminology] + ** xref:eventing:eventing-language-constructs.adoc[Language Constructs] *** xref:eventing:eventing-advanced-keyspace-accessors.adoc[Advanced Keyspace Accessors] *** xref:eventing:eventing-timers.adoc[Timers] *** xref:eventing:eventing-curl-spec.adoc[cURL] + ** xref:eventing:eventing-lifecycle.adoc[Eventing Lifecycle] ** xref:eventing:eventing-buckets-to-collections.adoc[Buckets vs Collections] ** xref:eventing:eventing-rbac.adoc[Eventing Role-Based Access Control] + ** xref:eventing:eventing-examples.adoc[Examples: Using the Eventing Service] - *** xref:eventing:eventing-example-data-enrichment.adoc[Data Enrichment] - *** xref:eventing:eventing-examples-cascade-delete.adoc[Cascade Delete] - *** xref:eventing:eventing-examples-docexpiry.adoc[Document Expiry] - *** xref:eventing:eventing-examples-delete-v-expiry.adoc[Delete v Expiry] - *** xref:eventing:eventing-examples-docarchive.adoc[Document Archival] - *** xref:eventing:eventing-examples-cancel-overwrite-timer.adoc[Cancel or Overwrite Timer] - *** xref:eventing:eventing-examples-recurring-timer.adoc[Recurring Timer] - *** xref:eventing:eventing-examples-rest-via-curl-get.adoc[External REST via cURL GET] - *** xref:eventing:eventing-examples-high-risk.adoc[Risk Assessment] - ** xref:eventing:eventing-examples.adoc#examples-scriptlets[Examples: Terse Scriptlets] - *** xref:eventing:eventing-examples.adoc#examples-scriptlets-kv[Basic KV Functions] + *** Step-by-Step Examples + **** xref:eventing:eventing-example-data-enrichment.adoc[Data Enrichment] + **** xref:eventing:eventing-examples-cascade-delete.adoc[Cascade Delete] + **** xref:eventing:eventing-examples-docexpiry.adoc[Document Expiry] + **** xref:eventing:eventing-examples-delete-v-expiry.adoc[Delete v Expiry] + **** xref:eventing:eventing-examples-docarchive.adoc[Document Archival] + **** xref:eventing:eventing-examples-cancel-overwrite-timer.adoc[Cancel or Overwrite Timer] + **** xref:eventing:eventing-examples-recurring-timer.adoc[Recurring Timer] + **** xref:eventing:eventing-examples-rest-via-curl-get.adoc[External REST via cURL GET] + **** xref:eventing:eventing-examples-high-risk.adoc[Risk Assessment] + + *** Basic Accessor Functions **** xref:eventing:eventing-handler-basicBucketOps.adoc[basicBucketOps] **** xref:eventing:eventing-handler-curl-get.adoc[basicCurlGet] **** xref:eventing:eventing-handler-curl-post.adoc[basicCurlPost] @@ -31,10 +36,12 @@ **** 
xref:eventing:eventing-handler-docControlledSelfExpiry.adoc[docControlledSelfExpiry] **** xref:eventing:eventing-handler-shippingNotifier.adoc[shippingNotifier] **** xref:eventing:eventing-handler-ConvertBucketToCollections.adoc[ConvertBucketToCollections] - *** xref:eventing:eventing-examples.adoc#examples-scriptlets-n1ql[Basic {sqlpp} Eventing Functions] + + *** Basic {sqlpp} Functions **** xref:eventing:eventing-handler-basicN1qlSelectStmt.adoc[] **** xref:eventing:eventing-handler-basicN1qlPreparedSelectStmt.adoc[] - *** xref:eventing:eventing-examples.adoc#examples-scriptlets-generic[Generic Manipulation Functions] + + *** Generic Manipulation Functions **** xref:eventing:eventing-handler-dateToEpochConversion.adoc[dateToEpochConversion] **** xref:eventing:eventing-handler-deepCloneAndModify.adoc[deepCloneAndModify] **** xref:eventing:eventing-handler-removeObjectStubs.adoc[removeObjectStubs] @@ -43,7 +50,8 @@ **** xref:eventing:eventing-handler-genericFlatten.adoc[genericFlatten] **** xref:eventing:eventing-handler-convertXMLtoJSON.adoc[convertXMLtoJSON] **** xref:eventing:eventing-handler-convertAdvXMLtoJSON.adoc[convertAdvXMLtoJSON] - *** xref:eventing:eventing-examples.adoc#examples-scriptlets-advanced-accessors[Advanced Accessor Functions] + + *** Advanced Accessor Functions **** xref:eventing:eventing-handler-advancedGetOp.adoc[advancedGetOp] **** xref:eventing:eventing-handler-advancedGetOpWithCache.adoc[advancedGetOpWithCache] **** xref:eventing:eventing-handler-advancedInsertOp.adoc[advancedInsertOp] @@ -52,14 +60,20 @@ **** xref:eventing:eventing-handler-advancedDeleteOp.adoc[advancedDeleteOp] **** xref:eventing:eventing-handler-advancedIncrementOp.adoc[advancedIncrementOp] **** xref:eventing:eventing-handler-advancedDecrementOp.adoc[advancedDecrementOp] + **** xref:eventing:eventing-handler-advancedTouchOp.adoc[advancedTouchOp] **** xref:eventing:eventing-handler-advanced-keepLastN.adoc[advancedKeepLastN] - **** xref:eventing:eventing-handler-advanced-docControlledSelfExpiry.adoc[advanced-DocControlledSelfExpiry] + **** xref:eventing:eventing-handler-advanced-docControlledSelfExpiry.adoc[advancedDocControlledSelfExpiry] + **** xref:eventing:eventing-handler-advancedSelfRecursion.adoc[advancedSelfRecursion] + **** xref:eventing:eventing-handler-advancedMutateInField.adoc[advancedMutateInField] + **** xref:eventing:eventing-handler-advancedMutateInArray.adoc[advancedMutateInArray] **** xref:eventing:eventing-handler-multiCollectionEventing.adoc[multiCollectionEventing] - *** xref:eventing:eventing-examples.adoc#examples-scriptlets-binary-documents[Binary Document Support] + + *** Binary Document Support **** xref:eventing:eventing-handler-basicBinaryKV.adoc[basicBinaryKV] **** xref:eventing:eventing-handler-advancedBinaryKV.adoc[advancedBinaryKV] - *** xref:eventing:eventing-examples.adoc#examples-scriptlets-performance[Performance Functions] + *** Performance Functions **** xref:eventing:eventing-handler-fasterToLocalString.adoc[fasterToLocalString] + ** xref:eventing:eventing-debugging-and-diagnosability.adoc[Debugging and Diagnosability] ** xref:eventing:eventing-statistics.adoc[Statistics] ** xref:eventing:troubleshooting-best-practices.adoc[Troubleshooting and Best Practices] From 95ad10083643bac77e4ee88b011a0bc47a44b477 Mon Sep 17 00:00:00 2001 From: Julia Browne Date: Sat, 16 Mar 2024 00:26:29 -0700 Subject: [PATCH 5/8] Updated nav page order --- modules/eventing/pages/eventing-examples.adoc | 2 +- modules/eventing/partials/nav.adoc | 2 +- 2 files changed, 2 
insertions(+), 2 deletions(-) diff --git a/modules/eventing/pages/eventing-examples.adoc b/modules/eventing/pages/eventing-examples.adoc index 2bbae677..86ad7f51 100644 --- a/modules/eventing/pages/eventing-examples.adoc +++ b/modules/eventing/pages/eventing-examples.adoc @@ -108,10 +108,10 @@ The following scriptlets demonstrate how to use Advanced Keyspace Accessors, whi | xref:eventing:eventing-handler-advancedTouchOp.adoc[advancedTouchOp] | xref:eventing:eventing-handler-advanced-keepLastN.adoc[advancedKeepLastN] | xref:eventing:eventing-handler-advanced-docControlledSelfExpiry.adoc[advancedDocControlledSelfExpiry] +| xref:eventing:eventing-handler-multiCollectionEventing.adoc[multiCollectionEventing] | xref:eventing:eventing-handler-advancedSelfRecursion.adoc[advancedSelfRecursion] | xref:eventing:eventing-handler-advancedMutateInField.adoc[advancedMutateInField] | xref:eventing:eventing-handler-advancedMutateInArray.adoc[advancedMutateInArray] -| xref:eventing:eventing-handler-multiCollectionEventing.adoc[multiCollectionEventing] | |=== diff --git a/modules/eventing/partials/nav.adoc b/modules/eventing/partials/nav.adoc index 001df123..fe6974f4 100644 --- a/modules/eventing/partials/nav.adoc +++ b/modules/eventing/partials/nav.adoc @@ -63,10 +63,10 @@ **** xref:eventing:eventing-handler-advancedTouchOp.adoc[advancedTouchOp] **** xref:eventing:eventing-handler-advanced-keepLastN.adoc[advancedKeepLastN] **** xref:eventing:eventing-handler-advanced-docControlledSelfExpiry.adoc[advancedDocControlledSelfExpiry] + **** xref:eventing:eventing-handler-multiCollectionEventing.adoc[multiCollectionEventing] **** xref:eventing:eventing-handler-advancedSelfRecursion.adoc[advancedSelfRecursion] **** xref:eventing:eventing-handler-advancedMutateInField.adoc[advancedMutateInField] **** xref:eventing:eventing-handler-advancedMutateInArray.adoc[advancedMutateInArray] - **** xref:eventing:eventing-handler-multiCollectionEventing.adoc[multiCollectionEventing] *** Binary Document Support **** xref:eventing:eventing-handler-basicBinaryKV.adoc[basicBinaryKV] From faa5a2a14f3a898e73f85711aab6e7b7b46e4218 Mon Sep 17 00:00:00 2001 From: Julia Browne Date: Sat, 16 Mar 2024 00:38:31 -0700 Subject: [PATCH 6/8] More updates --- .../pages/eventing-Terminologies.adoc | 314 +++++++++--------- modules/eventing/partials/nav.adoc | 2 +- 2 files changed, 160 insertions(+), 156 deletions(-) diff --git a/modules/eventing/pages/eventing-Terminologies.adoc b/modules/eventing/pages/eventing-Terminologies.adoc index 87975f9c..aa5cc66a 100644 --- a/modules/eventing/pages/eventing-Terminologies.adoc +++ b/modules/eventing/pages/eventing-Terminologies.adoc @@ -1,253 +1,257 @@ -= Terminology -:description: While using Eventing Service, the following terminologies are used. -:page-edition: Enterprise Edition - += Eventing Terminology +:description: The following terminology is used by the Eventing Service. +:page-toclevels: 2 [abstract] {description} -== Eventing Service - -The Eventing Service can run one or more Eventing Functions that offer a computing paradigm by which developers can react to mutations (or data changes) via the user code, specifically JavaScript entry points (or handlers) of *OnUpdate*, *OnDelete*, or a user defined *Timer callback*. System resources are managed at or above the Eventing Function level, and the containing Eventing Function scopes the state of all user code. 
- -== Eventing Function - -An Eventing Function is a collection of JavaScript functions and settings (or configuration) together react to a class of events. An Eventing Function is a stateless short running piece of a self-contained program that must execute from start to end prior to a specified timeout duration. - -Eventing Functions can interact with the Data Service (KV), the Query Service ({sqlpp}), and external REST services via a built-in cURL function call. Eventing Functions can also schedule a callback to an internal JavaScript function sometime in the future by creating a Timer. - -The Eventing Service routes mutations to the entry points *OnUpdate* or *OnDelete* and fired Timers to a user defined *Timer callback*. - -NOTE: Since the 6.5 release, the JavaScript code in the Eventing Function is compressed (with the compressed size limited to 128KB) in the Couchbase Server. - -=== Handler - -The Eventing Service calls the following entry points or JavaScript functions on events (mutations or fired timers). - -==== Insert/Update Handler -The *OnUpdate* handler gets called when a document is created or modified. Two major limitations exist. First, if a document is modified several times in a short duration, the calls may be coalesced into a single event due to deduplication. Second, it is not possible to discern between Create and Update operations. -The entry point OnUpdate(doc,meta) passes both `doc`, the document, and `meta`, additional data containing useful information such as the document's id, CAS, expiration, and datatype ("json" or "binary"). -NOTE: Unless the _Language compatibility_ in the settings of the Function is at least 6.6.2 binary documents will be suppressed. -==== Delete Handler -The *OnDelete* handler gets called when a document is deleted (or expired). -The entry point OnDelete(meta,options) passes both `meta` which contains useful information such as the document (see above) and also `options` which has one boolean parameter `options.expired` to indicate if the removal was due to a deletion or an expiration. -One major limitation exists - it is not possible to get the value of the document that was just deleted or expired. - -==== Timer Callback Handler - -Timer callbacks are user defined JavaScript functions passed as the callback argument to the built-in createTimer(callback, date, reference, context) function call. +Typically you should set Function Scope to the bucket.scope that holds the collection that is the source of your mutations to your Eventing Function. This best practice ensures that you _*do not*_ inadvertently cause an Eventing Function to undeploy by removing a *Function Scope* pointing to a resource that is not required for the function to run. -After creating a timer the argument "callback" or JavaScript function will be executed at or close to the desired "date" argument. The "reference" argument is an identifier for the timer scoped to an Eventing function and callback. The "context" argument must be serializable data that is available to the callback when the timer is fired. -For more information see xref:eventing-timers.adoc#createtimer-function[createTimer function]. -=== Statelessness -The persistent state of an Eventing Function is captured in the below external elements, and all states that appears on the execution stack are ephemeral +[#function-scope] +=== Function Scope +You can use the Function scope, or `bucket.scope`, to identify Functions that belong to the same group. 
-* The Listen To Location (the Eventing source) a collection that is the source of the mutations sent to the Function via the Database Change Protocol (DCP). -* The Eventing Storage (the Eventing metadata) a collection used as a scratch pad for the Function's state (this can be shared across all a tenant's Functions). -* The documents or mutations being observed along with their extended attributes. -* Optional Bindings for Function. There are three distinct types of bindings: -** Bucket alias, an alias and access mode used by the Function to access a collection. -** URL alias, an alias and HTTP/S settings used by the Function to access external REST APIs. -** Constant alias, an alias to an integer, decimal number, string, boolean, or a JSON object used as a global variable within the Function. +To set the `bucket.scope` to `+`.`+`, you must have the `Eventing Full Admin` or the `Full Admin` role. +All other users must use a scope that references an existing resource of their `bucket.scope`. -=== Deduplication -Couchbase does not store every version of a document permanently. Hence, when a handler receives the mutation history of documents from the Eventing source, it sees a truncated history of each document. However, the final state of a document is always present in all such histories (as the current state is always available in the database). -Similarly, the KV data engine deduplicates multiple mutations made to any individual document rapidly in succession, to ensure highest possible performance. So, when a document mutates rapidly, handlers may not see all intermediate states, but in all cases, will see the final state of the document. +[#eventing-keyspaces] +=== Eventing Keyspaces -=== Recursive Mutation +A keyspace is a path to a collection in the format `bucket-name.scope-name.collection-name`. -An abbreviation of convenience of the term Potentially Recursive Mutation. When a handler manipulates documents in a keyspace that also serves as the source of mutations to this or any other handler, a write originated by a handler will cause a mutation to be seen by itself or another handler. These are called potentially recursive mutations. +For backward compatibility, you can also use the format `bucket-name._default._default`. +This is the format of a bucket from Couchbase Server version 6.6 that is upgraded to version 7.0. -[#json_number_percision] -=== JSON Number Precision +The following are the two keyspaces used by Eventing Functions: -JSON does not have specialized types for integral and floating-point numbers. So many JavaScript runtimes utilize floating-point numbers to hold JSON numbers. This means that JavaScript numbers have a very large range but lesser precision when compared to traditional integers of the same size. +* <>, which represents the Eventing source +* <>, which represents the Eventing metadata -v8 utilizes 64-bit floating-point numbers which yields a 53-bit precision. So, only integers up to +/- 253 can safely be handled in Eventing JavaScript. When handling very large integers, that is, numbers having 15 or more digits, one should utilize JavaScript BigInt types to safely handle them. The exact numbers where integral precision is lost is defined by JavaScript in the constants `Number.MAX_SAFE_INTEGER` and `Number.MIN_SAFE_INTEGER`. +[#listen-to-location] +==== Listen to Location -Often, such large integers are really only tokens, and it is not necessary to perform arithmetic on them, and only comparison for equality is necessary. 
Examples of this in Eventing are CAS values generated by Advanced Bucket Operations, or the result of the crc64() function. In these cases, it is appropriate to hold these large integers as strings, as it ensures full fidelity while retaining the ability to do equality comparisons. +Eventing Functions use a collection as the source for their data mutations. +This collection is called the Eventing source, and can be made up of Couchbase or Ephemeral keyspace types. +Memcached keyspace types are not supported. -=== Feed Boundary +When you create an Eventing Function, you must specify a source collection. +The `OnUpdate` and `OnDelete` handlers are the entry points for this collection; they receive events and receive and track data mutations. +When you delete a source collection, all deployed and paused Functions associated with the collection are undeployed. -Feed Boundary is a time or progress milestone used during an Eventing Function configuration. The Feed Boundary is a persistent setting in the Function's definition and can only be set or altered when a Function is created, undeployed or paused. +While a Function is processing its JavaScript code, the Function's documents can be mutated in different collections. +You can set keyspaces as destination collections, which are then bound to the Function through bucket aliases. -Based on the `Feed Boundary` setting, when an Eventing Function is deployed it can either process all data mutations available in the cluster (`Everything`) or process only future data mutations (`From now`) that occur post deployment. However, once deployed you may Pause/Resume an Eventing Function in this case; the Feed Boundary is a checkpoint of the Function's actual progress such that no mutations or timers are reprocessed or lost. +The Function's JavaScript code triggers data mutations on documents through Basic Keyspace Accessors or Advanced Keyspace Accessors in the Data Service. +If the code directly modifies documents in the source collection, the Eventing Service suppresses the mutation back to the Function performing the mutation. -=== Function Scope +The Function's JavaScript code can also trigger mutations on documents through inline {sqlpp} statements in the Query Service or `N1QL()` function calls. -A bucket.scope combination is used for identifying functions belonging to the same group. +NOTE: When you implement multiple Functions, you can create infinite recursions. +The Eventing Service prevents the deployment of Functions that might result in recursion loops. +For more information abotu cyclic generation of data changes, see xref:troubleshooting-best-practices.adoc#cyclicredun[Bucket Allocation Considerations]. -Only the "Eventing Full Admin" role and also the "Full Admin" role can set the bucket.scope to *+*+.+*+*; all other Eventing non-privileged users need to define a *Function Scope* for their Eventing functions that references an existing resource of bucket.scope. -This provides role based isolation of Eventing functions between non-privileged users +To get the `Listen To` keyspace to listen to multiple collections, you can use a `{asterisk}` wildcard for the scope or collection. +If the bucket binding used by the JavaScript code also has a `{asterisk}` wildcard for its scope or collection, you must use Advanced Keyspace Accessors to read or write the Data Service. 
+For more information about Advanced Keyspace Accessors, see xref:eventing-advanced-keyspace-accessors.adoc#multiple-collection-functions[Eventing Functions that Listen to Multiple Collections]. -Typically you should set Function Scope to the bucket.scope that holds the collection that is the source of your mutations to your Eventing Function. This best practice ensures that you _*do not*_ inadvertently cause an Eventing Function to undeploy by removing a *Function Scope* pointing to a resource that is not required for the function to run. +TIP: You can have multiple Functions listening to the same collection while running different code. +To use less resources, though, you can use only one Function and code an if-then-else or switch statement in your handler's JavaScript. -=== Keyspaces +[#eventing-storage] +=== Eventing Storage +The Eventing Storage is the Eventing Function's metadata bucket. +The metadata bucket stores artifacts, or configuration documents, that contain information about DCP streams, worker allocations, Timer information and state, and internal checkpoints. -A keyspace is a fully qualified path to a collection of the form "bucket-name.scope-name.collection-name". For backward compatibility a keyspace can also be of the form "bucket-name._default._default" which is the form of a 6.6 bucket upgraded to 7.0. The two terms keyspace and collection can be considered equivalent. +When you create an Eventing Function, you must make sure that a separate collection has been designated as an Eventing metadata and reserved for the Eventing Service's internal use. +You can use a common Eventing metadata collection across multiple Eventing Functions for the same tenant. -[#eventing-keyspaces] -=== Eventing Keyspaces +The Eventing Storage keyspace must be in a Couchbase-type bucket. +If this keyspace is not persistent, the Data Service evicts Timer and checkpoint documents when it hits quota, and loses track of Timers and mutations that have been processed. -There are two keyspaces used by every Eventing Function: the Listen To Location (the Eventing source) collection and the Eventing Storage (the Eventing metadata) collection. +NOTE: Do not delete the Eventing metadata collection. +Make sure that your Function's JavaScript code does not perform a write or delete operation on the Eventing metadata collection. +If you delete the metadata collection, all deployed Eventing Functions are undeployed and all associated indexes and constructs are dropped. -*Listen To Location (the Eventing source)* +[#function-settings] +=== Eventing Function Settings +[cols="1,2",options="header"] +|=== +|Function setting +|Description -Couchbase Eventing Functions use a collection as the source of data mutations. This collection is referred to as the Eventing source. This source collection can be either Couchbase or Ephemeral keyspace type. However, memcached keyspace types are not supported. +|Function Name +a|A unique name for your Eventing Function. -When you are creating an Eventing Function, you need to specify a source collection. The handler(s) of *OnUpdate* and/or *OnDelete* are the entry points that receive events from this collection via DCP to both receive and track data mutations. +The Function name must: -NOTE: You can have multiple Eventing Functions running different code listening to the same source collection. However it is less resource intensive to use just one Eventing Function and merely code an if-then-else or switch statement in your handler’s JavaScript. 
+* Start with an uppercase character (A-Z), lowercase character (a-z), or number (0-9) +* Contain only uppercase characters (A-Z), lowercase characters (a-z), numbers (0-9), underscores (_), and hyphens (-) -When a source collection is deleted, all deployed (or paused) Eventing Functions associated with this source collection are undeployed. +|Description +|An optional description that describes the purpose of your Eventing Function. -The `Listen To` can listen to multiple collections via a wildcard of `{asterisk}` for the scope and/or the collection. -For these functions, if the bucket binding used by the JavaScript code also contains a wildcard of `{asterisk}` for the scope and/or the collection only the Advanced Keyspace Accessors will be able to read or write the Data Service (or KV). +|Deployment Feed Boundary +|The Feed Boundary determines if the Eventing Function's activities need to include documents that already exist. -In the course of processing the JavaScript code of an Eventing Function, documents can be mutated in different collections. For understanding purposes, these keyspaces can be termed as destination collections which are bound to the Function via Bucket aliases. +When you set the Feed Boundary to `Everything`, the Function deploys all mutations available in your database. +When you set the Feed Boundary to `From Now`, the Function only processes instances of data mutation that happen after the Function's deployment. -At times, the Eventing Function's JavaScript code can trigger data mutations on documents via the Data Service (KV) via either Basic Keyspace Accessors or Advanced Keyspace Accessors. -If the Eventing Function code directly modifies documents in the source collection, the Eventing Service will suppress the mutation back to the Eventing Function making the mutation. -When implementing multiple Functions it is possible to create infinite recursions, however the Eventing Service by default will prevent deploying Functions that would result in recursion loops. It should be noted that not all recursion loops can be detected nor are all recursion loops wrong -- the default recursion checks can be disabled. For more detail on cyclic generation of data changes, refer to xref:troubleshooting-best-practices.adoc#cyclicredun[Bucket Allocation Considerations]. +The Feed Boundary also works as a checkpoint for paused Functions. +When you resume a paused Function, the Feed Boundary makes sure that no mutations are lost or processed again. -At times, the Eventing Function's JavaScript code can trigger data mutations on documents via the Query Service ({sqlpp}) via inline {sqlpp} statements or N1QL() function calls. In this case the Eventing Function will see the mutation it just generated and additional business logic may be needed to terminate or protect against possible recursion. +You can only modify the Feed Boundary when you create a Function or when a Function is undeployed or paused. -*Eventing Storage (the Eventing metadata)* +|System Log Level +|Determines the granularity of messages logged across the Eventing Function. -The Eventing Storage (or Metadata) collection, stores artifacts (or configuration documents) that contain information about DCP streams, worker allocations, timer information/state, and internal checkpoints. +Can be one of `Info` (the default), `Error`, `Debug`, `Warning`, or `Trace`. 
-When you are creating an Eventing Function, ensure that a separate collection is designated as an Eventing metadata and reserved solely for the internal use of the Eventing Service. You can use a common Eventing metadata collection across multiple Eventing Functions for the same tenant. +|Application Log Location +|The directory path to the log file for the Eventing Function. +The format is `.log`. -NOTE: The Eventing Storage keyspace must be in a Bucket of type Couchbase. If this keyspace is not persistent the Data Service, or KV, will evict timer and checkpoint documents on hitting quota and Eventing can lose track of both timers and mutations processed. Furthermore at any point, refrain from deleting the Eventing metadata collection. Also, ensure that your Eventing Function's JavaScript code or other services do not perform a write or delete operation on the Eventing metadata collection. +The Function uses `log()` statements to write to this file. +When you select the *Log* value on the UI, all log files are combined across Eventing nodes and displayed. -If an Eventing metadata collection gets accidentally deleted, then all deployed Eventing Function are undeployed and associated indexes and constructs get dropped. +The log value is read-only and cannot be changed. -*Function Name* +|{sqlpp} Consistency +|The default consistency level of {sqlpp} statements in the Eventing Function. +You can set the consistency level by statement. -All Eventing Functions must have a unique name in a Couchbase cluster. A Function name can only start with characters in range A-Z, a-z, 0-9, and can only contain characters in range A-Z, a-z, 0-9, underscore, and hyphen. +Can be one of `None` (the default) or `Request`. -*Deployment Feed Boundary* +|Workers +|The number of worker threads per node to be allocated to the Eventing Function to process events. +Allows the Function to scale up. -Using the `Feed Boundary` drop down, you can either set an Eventing Function to deploy for all data mutations available in the cluster (`Everything`) or choose to deploy the Eventing Function to process only future data mutations, post deployment (`From now`). However, once deployed you may Pause/Resume an Eventing Function in the Resume case; the Feed Boundary is a checkpoint of the Function's actual progress when the Function was paused such that no mutations are reprocessed or lost upon a subsequent Resume. +The minimum number of workers is `1` (the default) and the maximum is `64`. -*Description* +|Language Compatibility +|The language version of the Eventing Function for backward compatibility. -The Description is an optional text that can be added to the Function, typically to describe the purpose of the particular business logic. +If the semantics of a language construct change during a release, the Language Compatibility setting makes sure that an older Eventing Function continues to produce the runtime behavior from when the Function was initially created. +The older Function only stops this behavior when the behavior is deprecated and removed. -[#function-settings] -=== Eventing Function Settings +Couchbase versions 6.0.0, 6.5.0, and 6.6.2 are the only versions that are currently defined. +New Functions default to the highest compatibility version available of 6.6.2. -There are several advanced settings (by default hidden within a collapsible panel) that can be adjusted. 
The System Log Level, {sqlpp} Consistency, Workers, Language compatibility, Script Timeout, and Timer Context Max Size are additional options available during the Eventing Function definition process. +In version 6.5.0, trying to access a non-existing item from a keyspace returns an undefined value. +In version 6.0.0, it throws an exception. -* *System Log Level*: Determines the granularity at which messages are logged to the common system log messages across all Eventing Functions. The available choices are: `Info` (the default), `Error`, `Debug`, `Warning`, and `Trace`. -+ -Typically you will never need to adjust this from the default setting of `Info`, the data in this file is generally only used by support. +Only a Function with a language compability setting of version 6.6.2 passes binary documents to Eventing Function handlers. +Versions 6.0.0 and 6.5.0 filter all binary documents out of the DCP mutation stream. -* *Application log location* The directory path to the log file for the application or the Function specific log messages named <>.log. -The Function designer uses log() statements to write to this file in addition it will also record some Function specific system level errors. -In the UI when "Log" is selected these files are combined across all Eventing nodes and displayed. This value is read-only and set at system initialization time and cannot be subsequently changed. +|Script Timeout +|The number of seconds to elapse before the script times out and is terminated. -* *{sqlpp} Consistency*: The default consistency level of {sqlpp} statements in the Eventing Function. -This controls the consistency level for {sqlpp} statements, but can be set on a per statement basis. The valid values are `None` (the default) and `Request`. +The entry points into the handler processing for each mutation must run from start to finish before the specified timeout duration. -* *Workers*: Workers the number of worker processes to be started for the Eventing Function. -Allows the Eventing Function to be scaled up (or vertical scaling). Each worker process supports two fixed threads of execution, however this setting is limited to a maximum of 64 for system optimization purposes. -The system automatically generates a warning message if the number of workers exceeds a set threshold based upon cluster resources, however, in this case the handler can still be deployed. -The minimum value is 1 (the default) and the recommended maximum is 64. In most cases the maximum should be the number of vCPUs. +The default number of seconds is `60`. -* *Language compatibility*: The language version of the Eventing Function for backward compatibility. -+ -If the semantics of a language construct change in any given release the “Language compatibility” setting will ensure an older Eventing Function will continue to see the runtime behavior that existed at the time it was authored, until such behavior is deprecated and removed. Note 6.0.0, 6.5.0, and 6.6.2 are the only currently defined versions and for newly authored Functions the default is the highest compatibility version available, currently 6.6.2. -+ -For example, accessing non-existent items from a keyspace returns undefined in 6.5.0, while in 6.0.0 an exception is thrown. In addition, only a Function with “language compatibility” of 6.6.2 in its settings will pass binary documents to the OnUpdate(doc,meta) handler. 
In addition, values of 6.0.0 and 6.5.0 will filter all binary documents out of the DCP mutation stream, only 6.6.2 will pass binary documents to the Eventing Function handlers. +|Time Context Max Size +|The size limit of the context for any Timer created by the Eventing Function. -* *Script Timeout*: Script Timeout provides a timeout option to terminate a non-responsive Function. -+ -The entry points into the handler, e.g. OnUpdate and OnDelete, processing for each mutation must complete from start to finish prior to this specified timeout duration. The default is 60 seconds. +A context can be any JSON document. +Timers can store and access a context, which is then used to store the state of when a Timer is created and to retrieve the state of when a Timer is fired. -* *Timer Context Max Size*: Timer Context Max Size limits the size of the context for any Timer created by the Function. -+ -Eventing Timers can store and access a context which can be any JSON document, the context is used to store state when the timer is created and retrieve state when the timer fires. By default the size is 1024 bytes, but this can be adjusted on a per Function basis. +The default is `1024`. -[#section_mzd_l1p_m2b] -=== Bindings +|=== -A binding is a construct that allows separating environment specific variables (example: keyspace names, external endpoint URLs plus credentials, or global constants) from the Eventing Function's source code. It provides a level of indirection between environment specific artifacts to symbolic names, to help moving an Eventing Function definition from development to production environments without changing code. Binding names must be valid JavaScript identifiers and must not conflict with any built-in types. -An Eventing Function can have no binding, one binding, or several bindings. There are three distinct types of bindings: +== Operations -*Bucket alias* +Operations exposed through the UI, couchbase-cli, and REST APIs. -Bucket aliases allow the JavaScript in an Eventing Function to access Couchbase KV collections from the Data Service or KV. The keyspaces (bucket.scope.collection) are then accessible by the bound name as a JavaScript map in the global space of the Eventing Function. +=== Deploy -An Eventing Function can listen to multiple collections via a wildcard of `{asterisk}` for the scope and/or the collection. -For these functions, the bucket alias (or binding) used by the JavaScript code can also contain a wildcard of `{asterisk}` for the scope and/or the collection. -If bucket alias contains a wildcard of `{asterisk}` only the Advanced Keyspace Accessors will be able to read or write the Data Service (or KV). +The deploy operation activates an Eventing Function in a cluster. +It performs validations and allows only valid Eventing Functions to be deployed. -You can add bucket aliases via the 'Bucket alias' choice then entering a tuple of: alias-name, keyspace, and an access level. Where the alias-name that you can use to refer to the keyspace or collection from your Eventing Function code; the keyspace is the full path to a collection in the cluster; and the access level to the keyspace is either 'read only' or 'read and write'. +Deploying an Eventing Function: -NOTE: One or more Bucket alias bindings (or Bucket aliases) are mandatory when your Eventing Function code performs any collection related operations directly against the Data Service. 
+* Creates necessary metadata +* Spawns worker processes +* Calculates initial partitions +* Initiates check-pointing of DCP streams to process +* Allows the Function to receive and process mutations and Timer callbacks -* Read Only Bindings: A binding with access level of "Read Only" allows reading documents from the collection, but cannot be used to write (create, update or delete) documents in such a collection. Attempting to do so will throw a runtime exception. +You cannot edit the source code of a deployed Eventing Function. -* Read-Write Bindings: A binding with access level of "Read Write" allows both reading and writing (create, update, delete) of documents in the collection. If you wish to modify the document passed to the OnUpdate entry point (or any other document in the source collection) you will need to provide a Read-Write binding alias to the Function's source collection. +During deployment, you must choose one of the following *Deployment Feed Boundary* settings: -*URL alias* +* *Everything*, which provides the Eventing Function with a deduplicated history of all documents, ending with the current value of each document. This means the Function sees every document in the keyspace at least once. +* *From now*, which provides the Eventing Function with mutations starting at deployment. This means the Function only sees documents that have mutated after the Function's deployment. -These bindings are utilized by the cURL language construct to access external resources. The binding specifies the endpoint, the protocol (http/https), and credentials if necessary. Cookie support can be enabled via the binding if desired when accessing trusted remote nodes. When a URL binding limits access through to be the URL specified or descendants of it. The target of a URL binding should not be a node that belongs to the Couchbase cluster. +=== Undeploy -You can add URL bindings via the 'URL alias' choice then entering the following: alias-name, URL, allow cookies setting, security settings of validate SSL certificate and an auth type of (no auth, basic, bearer, and digest). For more details refer to xref:eventing-curl-spec.adoc#bindings[cURL Bindings]. +The undeploy operation causes the Eventing Function to stop processing events of all types. +It also shuts down the worker processes associated with the Function. -*Constant alias* +Undeploying an Eventing Function: -These bindings are utilized by the Eventing Function's JavaScript code as global variables. +* Deletes all Timers and context documents created by the Function +* Releases any runtime resources acquired by the Function -You can add URL bindings via the 'Constant alias' choice then entering an alias-name and value. The value can be either an integer, decimal number, string, boolean, or a JSON object. For example you might have an alias of _debug_ with a value of _true_ (or _false_) to control verbose logging this would act just like adding a statement `const debug = true;` at the beginning of your JavaScript code (_although the Eventing syntax wouldn't allow this global to be added to the actual JavaScript_). +You can edit the code and change the settings of an undeployed Eventing Function. +When you create a new Eventing Function, the Function's state is undeployed. -== Operations +=== Pause -The following operations are exposed through the UI, couchbase-cli and REST APIs. +The pause operation causes the Eventing Function to pause all mutations and Timer callbacks. 
+It also performs a checkpoint to be used for resuming the Function. -=== Deploy +You can edit the code and change the settings of a paused Eventing Function. +You can also resume or undeploy a paused Function. -The deploy operation activates an Eventing Function in a cluster. +=== Resume -This operation activates an Eventing Function. Source validations are performed, and only valid Eventing Function can be deployed. Deployment transpiles the code and creates the executable artifacts. The source code of an activated (or deployed and running) Eventing Function cannot be edited. Unless an Eventing Function is in deployed state, it will not receive or process any events (mutations or Timer callbacks). Deployment of an Eventing Function creates necessary metadata, spawns worker processes, calculates initial partitions, and initiates check-pointing of DCP stream to processes. +The resume operation continues processing mutations and Timer callbacks of an Eventing Function that was previously paused. -Deployment for DCP observer (or Feed Boundary) has two variations controlled by the setting of the Eventing Function's "Deployment Feed Boundary": +The resume operation is similar to the deploy operation, but it uses a progress checkpoint to restart the Function. This means no mutations are lost or processed again. -* Everything: The Eventing Function will see a deduplicated history of all documents, ending with the current value of each document. Hence, the Eventing Function will see every document in the keyspace at least once. +When you resume a Function, the backlog of mutations that occurred when the Function was in a paused state is processed. +The backlog of Timers also fires, even if the time of the Timers has already passed. -* From now: The Eventing Function will see mutations from current time. In other words, the Eventing Function will see only documents that mutate after it is deployed. +Depending on the system capacity and on how long the Function was paused, clearing the backlog can take some time. +After the backlog is cleared, the Function goes on to process current mutations and Timers. -=== Undeploy +=== Delete -This operation causes the Eventing Function to stop processing events of all types and shuts down the worker processes associated with the Eventing Function. It deletes all timers created by the Eventing Function being undeployed and their context documents. It releases any runtime resources acquired by the Eventing Function. An Eventing Function in the Undeployed state can have its code edited and settings altered. Newly created Eventing Functions start in Undeployed state. +The delete operation deletes the following in the Eventing Function: -=== Pause +* The source code implementing the Function +* All Timers and Timer contexts +* All processing checkpoints +* Application logs +* Any other artifacts in the metadata provider -This action stops all processing associated with an Eventing Function including timer callbacks and performs a checkpoint (to be used for a subsequent resume). An Eventing Function in the Paused state can have its code edited and settings altered. Eventing Functions in Paused state can be either Resumed or Undeployed. +You can only delete an undeployed Eventing Function. -=== Resume +=== Debug -This action continues processing of an Eventing Function that was previously Paused. 
The Resume process is akin to a Deploy but utilizes a progress checkpoint (made when the Eventing Function was paused) to restart such that no mutations are reprocessed or lost. The backlog of mutations that occurred when the Eventing Function was paused will now be processed. The backlog of timers that came due when the Eventing Function was paused will now fire even if that timer is now in the past. Depending on the system capacity and how long the Eventing Function was paused, clearing the backlog may take some time before Eventing Function moves on to current mutations and timers. +The debug operation traps and sends the next event instance received by the Eventing Function to a separate v8 worker with debugging enabled. +Debug is a special flag that can be attach to a Function. -=== Delete +The debug operation pauses the trapped event, opens a TCP port, and generates a Chrome Developer Tools URL with a session cookie that can be used to control the debug worker. -When an Eventing Function is deleted, the source code implementing the Eventing Function, all timers and timer contexts, all processing checkpoints, application logs and other artifacts in the metadata provider are purged. A future Eventing Function by the same name has no relation to a prior deleted Eventing Function of the same name. Only undeployed Eventing Function can be deleted. +With the exception of the trapped event instance, all other Eventing Function events continue processing. +When the trapped event finishes debugging, the debug operation traps another event instance. +This continues until you stop the operation. -=== Debug -Debug is a special flag on an Eventing Function that causes the next event instance received by the Eventing Function to be trapped and sent to a separate v8 worker with debugging enabled. The debug worker pauses the trapped event processing and opens a TCP port and generates a Chrome Developer Tools URL with a session cookie that can be used to control the debug worker. All other events, except the trapped event instance, continue unencumbered. If the debugged event instance completes execution, another event instance is trapped for debugging, and this continues till debugging is stopped, at which point any trapped instance runs to completion and the debugging worker becomes passive. +== See Also -Debugging is convenience feature intended to help during Eventing Function development and should not be used in production environments. It also be noted that using the debugger does not provide correctness or functionality guarantees. +* xref:eventing-advanced-keyspace-accessors.adoc[Advanced Keyspace Accessors] +* xref:eventing-language-constructs.adoc#basic_bucket_accessors[Basic Keyspace Accessors] \ No newline at end of file diff --git a/modules/eventing/partials/nav.adoc b/modules/eventing/partials/nav.adoc index fe6974f4..d4e7ff5f 100644 --- a/modules/eventing/partials/nav.adoc +++ b/modules/eventing/partials/nav.adoc @@ -1,6 +1,6 @@ * xref:eventing:eventing-overview.adoc[Eventing] - ** xref:eventing:eventing-Terminologies.adoc[Terminology] + ** xref:eventing:eventing-terminologies.adoc[Terminology] ** xref:eventing:eventing-language-constructs.adoc[Language Constructs] *** xref:eventing:eventing-advanced-keyspace-accessors.adoc[Advanced Keyspace Accessors] From 829b207da1f9d97fa4b6676ab7047ff067b20c85 Mon Sep 17 00:00:00 2001 From: Julia Browne Date: Sat, 16 Mar 2024 00:39:55 -0700 Subject: [PATCH 7/8] Revert "More updates" This reverts commit faa5a2a14f3a898e73f85711aab6e7b7b46e4218. 
--- .../pages/eventing-Terminologies.adoc | 314 +++++++++--------- modules/eventing/partials/nav.adoc | 2 +- 2 files changed, 156 insertions(+), 160 deletions(-) diff --git a/modules/eventing/pages/eventing-Terminologies.adoc b/modules/eventing/pages/eventing-Terminologies.adoc index aa5cc66a..87975f9c 100644 --- a/modules/eventing/pages/eventing-Terminologies.adoc +++ b/modules/eventing/pages/eventing-Terminologies.adoc @@ -1,257 +1,253 @@ -= Eventing Terminology -:description: The following terminology is used by the Eventing Service. -:page-toclevels: 2 += Terminology +:description: While using Eventing Service, the following terminologies are used. +:page-edition: Enterprise Edition + [abstract] {description} +== Eventing Service +The Eventing Service can run one or more Eventing Functions that offer a computing paradigm by which developers can react to mutations (or data changes) via the user code, specifically JavaScript entry points (or handlers) of *OnUpdate*, *OnDelete*, or a user defined *Timer callback*. System resources are managed at or above the Eventing Function level, and the containing Eventing Function scopes the state of all user code. +== Eventing Function +An Eventing Function is a collection of JavaScript functions and settings (or configuration) together react to a class of events. An Eventing Function is a stateless short running piece of a self-contained program that must execute from start to end prior to a specified timeout duration. +Eventing Functions can interact with the Data Service (KV), the Query Service ({sqlpp}), and external REST services via a built-in cURL function call. Eventing Functions can also schedule a callback to an internal JavaScript function sometime in the future by creating a Timer. +The Eventing Service routes mutations to the entry points *OnUpdate* or *OnDelete* and fired Timers to a user defined *Timer callback*. +NOTE: Since the 6.5 release, the JavaScript code in the Eventing Function is compressed (with the compressed size limited to 128KB) in the Couchbase Server. -Typically you should set Function Scope to the bucket.scope that holds the collection that is the source of your mutations to your Eventing Function. This best practice ensures that you _*do not*_ inadvertently cause an Eventing Function to undeploy by removing a *Function Scope* pointing to a resource that is not required for the function to run. +=== Handler +The Eventing Service calls the following entry points or JavaScript functions on events (mutations or fired timers). +==== Insert/Update Handler -[#function-scope] -=== Function Scope -You can use the Function scope, or `bucket.scope`, to identify Functions that belong to the same group. +The *OnUpdate* handler gets called when a document is created or modified. Two major limitations exist. First, if a document is modified several times in a short duration, the calls may be coalesced into a single event due to deduplication. Second, it is not possible to discern between Create and Update operations. -To set the `bucket.scope` to `+`.`+`, you must have the `Eventing Full Admin` or the `Full Admin` role. -All other users must use a scope that references an existing resource of their `bucket.scope`. +The entry point OnUpdate(doc,meta) passes both `doc`, the document, and `meta`, additional data containing useful information such as the document's id, CAS, expiration, and datatype ("json" or "binary"). 
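For illustration, a minimal `OnUpdate` entry point that uses only these `meta` fields and the built-in `log()` function could look like the following sketch (field names follow the description above; no bindings are assumed):

[source,javascript]
----
function OnUpdate(doc, meta) {
    // Skip binary payloads; this sketch only handles JSON documents
    if (meta.datatype !== "json") return;
    // meta.id is the document key; cas and expiration come from the mutation metadata
    log("mutation for", meta.id, "cas", meta.cas, "expiration", meta.expiration);
}
----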
+NOTE: Unless the _Language compatibility_ in the settings of the Function is at least 6.6.2 binary documents will be suppressed. +==== Delete Handler -[#eventing-keyspaces] -=== Eventing Keyspaces +The *OnDelete* handler gets called when a document is deleted (or expired). -A keyspace is a path to a collection in the format `bucket-name.scope-name.collection-name`. +The entry point OnDelete(meta,options) passes both `meta` which contains useful information such as the document (see above) and also `options` which has one boolean parameter `options.expired` to indicate if the removal was due to a deletion or an expiration. -For backward compatibility, you can also use the format `bucket-name._default._default`. -This is the format of a bucket from Couchbase Server version 6.6 that is upgraded to version 7.0. +One major limitation exists - it is not possible to get the value of the document that was just deleted or expired. -The following are the two keyspaces used by Eventing Functions: +==== Timer Callback Handler -* <>, which represents the Eventing source -* <>, which represents the Eventing metadata +Timer callbacks are user defined JavaScript functions passed as the callback argument to the built-in createTimer(callback, date, reference, context) function call. -[#listen-to-location] -==== Listen to Location +After creating a timer the argument "callback" or JavaScript function will be executed at or close to the desired "date" argument. The "reference" argument is an identifier for the timer scoped to an Eventing function and callback. The "context" argument must be serializable data that is available to the callback when the timer is fired. +For more information see xref:eventing-timers.adoc#createtimer-function[createTimer function]. -Eventing Functions use a collection as the source for their data mutations. -This collection is called the Eventing source, and can be made up of Couchbase or Ephemeral keyspace types. -Memcached keyspace types are not supported. +=== Statelessness -When you create an Eventing Function, you must specify a source collection. -The `OnUpdate` and `OnDelete` handlers are the entry points for this collection; they receive events and receive and track data mutations. -When you delete a source collection, all deployed and paused Functions associated with the collection are undeployed. +The persistent state of an Eventing Function is captured in the below external elements, and all states that appears on the execution stack are ephemeral -While a Function is processing its JavaScript code, the Function's documents can be mutated in different collections. -You can set keyspaces as destination collections, which are then bound to the Function through bucket aliases. +* The Listen To Location (the Eventing source) a collection that is the source of the mutations sent to the Function via the Database Change Protocol (DCP). +* The Eventing Storage (the Eventing metadata) a collection used as a scratch pad for the Function's state (this can be shared across all a tenant's Functions). +* The documents or mutations being observed along with their extended attributes. +* Optional Bindings for Function. There are three distinct types of bindings: +** Bucket alias, an alias and access mode used by the Function to access a collection. +** URL alias, an alias and HTTP/S settings used by the Function to access external REST APIs. +** Constant alias, an alias to an integer, decimal number, string, boolean, or a JSON object used as a global variable within the Function. 
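As an illustrative sketch of the handler signatures and the `createTimer()` call described above (the 60-second delay, the `TimerCallback` name, and the context shape are arbitrary choices, not part of any shipped example):

[source,javascript]
----
function OnUpdate(doc, meta) {
    // Schedule a callback roughly 60 seconds out, keyed by the document id;
    // the small serializable context is handed to the callback when the timer fires
    createTimer(TimerCallback, new Date(Date.now() + 60 * 1000), meta.id, { docId: meta.id });
}

function OnDelete(meta, options) {
    // options.expired distinguishes an expiration from an explicit delete
    log(options.expired ? "expired" : "deleted", meta.id);
}

function TimerCallback(context) {
    log("timer fired for", context.docId);
}
----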
-The Function's JavaScript code triggers data mutations on documents through Basic Keyspace Accessors or Advanced Keyspace Accessors in the Data Service. -If the code directly modifies documents in the source collection, the Eventing Service suppresses the mutation back to the Function performing the mutation. +=== Deduplication -The Function's JavaScript code can also trigger mutations on documents through inline {sqlpp} statements in the Query Service or `N1QL()` function calls. +Couchbase does not store every version of a document permanently. Hence, when a handler receives the mutation history of documents from the Eventing source, it sees a truncated history of each document. However, the final state of a document is always present in all such histories (as the current state is always available in the database). -NOTE: When you implement multiple Functions, you can create infinite recursions. -The Eventing Service prevents the deployment of Functions that might result in recursion loops. -For more information abotu cyclic generation of data changes, see xref:troubleshooting-best-practices.adoc#cyclicredun[Bucket Allocation Considerations]. +Similarly, the KV data engine deduplicates multiple mutations made to any individual document rapidly in succession, to ensure highest possible performance. So, when a document mutates rapidly, handlers may not see all intermediate states, but in all cases, will see the final state of the document. -To get the `Listen To` keyspace to listen to multiple collections, you can use a `{asterisk}` wildcard for the scope or collection. -If the bucket binding used by the JavaScript code also has a `{asterisk}` wildcard for its scope or collection, you must use Advanced Keyspace Accessors to read or write the Data Service. -For more information about Advanced Keyspace Accessors, see xref:eventing-advanced-keyspace-accessors.adoc#multiple-collection-functions[Eventing Functions that Listen to Multiple Collections]. +=== Recursive Mutation -TIP: You can have multiple Functions listening to the same collection while running different code. -To use less resources, though, you can use only one Function and code an if-then-else or switch statement in your handler's JavaScript. +An abbreviation of convenience of the term Potentially Recursive Mutation. When a handler manipulates documents in a keyspace that also serves as the source of mutations to this or any other handler, a write originated by a handler will cause a mutation to be seen by itself or another handler. These are called potentially recursive mutations. -[#eventing-storage] -=== Eventing Storage -The Eventing Storage is the Eventing Function's metadata bucket. -The metadata bucket stores artifacts, or configuration documents, that contain information about DCP streams, worker allocations, Timer information and state, and internal checkpoints. +[#json_number_percision] +=== JSON Number Precision -When you create an Eventing Function, you must make sure that a separate collection has been designated as an Eventing metadata and reserved for the Eventing Service's internal use. -You can use a common Eventing metadata collection across multiple Eventing Functions for the same tenant. +JSON does not have specialized types for integral and floating-point numbers. So many JavaScript runtimes utilize floating-point numbers to hold JSON numbers. This means that JavaScript numbers have a very large range but lesser precision when compared to traditional integers of the same size. 
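As the next paragraph quantifies, this floating-point representation runs out of integer precision near 2^53^, which is why very large tokens are often safer to carry as strings. A short illustrative sketch (the numeric literals and CAS-style strings are made-up values):

[source,javascript]
----
function OnUpdate(doc, meta) {
    // 2^53 + 1 is not representable as a 64-bit float; the literal rounds to 2^53
    log(9007199254740993 === 9007199254740992);   // logs true - precision already lost

    // Very large tokens (for example CAS values) keep full fidelity as strings
    var casA = "1718811480577998848";
    var casB = "1718811480577998848";
    log(casA === casB);                           // logs true - exact comparison
}
----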
-The Eventing Storage keyspace must be in a Couchbase-type bucket. -If this keyspace is not persistent, the Data Service evicts Timer and checkpoint documents when it hits quota, and loses track of Timers and mutations that have been processed. +v8 utilizes 64-bit floating-point numbers which yields a 53-bit precision. So, only integers up to +/- 2^53^ can safely be handled in Eventing JavaScript. When handling very large integers, that is, numbers having 15 or more digits, one should utilize JavaScript BigInt types to safely handle them. The exact numbers where integral precision is lost are defined by JavaScript in the constants `Number.MAX_SAFE_INTEGER` and `Number.MIN_SAFE_INTEGER`. -NOTE: Do not delete the Eventing metadata collection. -Make sure that your Function's JavaScript code does not perform a write or delete operation on the Eventing metadata collection. -If you delete the metadata collection, all deployed Eventing Functions are undeployed and all associated indexes and constructs are dropped. +Often, such large integers are really only tokens, and it is not necessary to perform arithmetic on them, and only comparison for equality is necessary. Examples of this in Eventing are CAS values generated by Advanced Bucket Operations, or the result of the crc64() function. In these cases, it is appropriate to hold these large integers as strings, as it ensures full fidelity while retaining the ability to do equality comparisons. -[#function-settings] -=== Eventing Function Settings -[cols="1,2",options="header"] -|=== -|Function setting -|Description +=== Feed Boundary -|Function Name -a|A unique name for your Eventing Function. +Feed Boundary is a time or progress milestone used during Eventing Function configuration. The Feed Boundary is a persistent setting in the Function's definition and can only be set or altered when a Function is created, undeployed or paused. -The Function name must: +Based on the `Feed Boundary` setting, when an Eventing Function is deployed it can either process all data mutations available in the cluster (`Everything`) or process only future data mutations (`From now`) that occur post deployment. However, once deployed, you may Pause/Resume an Eventing Function; in this case, the Feed Boundary is a checkpoint of the Function's actual progress such that no mutations or timers are reprocessed or lost. -* Start with an uppercase character (A-Z), lowercase character (a-z), or number (0-9) -* Contain only uppercase characters (A-Z), lowercase characters (a-z), numbers (0-9), underscores (_), and hyphens (-) +=== Function Scope -|Description -|An optional description that describes the purpose of your Eventing Function. +A bucket.scope combination is used for identifying functions belonging to the same group. -|Deployment Feed Boundary -|The Feed Boundary determines if the Eventing Function's activities need to include documents that already exist. +Only the "Eventing Full Admin" role and the "Full Admin" role can set the bucket.scope to *+*+.+*+*; all other Eventing non-privileged users need to define a *Function Scope* for their Eventing functions that references an existing resource of bucket.scope. +This provides role based isolation of Eventing functions between non-privileged users.
+Typically you should set Function Scope to the bucket.scope that holds the collection that is the source of your mutations to your Eventing Function. This best practice ensures that you _*do not*_ inadvertently cause an Eventing Function to undeploy by removing a *Function Scope* pointing to a resource that is not required for the function to run. -The Feed Boundary also works as a checkpoint for paused Functions. -When you resume a paused Function, the Feed Boundary makes sure that no mutations are lost or processed again. +=== Keyspaces -You can only modify the Feed Boundary when you create a Function or when a Function is undeployed or paused. +A keyspace is a fully qualified path to a collection of the form "bucket-name.scope-name.collection-name". For backward compatibility a keyspace can also be of the form "bucket-name._default._default" which is the form of a 6.6 bucket upgraded to 7.0. The two terms keyspace and collection can be considered equivalent. -|System Log Level -|Determines the granularity of messages logged across the Eventing Function. +[#eventing-keyspaces] +=== Eventing Keyspaces -Can be one of `Info` (the default), `Error`, `Debug`, `Warning`, or `Trace`. +There are two keyspaces used by every Eventing Function: the Listen To Location (the Eventing source) collection and the Eventing Storage (the Eventing metadata) collection. -|Application Log Location -|The directory path to the log file for the Eventing Function. -The format is `.log`. +*Listen To Location (the Eventing source)* -The Function uses `log()` statements to write to this file. -When you select the *Log* value on the UI, all log files are combined across Eventing nodes and displayed. +Couchbase Eventing Functions use a collection as the source of data mutations. This collection is referred to as the Eventing source. This source collection can be either Couchbase or Ephemeral keyspace type. However, memcached keyspace types are not supported. -The log value is read-only and cannot be changed. +When you are creating an Eventing Function, you need to specify a source collection. The handler(s) of *OnUpdate* and/or *OnDelete* are the entry points that receive events from this collection via DCP to both receive and track data mutations. -|{sqlpp} Consistency -|The default consistency level of {sqlpp} statements in the Eventing Function. -You can set the consistency level by statement. +NOTE: You can have multiple Eventing Functions running different code listening to the same source collection. However it is less resource intensive to use just one Eventing Function and merely code an if-then-else or switch statement in your handler’s JavaScript. -Can be one of `None` (the default) or `Request`. +When a source collection is deleted, all deployed (or paused) Eventing Functions associated with this source collection are undeployed. -|Workers -|The number of worker threads per node to be allocated to the Eventing Function to process events. -Allows the Function to scale up. +The `Listen To` can listen to multiple collections via a wildcard of `{asterisk}` for the scope and/or the collection. +For these functions, if the bucket binding used by the JavaScript code also contains a wildcard of `{asterisk}` for the scope and/or the collection only the Advanced Keyspace Accessors will be able to read or write the Data Service (or KV). -The minimum number of workers is `1` (the default) and the maximum is `64`. 
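For illustration, an Advanced Keyspace Accessor call might look like the following sketch; the bucket alias `dst_col` is a hypothetical binding, and when the binding itself uses wildcards the `meta` argument must additionally identify the target keyspace (see the Advanced Keyspace Accessors page):

[source,javascript]
----
function OnUpdate(doc, meta) {
    // CAS-free read through the advanced accessor API
    var res = couchbase.get(dst_col, { id: meta.id });
    if (res.success) {
        log("current value of", meta.id, "is", res.doc);
    } else {
        log("get failed for", meta.id, res.error);
    }
    // CAS-free upsert of a small derived document
    couchbase.upsert(dst_col, { id: "summary::" + meta.id }, { seen: true });
}
----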
+In the course of processing the JavaScript code of an Eventing Function, documents can be mutated in different collections. For understanding purposes, these keyspaces can be termed as destination collections which are bound to the Function via Bucket aliases. -|Language Compatibility -|The language version of the Eventing Function for backward compatibility. +At times, the Eventing Function's JavaScript code can trigger data mutations on documents via the Data Service (KV) via either Basic Keyspace Accessors or Advanced Keyspace Accessors. +If the Eventing Function code directly modifies documents in the source collection, the Eventing Service will suppress the mutation back to the Eventing Function making the mutation. +When implementing multiple Functions it is possible to create infinite recursions, however the Eventing Service by default will prevent deploying Functions that would result in recursion loops. It should be noted that not all recursion loops can be detected nor are all recursion loops wrong -- the default recursion checks can be disabled. For more detail on cyclic generation of data changes, refer to xref:troubleshooting-best-practices.adoc#cyclicredun[Bucket Allocation Considerations]. -If the semantics of a language construct change during a release, the Language Compatibility setting makes sure that an older Eventing Function continues to produce the runtime behavior from when the Function was initially created. -The older Function only stops this behavior when the behavior is deprecated and removed. +At times, the Eventing Function's JavaScript code can trigger data mutations on documents via the Query Service ({sqlpp}) via inline {sqlpp} statements or N1QL() function calls. In this case the Eventing Function will see the mutation it just generated and additional business logic may be needed to terminate or protect against possible recursion. -Couchbase versions 6.0.0, 6.5.0, and 6.6.2 are the only versions that are currently defined. -New Functions default to the highest compatibility version available of 6.6.2. +*Eventing Storage (the Eventing metadata)* -In version 6.5.0, trying to access a non-existing item from a keyspace returns an undefined value. -In version 6.0.0, it throws an exception. +The Eventing Storage (or Metadata) collection, stores artifacts (or configuration documents) that contain information about DCP streams, worker allocations, timer information/state, and internal checkpoints. -Only a Function with a language compability setting of version 6.6.2 passes binary documents to Eventing Function handlers. -Versions 6.0.0 and 6.5.0 filter all binary documents out of the DCP mutation stream. +When you are creating an Eventing Function, ensure that a separate collection is designated as an Eventing metadata and reserved solely for the internal use of the Eventing Service. You can use a common Eventing metadata collection across multiple Eventing Functions for the same tenant. -|Script Timeout -|The number of seconds to elapse before the script times out and is terminated. +NOTE: The Eventing Storage keyspace must be in a Bucket of type Couchbase. If this keyspace is not persistent the Data Service, or KV, will evict timer and checkpoint documents on hitting quota and Eventing can lose track of both timers and mutations processed. Furthermore at any point, refrain from deleting the Eventing metadata collection. 
Also, ensure that your Eventing Function's JavaScript code or other services do not perform a write or delete operation on the Eventing metadata collection. -The entry points into the handler processing for each mutation must run from start to finish before the specified timeout duration. +If an Eventing metadata collection gets accidentally deleted, then all deployed Eventing Function are undeployed and associated indexes and constructs get dropped. -The default number of seconds is `60`. +*Function Name* -|Time Context Max Size -|The size limit of the context for any Timer created by the Eventing Function. +All Eventing Functions must have a unique name in a Couchbase cluster. A Function name can only start with characters in range A-Z, a-z, 0-9, and can only contain characters in range A-Z, a-z, 0-9, underscore, and hyphen. -A context can be any JSON document. -Timers can store and access a context, which is then used to store the state of when a Timer is created and to retrieve the state of when a Timer is fired. +*Deployment Feed Boundary* -The default is `1024`. +Using the `Feed Boundary` drop down, you can either set an Eventing Function to deploy for all data mutations available in the cluster (`Everything`) or choose to deploy the Eventing Function to process only future data mutations, post deployment (`From now`). However, once deployed you may Pause/Resume an Eventing Function in the Resume case; the Feed Boundary is a checkpoint of the Function's actual progress when the Function was paused such that no mutations are reprocessed or lost upon a subsequent Resume. -|=== +*Description* +The Description is an optional text that can be added to the Function, typically to describe the purpose of the particular business logic. -== Operations +[#function-settings] +=== Eventing Function Settings -Operations exposed through the UI, couchbase-cli, and REST APIs. +There are several advanced settings (by default hidden within a collapsible panel) that can be adjusted. The System Log Level, {sqlpp} Consistency, Workers, Language compatibility, Script Timeout, and Timer Context Max Size are additional options available during the Eventing Function definition process. -=== Deploy +* *System Log Level*: Determines the granularity at which messages are logged to the common system log messages across all Eventing Functions. The available choices are: `Info` (the default), `Error`, `Debug`, `Warning`, and `Trace`. ++ +Typically you will never need to adjust this from the default setting of `Info`, the data in this file is generally only used by support. -The deploy operation activates an Eventing Function in a cluster. -It performs validations and allows only valid Eventing Functions to be deployed. +* *Application log location* The directory path to the log file for the application or the Function specific log messages named <>.log. +The Function designer uses log() statements to write to this file in addition it will also record some Function specific system level errors. +In the UI when "Log" is selected these files are combined across all Eventing nodes and displayed. This value is read-only and set at system initialization time and cannot be subsequently changed. -Deploying an Eventing Function: +* *{sqlpp} Consistency*: The default consistency level of {sqlpp} statements in the Eventing Function. +This controls the consistency level for {sqlpp} statements, but can be set on a per statement basis. The valid values are `None` (the default) and `Request`. 
-* Creates necessary metadata -* Spawns worker processes -* Calculates initial partitions -* Initiates check-pointing of DCP streams to process -* Allows the Function to receive and process mutations and Timer callbacks +* *Workers*: Workers the number of worker processes to be started for the Eventing Function. +Allows the Eventing Function to be scaled up (or vertical scaling). Each worker process supports two fixed threads of execution, however this setting is limited to a maximum of 64 for system optimization purposes. +The system automatically generates a warning message if the number of workers exceeds a set threshold based upon cluster resources, however, in this case the handler can still be deployed. +The minimum value is 1 (the default) and the recommended maximum is 64. In most cases the maximum should be the number of vCPUs. -You cannot edit the source code of a deployed Eventing Function. +* *Language compatibility*: The language version of the Eventing Function for backward compatibility. ++ +If the semantics of a language construct change in any given release the “Language compatibility” setting will ensure an older Eventing Function will continue to see the runtime behavior that existed at the time it was authored, until such behavior is deprecated and removed. Note 6.0.0, 6.5.0, and 6.6.2 are the only currently defined versions and for newly authored Functions the default is the highest compatibility version available, currently 6.6.2. ++ +For example, accessing non-existent items from a keyspace returns undefined in 6.5.0, while in 6.0.0 an exception is thrown. In addition, only a Function with “language compatibility” of 6.6.2 in its settings will pass binary documents to the OnUpdate(doc,meta) handler. In addition, values of 6.0.0 and 6.5.0 will filter all binary documents out of the DCP mutation stream, only 6.6.2 will pass binary documents to the Eventing Function handlers. -During deployment, you must choose one of the following *Deployment Feed Boundary* settings: +* *Script Timeout*: Script Timeout provides a timeout option to terminate a non-responsive Function. ++ +The entry points into the handler, e.g. OnUpdate and OnDelete, processing for each mutation must complete from start to finish prior to this specified timeout duration. The default is 60 seconds. -* *Everything*, which provides the Eventing Function with a deduplicated history of all documents, ending with the current value of each document. This means the Function sees every document in the keyspace at least once. -* *From now*, which provides the Eventing Function with mutations starting at deployment. This means the Function only sees documents that have mutated after the Function's deployment. +* *Timer Context Max Size*: Timer Context Max Size limits the size of the context for any Timer created by the Function. ++ +Eventing Timers can store and access a context which can be any JSON document, the context is used to store state when the timer is created and retrieve state when the timer fires. By default the size is 1024 bytes, but this can be adjusted on a per Function basis. -=== Undeploy +[#section_mzd_l1p_m2b] +=== Bindings -The undeploy operation causes the Eventing Function to stop processing events of all types. -It also shuts down the worker processes associated with the Function. +A binding is a construct that allows separating environment specific variables (example: keyspace names, external endpoint URLs plus credentials, or global constants) from the Eventing Function's source code. 
It provides a level of indirection between environment specific artifacts to symbolic names, to help moving an Eventing Function definition from development to production environments without changing code. Binding names must be valid JavaScript identifiers and must not conflict with any built-in types. -Undeploying an Eventing Function: +An Eventing Function can have no binding, one binding, or several bindings. There are three distinct types of bindings: -* Deletes all Timers and context documents created by the Function -* Releases any runtime resources acquired by the Function +*Bucket alias* -You can edit the code and change the settings of an undeployed Eventing Function. -When you create a new Eventing Function, the Function's state is undeployed. +Bucket aliases allow the JavaScript in an Eventing Function to access Couchbase KV collections from the Data Service or KV. The keyspaces (bucket.scope.collection) are then accessible by the bound name as a JavaScript map in the global space of the Eventing Function. -=== Pause +An Eventing Function can listen to multiple collections via a wildcard of `{asterisk}` for the scope and/or the collection. +For these functions, the bucket alias (or binding) used by the JavaScript code can also contain a wildcard of `{asterisk}` for the scope and/or the collection. +If bucket alias contains a wildcard of `{asterisk}` only the Advanced Keyspace Accessors will be able to read or write the Data Service (or KV). -The pause operation causes the Eventing Function to pause all mutations and Timer callbacks. -It also performs a checkpoint to be used for resuming the Function. +You can add bucket aliases via the 'Bucket alias' choice then entering a tuple of: alias-name, keyspace, and an access level. Where the alias-name that you can use to refer to the keyspace or collection from your Eventing Function code; the keyspace is the full path to a collection in the cluster; and the access level to the keyspace is either 'read only' or 'read and write'. -You can edit the code and change the settings of a paused Eventing Function. -You can also resume or undeploy a paused Function. +NOTE: One or more Bucket alias bindings (or Bucket aliases) are mandatory when your Eventing Function code performs any collection related operations directly against the Data Service. -=== Resume +* Read Only Bindings: A binding with access level of "Read Only" allows reading documents from the collection, but cannot be used to write (create, update or delete) documents in such a collection. Attempting to do so will throw a runtime exception. -The resume operation continues processing mutations and Timer callbacks of an Eventing Function that was previously paused. +* Read-Write Bindings: A binding with access level of "Read Write" allows both reading and writing (create, update, delete) of documents in the collection. If you wish to modify the document passed to the OnUpdate entry point (or any other document in the source collection) you will need to provide a Read-Write binding alias to the Function's source collection. -The resume operation is similar to the deploy operation, but it uses a progress checkpoint to restart the Function. This means no mutations are lost or processed again. +*URL alias* -When you resume a Function, the backlog of mutations that occurred when the Function was in a paused state is processed. -The backlog of Timers also fires, even if the time of the Timers has already passed. 
+These bindings are utilized by the cURL language construct to access external resources. The binding specifies the endpoint, the protocol (http/https), and credentials if necessary. Cookie support can be enabled via the binding if desired when accessing trusted remote nodes. When a URL binding limits access through to be the URL specified or descendants of it. The target of a URL binding should not be a node that belongs to the Couchbase cluster. -Depending on the system capacity and on how long the Function was paused, clearing the backlog can take some time. -After the backlog is cleared, the Function goes on to process current mutations and Timers. +You can add URL bindings via the 'URL alias' choice then entering the following: alias-name, URL, allow cookies setting, security settings of validate SSL certificate and an auth type of (no auth, basic, bearer, and digest). For more details refer to xref:eventing-curl-spec.adoc#bindings[cURL Bindings]. -=== Delete +*Constant alias* -The delete operation deletes the following in the Eventing Function: +These bindings are utilized by the Eventing Function's JavaScript code as global variables. -* The source code implementing the Function -* All Timers and Timer contexts -* All processing checkpoints -* Application logs -* Any other artifacts in the metadata provider +You can add URL bindings via the 'Constant alias' choice then entering an alias-name and value. The value can be either an integer, decimal number, string, boolean, or a JSON object. For example you might have an alias of _debug_ with a value of _true_ (or _false_) to control verbose logging this would act just like adding a statement `const debug = true;` at the beginning of your JavaScript code (_although the Eventing syntax wouldn't allow this global to be added to the actual JavaScript_). -You can only delete an undeployed Eventing Function. +== Operations -=== Debug +The following operations are exposed through the UI, couchbase-cli and REST APIs. + +=== Deploy + +The deploy operation activates an Eventing Function in a cluster. + +This operation activates an Eventing Function. Source validations are performed, and only valid Eventing Function can be deployed. Deployment transpiles the code and creates the executable artifacts. The source code of an activated (or deployed and running) Eventing Function cannot be edited. Unless an Eventing Function is in deployed state, it will not receive or process any events (mutations or Timer callbacks). Deployment of an Eventing Function creates necessary metadata, spawns worker processes, calculates initial partitions, and initiates check-pointing of DCP stream to processes. + +Deployment for DCP observer (or Feed Boundary) has two variations controlled by the setting of the Eventing Function's "Deployment Feed Boundary": + +* Everything: The Eventing Function will see a deduplicated history of all documents, ending with the current value of each document. Hence, the Eventing Function will see every document in the keyspace at least once. + +* From now: The Eventing Function will see mutations from current time. In other words, the Eventing Function will see only documents that mutate after it is deployed. -The debug operation traps and sends the next event instance received by the Eventing Function to a separate v8 worker with debugging enabled. -Debug is a special flag that can be attach to a Function. 
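Purely as an illustration of how the binding types above appear in handler code, the following sketch assumes a read-write bucket alias named `dst_col` and a constant alias named `debug` with the value `true` (both names are hypothetical):

[source,javascript]
----
function OnUpdate(doc, meta) {
    if (debug) log("processing", meta.id);     // 'debug' resolves to the constant alias value
    dst_col["copy::" + meta.id] = doc;         // basic accessor write through the bucket alias
    var copy = dst_col["copy::" + meta.id];    // basic accessor read
    if (debug) log("stored copy of", meta.id, copy);
}
----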
+=== Undeploy + +This operation causes the Eventing Function to stop processing events of all types and shuts down the worker processes associated with the Eventing Function. It deletes all timers created by the Eventing Function being undeployed and their context documents. It releases any runtime resources acquired by the Eventing Function. An Eventing Function in the Undeployed state can have its code edited and settings altered. Newly created Eventing Functions start in Undeployed state. + +=== Pause + +This action stops all processing associated with an Eventing Function including timer callbacks and performs a checkpoint (to be used for a subsequent resume). An Eventing Function in the Paused state can have its code edited and settings altered. Eventing Functions in Paused state can be either Resumed or Undeployed. -The debug operation pauses the trapped event, opens a TCP port, and generates a Chrome Developer Tools URL with a session cookie that can be used to control the debug worker. +=== Resume + +This action continues processing of an Eventing Function that was previously Paused. The Resume process is akin to a Deploy but utilizes a progress checkpoint (made when the Eventing Function was paused) to restart such that no mutations are reprocessed or lost. The backlog of mutations that occurred when the Eventing Function was paused will now be processed. The backlog of timers that came due when the Eventing Function was paused will now fire even if that timer is now in the past. Depending on the system capacity and how long the Eventing Function was paused, clearing the backlog may take some time before Eventing Function moves on to current mutations and timers. -With the exception of the trapped event instance, all other Eventing Function events continue processing. -When the trapped event finishes debugging, the debug operation traps another event instance. -This continues until you stop the operation. +=== Delete +When an Eventing Function is deleted, the source code implementing the Eventing Function, all timers and timer contexts, all processing checkpoints, application logs and other artifacts in the metadata provider are purged. A future Eventing Function by the same name has no relation to a prior deleted Eventing Function of the same name. Only undeployed Eventing Function can be deleted. + +=== Debug -== See Also +Debug is a special flag on an Eventing Function that causes the next event instance received by the Eventing Function to be trapped and sent to a separate v8 worker with debugging enabled. The debug worker pauses the trapped event processing and opens a TCP port and generates a Chrome Developer Tools URL with a session cookie that can be used to control the debug worker. All other events, except the trapped event instance, continue unencumbered. If the debugged event instance completes execution, another event instance is trapped for debugging, and this continues till debugging is stopped, at which point any trapped instance runs to completion and the debugging worker becomes passive. -* xref:eventing-advanced-keyspace-accessors.adoc[Advanced Keyspace Accessors] -* xref:eventing-language-constructs.adoc#basic_bucket_accessors[Basic Keyspace Accessors] \ No newline at end of file +Debugging is convenience feature intended to help during Eventing Function development and should not be used in production environments. It also be noted that using the debugger does not provide correctness or functionality guarantees. 
diff --git a/modules/eventing/partials/nav.adoc b/modules/eventing/partials/nav.adoc index d4e7ff5f..fe6974f4 100644 --- a/modules/eventing/partials/nav.adoc +++ b/modules/eventing/partials/nav.adoc @@ -1,6 +1,6 @@ * xref:eventing:eventing-overview.adoc[Eventing] - ** xref:eventing:eventing-terminologies.adoc[Terminology] + ** xref:eventing:eventing-Terminologies.adoc[Terminology] ** xref:eventing:eventing-language-constructs.adoc[Language Constructs] *** xref:eventing:eventing-advanced-keyspace-accessors.adoc[Advanced Keyspace Accessors] From 2209a896478ca1ea5348a687d872d79640019a93 Mon Sep 17 00:00:00 2001 From: Julia B <150837331+julia-cb@users.noreply.github.com> Date: Sat, 16 Mar 2024 01:39:54 -0700 Subject: [PATCH 8/8] Deleted nav partial from this PR as I already have the file sitting in a different PR --- modules/eventing/partials/nav.adoc | 80 ------------------------------ 1 file changed, 80 deletions(-) delete mode 100644 modules/eventing/partials/nav.adoc diff --git a/modules/eventing/partials/nav.adoc b/modules/eventing/partials/nav.adoc deleted file mode 100644 index fe6974f4..00000000 --- a/modules/eventing/partials/nav.adoc +++ /dev/null @@ -1,80 +0,0 @@ -* xref:eventing:eventing-overview.adoc[Eventing] - - ** xref:eventing:eventing-Terminologies.adoc[Terminology] - - ** xref:eventing:eventing-language-constructs.adoc[Language Constructs] - *** xref:eventing:eventing-advanced-keyspace-accessors.adoc[Advanced Keyspace Accessors] - *** xref:eventing:eventing-timers.adoc[Timers] - *** xref:eventing:eventing-curl-spec.adoc[cURL] - - ** xref:eventing:eventing-lifecycle.adoc[Eventing Lifecycle] - ** xref:eventing:eventing-buckets-to-collections.adoc[Buckets vs Collections] - ** xref:eventing:eventing-rbac.adoc[Eventing Role-Based Access Control] - - ** xref:eventing:eventing-examples.adoc[Examples: Using the Eventing Service] - *** Step-by-Step Examples - **** xref:eventing:eventing-example-data-enrichment.adoc[Data Enrichment] - **** xref:eventing:eventing-examples-cascade-delete.adoc[Cascade Delete] - **** xref:eventing:eventing-examples-docexpiry.adoc[Document Expiry] - **** xref:eventing:eventing-examples-delete-v-expiry.adoc[Delete v Expiry] - **** xref:eventing:eventing-examples-docarchive.adoc[Document Archival] - **** xref:eventing:eventing-examples-cancel-overwrite-timer.adoc[Cancel or Overwrite Timer] - **** xref:eventing:eventing-examples-recurring-timer.adoc[Recurring Timer] - **** xref:eventing:eventing-examples-rest-via-curl-get.adoc[External REST via cURL GET] - **** xref:eventing:eventing-examples-high-risk.adoc[Risk Assessment] - - *** Basic Accessor Functions - **** xref:eventing:eventing-handler-basicBucketOps.adoc[basicBucketOps] - **** xref:eventing:eventing-handler-curl-get.adoc[basicCurlGet] - **** xref:eventing:eventing-handler-curl-post.adoc[basicCurlPost] - **** xref:eventing:eventing-handler-simpleTimer.adoc[simpleTimer] - **** xref:eventing:eventing-handler-cascadeKvDeleteWithDoc.adoc[cascadeKvDeleteWithDoc] - **** xref:eventing:eventing-handler-redactSharedData.adoc[redactSharedData] - **** xref:eventing:eventing-handler-simpleFlatten.adoc[simpleFlatten] - **** xref:eventing:eventing-handler-fixEmailDomains.adoc[fixEmailDomains] - **** xref:eventing:eventing-handler-keepLastN.adoc[keepLastN] - **** xref:eventing:eventing-handler-docControlledSelfExpiry.adoc[docControlledSelfExpiry] - **** xref:eventing:eventing-handler-shippingNotifier.adoc[shippingNotifier] - **** 
xref:eventing:eventing-handler-ConvertBucketToCollections.adoc[ConvertBucketToCollections] - - *** Basic {sqlpp} Functions - **** xref:eventing:eventing-handler-basicN1qlSelectStmt.adoc[] - **** xref:eventing:eventing-handler-basicN1qlPreparedSelectStmt.adoc[] - - *** Generic Manipulation Functions - **** xref:eventing:eventing-handler-dateToEpochConversion.adoc[dateToEpochConversion] - **** xref:eventing:eventing-handler-deepCloneAndModify.adoc[deepCloneAndModify] - **** xref:eventing:eventing-handler-removeObjectStubs.adoc[removeObjectStubs] - **** xref:eventing:eventing-handler-removeNullsAndEmptys.adoc[removeNullsAndEmptys] - **** xref:eventing:eventing-handler-genericRename.adoc[genericRename] - **** xref:eventing:eventing-handler-genericFlatten.adoc[genericFlatten] - **** xref:eventing:eventing-handler-convertXMLtoJSON.adoc[convertXMLtoJSON] - **** xref:eventing:eventing-handler-convertAdvXMLtoJSON.adoc[convertAdvXMLtoJSON] - - *** Advanced Accessor Functions - **** xref:eventing:eventing-handler-advancedGetOp.adoc[advancedGetOp] - **** xref:eventing:eventing-handler-advancedGetOpWithCache.adoc[advancedGetOpWithCache] - **** xref:eventing:eventing-handler-advancedInsertOp.adoc[advancedInsertOp] - **** xref:eventing:eventing-handler-advancedUpsertOp.adoc[advancedUpsertOp] - **** xref:eventing:eventing-handler-advancedReplaceOp.adoc[advancedReplaceOp] - **** xref:eventing:eventing-handler-advancedDeleteOp.adoc[advancedDeleteOp] - **** xref:eventing:eventing-handler-advancedIncrementOp.adoc[advancedIncrementOp] - **** xref:eventing:eventing-handler-advancedDecrementOp.adoc[advancedDecrementOp] - **** xref:eventing:eventing-handler-advancedTouchOp.adoc[advancedTouchOp] - **** xref:eventing:eventing-handler-advanced-keepLastN.adoc[advancedKeepLastN] - **** xref:eventing:eventing-handler-advanced-docControlledSelfExpiry.adoc[advancedDocControlledSelfExpiry] - **** xref:eventing:eventing-handler-multiCollectionEventing.adoc[multiCollectionEventing] - **** xref:eventing:eventing-handler-advancedSelfRecursion.adoc[advancedSelfRecursion] - **** xref:eventing:eventing-handler-advancedMutateInField.adoc[advancedMutateInField] - **** xref:eventing:eventing-handler-advancedMutateInArray.adoc[advancedMutateInArray] - - *** Binary Document Support - **** xref:eventing:eventing-handler-basicBinaryKV.adoc[basicBinaryKV] - **** xref:eventing:eventing-handler-advancedBinaryKV.adoc[advancedBinaryKV] - *** Performance Functions - **** xref:eventing:eventing-handler-fasterToLocalString.adoc[fasterToLocalString] - - ** xref:eventing:eventing-debugging-and-diagnosability.adoc[Debugging and Diagnosability] - ** xref:eventing:eventing-statistics.adoc[Statistics] - ** xref:eventing:troubleshooting-best-practices.adoc[Troubleshooting and Best Practices] - ** xref:eventing:eventing-faq.adoc[Frequently Asked Questions]