diff --git a/modules/eventing/pages/eventing-examples.adoc b/modules/eventing/pages/eventing-examples.adoc index e6e4de3c..86ad7f51 100644 --- a/modules/eventing/pages/eventing-examples.adoc +++ b/modules/eventing/pages/eventing-examples.adoc @@ -1,5 +1,5 @@ = Examples: Using the Eventing Service -:description: This page contains examples of how to use the Eventing Service, using the Couchbase Web Console. +:description: This page contains examples of how to use the Eventing Service with the Couchbase Web Console. :page-edition: Enterprise Edition +++ +++ @@ -7,12 +7,14 @@ {description} [#examples-step-by-step] -== Step by Step Examples +== Step-by-Step Examples [#Couchbase-Eventing-Examples] -*Detailed Examples*: These tutorial-like guides are ideal for a novice to learn the basics of the Eventing Service, via complete detailed step by step start-to-finish instructions. +=== Detailed Examples -[cols="1,1,1"] +The following tutorial-like guides have detailed start-to-finish instructions and are ideal for new users to learn the basics of the Eventing Service. + +[cols="1,1,1,1"] |=== | xref:eventing:eventing-example-data-enrichment.adoc[Data Enrichment] | xref:eventing:eventing-examples-cascade-delete.adoc[Cascade Delete] @@ -22,18 +24,23 @@ | xref:eventing:eventing-examples-cancel-overwrite-timer.adoc[Cancel or Overwrite Timer] | xref:eventing:eventing-examples-recurring-timer.adoc[Recurring Timer] | xref:eventing:eventing-examples-rest-via-curl-get.adoc[External REST via cURL GET] -| xref:eventing:eventing-examples-high-risk.adoc[Risk Assessment] +| xref:eventing:eventing-examples-high-risk.adoc[Risk Assessment] +| +| +| |=== [#examples-scriptlets] -== Scriptlets or Terse Examples +== Scriptlets [#Couchbase-Eventing-Scriptlets] [#examples-scriptlets-kv] -*Basic KV Eventing Functions*: The following Scriptlets are essentially stand alone Eventing Functions examples, and introduce more use cases. 
Here we assume the reader has a good understanding of the Eventing System and requires little guidance. +=== Basic Accessor Eventing Functions + +The following scriptlets are examples of standalone Eventing Functions. -[#Couchbase-Eventing-Snipets] -[cols="1,1,1"] +[#Couchbase-Eventing-Snippets] +[cols="1,1,1,1"] |=== | xref:eventing:eventing-handler-basicBucketOps.adoc[basicBucketOps] | xref:eventing:eventing-handler-curl-get.adoc[basicCurlGet] @@ -46,23 +53,28 @@ | xref:eventing:eventing-handler-keepLastN.adoc[keepLastN] | xref:eventing:eventing-handler-docControlledSelfExpiry.adoc[docControlledSelfExpiry] | xref:eventing:eventing-handler-shippingNotifier.adoc[shippingNotifier] -| xref:eventing:eventing-handler-ConvertBucketToCollections.adoc[ConvertBucketToCollections] +| xref:eventing:eventing-handler-ConvertBucketToCollections.adoc[convertBucketToCollections] |=== [#examples-scriptlets-n1ql] -*Basic {sqlpp} Eventing Functions*: The following Scriptlets demonstrate using {sqlpp} or the Query Service from within an Eventing Function. +=== Basic {sqlpp} Eventing Functions -[cols="1,1,1"] +The following scriptlets demonstrate how to use {sqlpp} or the Query Service with an Eventing Function. + +[cols="1,1,1,1"] |=== | xref:eventing:eventing-handler-basicN1qlSelectStmt.adoc[basicN1qlSelectStmt] | xref:eventing:eventing-handler-basicN1qlPreparedSelectStmt.adoc[basicN1qlPreparedSelectStmt] | +| |=== [#examples-scriptlets-generic] -*Generic Manipulation Eventing Functions* The following Scriptlets are more advanced use cases which focus on mutating documents without knowledge of the document's schema. +=== Generic Manipulation Eventing Functions -[cols="1,1,1"] +The following scriptlets are examples of advanced use cases that focus on mutating a document without knowing that document's schema. 
+ +[cols="1,1,1,1"] |=== | xref:eventing:eventing-handler-dateToEpochConversion.adoc[dateToEpochConversion] | xref:eventing:eventing-handler-deepCloneAndModify.adoc[deepCloneAndModify] @@ -72,13 +84,18 @@ | xref:eventing:eventing-handler-genericFlatten.adoc[genericFlatten] | xref:eventing:eventing-handler-convertXMLtoJSON.adoc[convertXMLtoJSON] | xref:eventing:eventing-handler-convertAdvXMLtoJSON.adoc[convertAdvXMLtoJSON] -| |=== [#examples-scriptlets-advanced-accessors] -*Advanced Accessor Eventing Functions*: The following Scriptlets demonstrate using Advanced Bucket Accessors (introduced in version 6.6.1) which allow the use of CAS, ability to set expirations (or TTLs) and the use of distributed atomic counters to increment or decrement counts. +=== Advanced Accessor Eventing Functions -[cols="1,1,1"] +The following scriptlets demonstrate how to use Advanced Keyspace Accessors, which allow you to: + +* Use CAS +* Set expiry (TTL) dates +* Use distributed atomic counters to increment and decrement counts + +[cols="1,1,1,1"] |=== | xref:eventing:eventing-handler-advancedGetOp.adoc[advancedGetOp] | xref:eventing:eventing-handler-advancedGetOpWithCache.adoc[advancedGetOpWithCache] @@ -88,30 +105,42 @@ | xref:eventing:eventing-handler-advancedDeleteOp.adoc[advancedDeleteOp] | xref:eventing:eventing-handler-advancedIncrementOp.adoc[advancedIncrementOp] | xref:eventing:eventing-handler-advancedDecrementOp.adoc[advancedDecrementOp] +| xref:eventing:eventing-handler-advancedTouchOp.adoc[advancedTouchOp] | xref:eventing:eventing-handler-advanced-keepLastN.adoc[advancedKeepLastN] | xref:eventing:eventing-handler-advanced-docControlledSelfExpiry.adoc[advancedDocControlledSelfExpiry] | xref:eventing:eventing-handler-multiCollectionEventing.adoc[multiCollectionEventing] +| xref:eventing:eventing-handler-advancedSelfRecursion.adoc[advancedSelfRecursion] +| xref:eventing:eventing-handler-advancedMutateInField.adoc[advancedMutateInField] +| 
xref:eventing:eventing-handler-advancedMutateInArray.adoc[advancedMutateInArray] | |=== [#examples-scriptlets-binary-documents] -*Binary Document Support*: The following Scriptlets demonstrate support for binary documents in Eventing. Only a Function with “language compatibility” of 6.6.2 or above in its settings will pass binary documents to the OnUpdate(doc,meta) handler. +=== Binary Document Support + +The following scriptlets demonstrate support for binary documents in Eventing. + +Your Eventing Function must have a language compatibility setting of Couchbase Server version 6.6.2 or above to pass binary documents in its `OnUpdate(doc,meta)` handler. -[cols="1,1,1"] +[cols="1,1,1,1"] |=== | xref:eventing:eventing-handler-basicBinaryKV.adoc[basicBinaryKV] | xref:eventing:eventing-handler-advancedBinaryKV.adoc[advancedBinaryKV] | +| |=== [#examples-scriptlets-performance] -*Performance Eventing Functions* The following Scriptlets are performance oriented and/or benchmarks. +=== Performance Eventing Functions + +The following scriptlets are examples of performance-oriented or benchmark Eventing Functions. -[cols="1,1,1"] +[cols="1,1,1,1"] |=== | xref:eventing:eventing-handler-fasterToLocalString.adoc[fasterToLocalString] | | +| |=== -+++ +++ ++++ +++ \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advanced-docControlledSelfExpiry.adoc b/modules/eventing/pages/eventing-handler-advanced-docControlledSelfExpiry.adoc index 610b50ad..6ec3d26f 100644 --- a/modules/eventing/pages/eventing-handler-advanced-docControlledSelfExpiry.adoc +++ b/modules/eventing/pages/eventing-handler-advanced-docControlledSelfExpiry.adoc @@ -1,43 +1,46 @@ = Function: Advanced Document Controlled Expiry -:description: pass:q[Purge a document automatically based on self-contained start and duration fields.] +:description: pass:q[Purge a document automatically based on the document's self-contained start and duration fields.] 
:page-edition: Enterprise Edition :tabs: -*Goal*: {description} - -* This function *advancedDocControlledSelfExpiry* demonstrates self-expiry of a document; for example, a user trial. -* Requires Eventing Storage (or metadata collection) and a "source" collection. -* Needs a Binding of type "bucket alias" (as documented in the Scriptlet). -* When documents are created, they will have no expiration value. This function processes the initial mutation to calculate and set the proper TTL. -* In Couchbase, when using a simple integer expiry value (as opposed to a proper date or time object), the expiration can be specified in two ways: -** As an offset from the current time. If the absolute value of the expiry is less than 30 days (60 * 60 * 24 * 30 seconds), it is considered an offset. -** As an absolute Unix time stamp. If the value is greater than 30 days (60 * 60 * 24 * 30 seconds), it is considered an absolute time stamp. -** As described in xref:learn:data/expiration.adoc[Expiration], if a "Bucket Max Time-To-Live" is set (specified in seconds), it is an enforced hard upper limit. As such, any subsequent document mutation (by {sqlpp}, Eventing, or any Couchbase SDK) will result in the document having its expiration adjusted and set to the bucket’s maximum TTL if the operation has: -*** No TTL. -*** A TTL of zero. -*** A TTL greater than the bucket TTL. -* As we are using Advanced Bucket Accessors setting document expirations (or TTLs) we use a JavaScript Data object. -* Will operate on any document with type == "trial_customers". -* Will ignore any doc with a non-zero TTL -* This is different than setting a TTL on a bucket or a collection which will typically update (or extend) the TTL of a document on each mutation. 
+{description} + +The `advancedDocControlledSelfExpiry` function: + +* Demonstrates the self-expiry of a document (for example, a user trial) +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Processes the initial mutation to calculate and set the TTL of a newly-created document +* Uses a JavaScript data object to set document expiration +* Operates on any document where `type == "trial_customers"` +* Ignores any document with a TTL that is not zero + +When you use a simple integer instead of a proper date or time object for your document's expiration value, the expiration value is specified in one of the following ways: + +* As an offset from the current time if the value is less than 30 days (60 * 60 * 24 * 30 seconds). +* As an absolute Unix time stamp if the value is greater than 30 days (60 * 60 * 24 * 30 seconds). + +If a `Bucket Max Time-to-Live` is set and specified in seconds, it's enforced as a hard upper limit. +Any subsequent document mutation, whether by {sqlpp}, Eventing, or a Couchbase SDK, results in the document having its expiration adjusted and set to the bucket's maximum TTL if the operation has: + +* No TTL +* A TTL of zero +* A TTL greater than the bucket's TTL [{tabs}] ==== advancedDocControlledSelfExpiry:: + -- -Two variants of this function are available - a 6.6 version that relies on {sqlpp} and a 6.6.1+/7.0.0+ version (*this Function*) that directly sets the expiration. -You can completely avoid _N1QL(...)_ and use _couchbase.replace(bucket_binding, meta, doc)_ as the advancedDocControlledSelfExpiry variant is much faster. +There are two variants of this function available: a xref:eventing-handler-docControlledSelfExpiry.adoc[Couchbase Server version 6.6 that relies on {sqlpp}], and a Couchbase Server version 6.6.1+/7.0.0+ that directly sets the expiration. 
-* xref:eventing-handler-docControlledSelfExpiry.adoc[docControlledSelfExpiry (indirect TTL via {sqlpp})] -* <> +You can improve your function's performance by avoiding N1QL() and using `couchbase.replace(bucket_binding, meta, doc)` instead. -[#advancedDocControlledSelfExpiry] -advancedDocControlledSelfExpiry (direct TTL) +The following example directly sets the expiration. [source,javascript] ---- -// To run configure the settings for this Function, advancedDocControlledSelfExpiry, as follows: +// Configure the settings for the advancedDocControlledSelfExpiry function as follows: // // Version 7.1+ // "Function Scope" @@ -61,36 +64,36 @@ advancedDocControlledSelfExpiry (direct TTL) // "bucket alias", "src_col", "source", "read and write" function OnUpdate(doc, meta) { - // Filter items that don't have been updated + // Filter items that have not been updated if (meta.expiration !== 0) { log(meta.id, "IGNORE expiration "+meta.expiration+" !== 0 or "+ new Date(meta.expiration).toString()); return; } - // Optional filter to a specic field like 'type' + // Optional filter to a specific field like 'type' if (doc.type !== 'trial_customers') return; - // Our expiry is based on a JavaScript date parsable field, it must exist + // The expiry is based on a JavaScript date parsable field if (!doc.trialStartDate || !doc.trialDurationDays) return; - // Convert the doc's field timeStamp and convert to unix epoch time (in ms.). + // Convert the doc field timeStamp to Unix epoch time in milliseconds var docTimeStampMs = Date.parse(doc.trialStartDate); var keepDocForMs = doc.trialDurationDays * 1000 * 60 * 60 * 24 ; - var nowMs = Date.now(); // get current unix time (in ms.). 
+ var nowMs = Date.now(); // Get current Unix time in milliseconds - // Archive if we have kept it for too long no need to set the expiration + // Archive if it has been kept for too long; you do not need to set an expiration if( nowMs >= (docTimeStampMs + keepDocForMs) ) { - // Delete the document form the source collection via the map alias + // Delete the document from the source collection through the map alias delete src_col[meta.id]; log(meta.id, "DELETE from src_col to dst_bkt alias as our expiration " + new Date(docTimeStampMs + keepDocForMs).toString()) + " is already past"; } else { var key = meta.id; - //set the meta.expiration=ttlMs + // Set the meta.expiration=ttlMs var ttlMs = docTimeStampMs + keepDocForMs; if (ttlMs !== 0) { @@ -98,7 +101,7 @@ function OnUpdate(doc, meta) { ttlMs+" or " + new Date(ttlMs).toString()); // Advanced Bucket Accessors use JavaScript Date objects var expiryDate = new Date(ttlMs); - // This is 4X to 5X faster than using N1QL(...) and no need to worry about recursion. + // This is 4X to 5X faster than using N1QL(...) and you do not need to worry about recursion var res = couchbase.replace(src_col,{"id":meta.id,"expiry_date":expiryDate},doc); if (!res.success) { log(meta.id,'Setting TTL to',expiryDate,'failed',res); @@ -109,13 +112,13 @@ function OnUpdate(doc, meta) { ---- -- -Input Data/Mutation:: +Input data:: + -- +Create a test set of 4 documents using the Query Editor to insert the data items. +You do not need an Index. -We want to create a test set of four (4) documents, use the Query Editor to insert the the data items (you do not need an index). - -Note, if the today is past 08-25-2021 (MM-DD-YYYY) just change the `trialStartDate` for the last two records to at least 90 days from now. +If today's date is past 08-25-2021 (MM-DD-YYYY), you can change the `trialStartDate` for the last two records to at least 90 days from today. 
[source,sqlpp] ---- @@ -154,7 +157,7 @@ ---- -- -Output Data/Mutation:: +Output data:: + -- [source,json] ---- @@ -179,7 +182,7 @@ NEW/OUTPUT: KEY trial_customers::3 "type": "trial_customers" } -We end up with two (2) of the four documents (obviously you may need to adjust the {sqlpp} INSERT in a few months as all the document would be immediately deleted). +Returns 2 of the 4 documents. * "trial_customers::0" was deleted * "trial_customers::1" was deleted @@ -188,4 +191,4 @@ We end up with two (2) of the four documents (obviously you may need to adjust t ---- -- -==== +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advanced-keepLastN.adoc b/modules/eventing/pages/eventing-handler-advanced-keepLastN.adoc index 4c95ccd0..b05bfac2 100644 --- a/modules/eventing/pages/eventing-handler-advanced-keepLastN.adoc +++ b/modules/eventing/pages/eventing-handler-advanced-keepLastN.adoc @@ -1,34 +1,35 @@ = Function: Advanced Keep the Last N User Items -:description: pass:q[Keep the last N user notifications seen related to a user ID (these could be any documents).] +:description: pass:q[Keep the last N user notifications related to a user ID.] :page-edition: Enterprise Edition :tabs: -*Goal*: {description} +{description} -* This function *advancedKeepLastN* demonstrates how to keep a user record with the last N activities. -* Requires Eventing Storage (or metadata collection) and a "source" collection. -* Needs a Binding of type "bucket alias" (as documented in the Scriptlet). -* Will operate on any mutation with a key starting with "nu:" of the form "nu:#:#". -* The key "nu:#:#" contains two numbers. The first # is an increasing notification number, the second # is the user ID. -* Anytime we insert a new record we want to remove the earliest notification record for the user so we only have at most N records for each user. 
-We assume that nid always increases across time as such we ignore duplicates. -* For our test we will keep just the three (3) most recent notifications per user ID. +The `advancedKeepLastN` function: + +* Demonstrates how to keep a user record with the last N activities +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation where the KEY starts with `nu:` in the form `nu:#:#` +** The KEY `nu:#:#` has 2 numbers. The first is an increasing notification number and the second is the user ID. +* Only keeps N records for each user +* Removes the earliest notification record for a user whenever a new record is inserted for that user + +The following example assumes that N always increases across time and ignores any duplicates. +It keeps only the 3 most recent notifications for each user ID. [{tabs}] ==== advancedKeepLastN:: + -- -Two variants of this function are available - a 6.6 version that implements userspace CAS and a 6.6.1+/7.0.0+ version (*this Function*) which uses true CAS. +There are two variants of this function available: a xref:eventing-handler-keepLastN.adoc[Couchbase Server version 6.6 that implements userspace CAS], and a Couchbase Server version 6.6.1+/7.0.0+ that uses true CAS. -* xref:eventing-handler-keepLastN.adoc[keepLastN (userspace CAS)] -* <> +The following example uses true CAS. -[#advancedKeepLastN] -advancedKeepLastN (true CAS) [source,javascript] ---- -// To run configure the settings for this Function, advancedKeepLastN, as follows: +// Configure the settings for the advancedKeepLastN function as follows: // // Version 7.1+ // "Function Scope" @@ -52,53 +53,52 @@ advancedKeepLastN (true CAS) // "bucket alias", "src_col", "source", "read and write" /* - * Process all mutations, however updateNotifyArrayInKV(...) will only - * data with KEYS like nu:#:# + * Process all mutations; updateNotifyArrayInKV(...) 
only processes data with KEYS like nu:#:# */ function OnUpdate(doc, meta) { - const MAX_ARRAY = 3; // alter to keep 'N' items - const DEBUG = false; // if true very verbose + const MAX_ARRAY = 3; // Keep 'N' items + const DEBUG = false; // If true, the debug log can be too long updateNotifyArrayInKV(doc, meta, MAX_ARRAY, DEBUG); } /* - * manipulates the in memory document to only keep 'MAX_ARRAY' items + * Manipulate the in-memory document to only keep 'MAX_ARRAY' items */ function addToNtfyArray(user_doc, user_id, insert_json, MAX_ARRAY, DEBUG) { var ntfy_id = insert_json.nid; if (user_doc.notifications && user_doc.notifications[0] && user_doc.notifications[0].nid >= ntfy_id && user_doc.notifications.length === MAX_ARRAY) { - // do nothing this is older data, we assume that nid always increases + // Do nothing; this is older data return null; } else { - // find insert position + // Find the insert position for (var i = 0; i <= user_doc.notifications.length + 1; i++) { if (i < user_doc.notifications.length && user_doc.notifications[i].nid === ntfy_id) { - // do nothing this is duplicate data we already have it, assume no updates to notifys + // Do nothing; this is duplicated data if (DEBUG) log('Ignore DUP ntfy_id', ntfy_id, 'user_id', user_id, 'insert_json', insert_json); return null; } if (i == user_doc.notifications.length || user_doc.notifications[i].nid > ntfy_id) { - // add to array middle or end + // Add to middle or end of array user_doc.notifications.splice(i, 0, insert_json); break; } } } while (user_doc.notifications.length > MAX_ARRAY) { - // ensure proper size + // Ensure proper size user_doc.notifications.shift(); } return user_doc; } /* - * creates, gets, and updates (via replace) the KV tracking array document + * Creates, gets, and updates (via replace) the KV tracking array document */ function updateNotifyArrayInKV(doc, meta, MAX_ARRAY, DEBUG) { - // will process ALL data like nu:#:# + // Process ALL data like nu:#:# var parts = 
meta.id.split(':'); if (!parts || parts.length != 3 || parts[0] != "nu") return; var ntfy_id = parseInt(parts[1]); @@ -109,7 +109,7 @@ function updateNotifyArrayInKV(doc, meta, MAX_ARRAY, DEBUG) { "nid": ntfy_id, doc }; - // In version 6.6.1 we can use CAS in Eventing to avoid race conditions + // In version 6.6.1, use CAS in Eventing to avoid race conditions var res = null; var req_id = "user_plus_ntfys:" + user_id; var req_meta = { @@ -127,11 +127,11 @@ function updateNotifyArrayInKV(doc, meta, MAX_ARRAY, DEBUG) { user_meta = res.meta; } else { if (!res.error.key_not_found) { - // do nothing this is a big error + // Do nothing; this is a big error log("FAILED to insert id: " + meta.id, doc, 'res', res) return; } - // here we just need to create our document or initialize it. + // Create the document and initialize it user_doc = { "type": "user_plus_ntfys", "id": user_id, @@ -139,25 +139,25 @@ function updateNotifyArrayInKV(doc, meta, MAX_ARRAY, DEBUG) { }; res = couchbase.insert(src_col, req_meta, user_doc); if (DEBUG) log('couchbase.insert(src_col,', req_meta, user_doc, ') success==' + res.success, res); - // redo loop just force a couchbase.get (try again or read our insert) + // Redo the loop to force couchbase.get res = null; } if (res !== null) { - // here we had a successful couchbase.get(...) so both 'user_doc' and 'user_meta' - // must be valid so we manipulate our copy of the user_doc to keep only MAX_ARRAY + // Successful couchbase.get(...) 
for both user_doc and user_meta + // Manipulate the copy of the user_doc to keep only MAX_ARRAY var new_doc = addToNtfyArray(user_doc, user_id, insert_json, MAX_ARRAY, DEBUG); if (new_doc == null) { - // duplicate data we already have it, just ignore/skip + // Ignore or skip duplicated data break; } - // now try to replace the user_doc with new_doc but pass CAS to test for race conditions + // Try to replace the user_doc with new_doc; pass CAS to test for race conditions res = couchbase.replace(src_col, user_meta, new_doc); if (DEBUG) log('couchbase.replace(src_col,', user_meta, new_doc, ') success==' + res.success, res); if (res.success) { - // CAS matched so we are done. + // CAS matches and operation is successful break; } else { - // redo loop try again + // Redo loop and try again res = null; } } @@ -166,11 +166,12 @@ function updateNotifyArrayInKV(doc, meta, MAX_ARRAY, DEBUG) { ---- -- -Input Data/Mutation:: +Input data:: + -- -We want to create a test doc set +Create a new test document set using the Query Editor to insert the data items. +You do not need an Index. 
[cols="1,3",width=50%,frame=all] |=== @@ -189,9 +190,6 @@ We want to create a test doc set |=== - -Use the Query Editor to insert the above data items (you do not need an Index) - [source,sqlpp] ---- UPSERT INTO `bulk`.`data`.`source` (KEY,VALUE) @@ -208,7 +206,7 @@ Use the Query Editor to insert the above data items (you do not need an Index) ---- -- -Output Data/Mutation:: +Output data:: + -- [source,json] @@ -261,4 +259,4 @@ NEW/OUTPUT: KEY user_plus_ntfys:2 } ---- -- -==== +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedDecrementOp.adoc b/modules/eventing/pages/eventing-handler-advancedDecrementOp.adoc index 7ee9ebde..6fb8e1aa 100644 --- a/modules/eventing/pages/eventing-handler-advancedDecrementOp.adoc +++ b/modules/eventing/pages/eventing-handler-advancedDecrementOp.adoc @@ -1,15 +1,19 @@ -= Function: Advanced DECREMENT operation -:description: pass:q[Perform the Advanced DECREMENT operation where Eventing interacts with the Data service.] += Function: Advanced DECREMENT Operation +:description: pass:q[Perform the Advanced DECREMENT operation where Eventing interacts with the Data Service.] :page-edition: Enterprise Edition :tabs: -*Goal*: {description} +{description} -* This function *advancedDecrementOp* merely demonstrates the Advanced DECREMENT operation. -* Requires Eventing Storage (or metadata collection) and a "source" collection. -* Needs a Binding of type "bucket alias" (as documented in the Scriptlet). -* Will operate on any mutation and count down (decrement) the mutations subject to DCP dedup. -* For more information refer to xref:eventing-advanced-keyspace-accessors.adoc#advanced-decrement-op[Advanced DECREMENT operation] in the detailed documentation. 
+The `advancedDecrementOp` function: + +* Performs the Advanced DECREMENT operation +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation +* Counts down the mutations subject to DCP deduplication + +For more information about the Advanced DECREMENT operation, see xref:eventing-advanced-keyspace-accessors.adoc#advanced-decrement-op[Advanced DECREMENT Operation]. [{tabs}] ==== @@ -18,7 +22,7 @@ advancedDecrementOp:: -- [source,javascript] ---- -// To run configure the settings for this Function, advancedDecrementOp, as follows: +// Configure the settings for the advancedDecrementOp function as follows: // // Version 7.1+ // "Function Scope" @@ -46,7 +50,7 @@ function OnUpdate(doc, meta) { // log('input meta', meta); // log('input doc ', doc); - // if doc.count doesn't exist it will be created + // Creates doc.count if it does not already exist var ctr_meta = {"id": "my_atomic_counter:1" }; var result = couchbase.decrement(src_col,ctr_meta); if (result.success) { @@ -57,7 +61,7 @@ } ---- -- -Input Data/Mutation:: +Input data:: + -- [source,json] ---- @@ -76,9 +80,9 @@ UPSERT INTO `bulk`.`data`.`source` (KEY,VALUE) ---- -- + -Output Data:: +Output data:: + -We insert ten (10) documents and count the mutations via decrementing 
+ -- [source,json] @@ -91,7 +95,7 @@ KEY: my_atomic_counter:1 ---- -- -Output Log:: +Output log:: + -- [source,json] @@ -127,4 +131,4 @@ Output Log:: {"doc":{"count":-10},"meta":{"id":"my_atomic_counter:1","cas":"1610137859948412928"},"success":true} ---- -- -==== +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedDeleteOp.adoc b/modules/eventing/pages/eventing-handler-advancedDeleteOp.adoc index 7970b4e7..3945f9ef 100644 --- a/modules/eventing/pages/eventing-handler-advancedDeleteOp.adoc +++ b/modules/eventing/pages/eventing-handler-advancedDeleteOp.adoc @@ -1,17 +1,20 @@ -= Function: Advanced DELETE operation -:description: pass:q[Perform the Advanced DELETE operation where Eventing interacts with the Data service.] += Function: Advanced DELETE Operation +:description: pass:q[Perform the Advanced DELETE operation where Eventing interacts with the Data Service.] :page-edition: Enterprise Edition :tabs: -*Goal*: {description} +{description} -* This function *advancedDeleteOp* merely demonstrates the Advanced DELETE operation. -* Requires Eventing Storage (or metadata collection) and a "source" collection. -* Needs a Binding of type "bucket alias" (as documented in the Scriptlet). -* Will operate on any mutation where doc.type === "control_adv_delete". -* Always tries to insert the test document, any insert error will be ignored. -* There are 4 modes of operation: no_cas, bad_cas, no_key, and good_cas. -* For more information refer to xref:eventing-advanced-keyspace-accessors.adoc#advanced-delete-op[Advanced DELETE operation] in the detailed documentation. 
+The `advancedDeleteOp` function: + +* Performs the Advanced DELETE operation +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation where `doc.type === "control_adv_delete"` +* Always tries to insert the test document and ignores insert errors +* Has 4 modes of operation: `no_cas`, `bad_cas`, `good_cas`, and `no_key` + +For more information about the Advanced DELETE operation, see xref:eventing-advanced-keyspace-accessors.adoc#advanced-delete-op[Advanced DELETE Operation]. [{tabs}] ==== @@ -20,7 +23,7 @@ advancedDeleteOp:: -- [source,javascript] ---- -// To run configure the settings for this Function, advancedDeleteOp, as follows: +// Configure the settings for the advancedDeleteOp function as follows: // // Version 7.1+ // "Function Scope" @@ -49,23 +52,23 @@ function OnUpdate(doc, meta) { log('input meta', meta); log('input doc ', doc); - // Setup, make sure we have our doc to "delete", ignore any errors + // Setup, make sure there is a doc to "delete", ignore any errors couchbase.insert(src_col,{"id":"test_adv_delete:" + doc.ins_id},{"a:": 1}); var new_meta; if (doc.mode && doc.mode === "no_cas") { - // Here we pass no CAS it will always succeed + // No CAS is passed - it always succeeds new_meta = {"id":"test_adv_delete:" + doc.ins_id}; } if (doc.mode && doc.mode === "bad_cas") { - // Here we pass a non-matching CAS it will always fail + // Pass a non-matching CAS - it always fails new_meta = {"id":"test_adv_delete:" + doc.ins_id, "cas":"1111111111111111111"}; } if (doc.mode && doc.mode === "good_cas") { - // Here we will pass the matching or current CAS it will succeed + // Pass the matching or current CAS - it succeeds var tmp_r = couchbase.get(src_col,{"id":"test_adv_delete:" + doc.ins_id}); if (tmp_r.success) { - // Here we use the current CAS just read via couchbase.get(...) + // Use the current CAS to read via couchbase.get(...) 
new_meta = {"id":"test_adv_delete:" + doc.ins_id, "cas": tmp_r.meta.cas}; } else { log('Cannot delete due to no such key',"test_adv_delete:" + doc.ins_id); @@ -86,7 +89,7 @@ function OnUpdate(doc, meta) { } ---- -- -Input Data/Mutation:: +Input data:: + -- [source,json] @@ -137,9 +140,11 @@ INPUT: KEY control_adv_delete::4 ---- -- + -Output Data/Log:: +Output data:: + -We do four (4) deletion attempts the second fails due to a CAS missmatch and the third fails due to no such key. +Perform 4 deletion attempts. +The second attempt fails because of a CAS mismatch. +The third attempt fails because the document key does not exist. + -- [source,json] @@ -257,4 +262,4 @@ Logs from Mutation #4 } ---- -- -==== +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedGetOp.adoc b/modules/eventing/pages/eventing-handler-advancedGetOp.adoc index 728cf6e3..f890d1c7 100644 --- a/modules/eventing/pages/eventing-handler-advancedGetOp.adoc +++ b/modules/eventing/pages/eventing-handler-advancedGetOp.adoc @@ -1,15 +1,18 @@ -= Function: Advanced GET operation -:description: pass:q[Perform the Advanced GET operation where Eventing interacts with the Data service.] += Function: Advanced GET Operation +:description: pass:q[Perform the Advanced GET operation where Eventing interacts with the Data Service.] :page-edition: Enterprise Edition :tabs: -*Goal*: {description} +{description} -* This function *advancedGetOp* merely demonstrates the Advanced GET operation. -* Requires Eventing Storage (or metadata collection) and a "source" collection. -* Needs a Binding of type "bucket alias" (as documented in the Scriptlet). -* Will operate on any mutation where doc.type === "test_adv_get". -* For more information refer to xref:eventing-advanced-keyspace-accessors.adoc#advanced-get-op[Advanced GET operation] in the detailed documentation. 
+The `advancedGetOp` function: + +* Performs the Advanced GET operation +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation where `doc.type === "test_adv_get"` + +For more information about the Advanced GET operation, see xref:eventing-advanced-keyspace-accessors.adoc#advanced-get-op[Advanced GET Operation]. [{tabs}] ==== @@ -18,7 +21,7 @@ advancedGetOp:: + -- [source,javascript] ---- -// To run configure the settings for this Function, advancedGetOp, as follows: +// Configure the settings for the advancedGetOp function as follows: // // Version 7.1+ // "Function Scope" @@ -60,7 +63,7 @@ function OnUpdate(doc, meta) { ---- -- -Input Data/Mutation:: +Input data:: + -- [source,json] @@ -75,7 +78,7 @@ INPUT: KEY test_adv_get::1 ---- -- -Output Data/Logged:: +Output data:: + -- [source,json] @@ -120,4 +123,4 @@ Output Data/Logged:: ---- -- -==== +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedGetOpWithCache.adoc b/modules/eventing/pages/eventing-handler-advancedGetOpWithCache.adoc index 9f763894..80783c6d 100644 --- a/modules/eventing/pages/eventing-handler-advancedGetOpWithCache.adoc +++ b/modules/eventing/pages/eventing-handler-advancedGetOpWithCache.adoc @@ -1,18 +1,19 @@ -= Function: Advanced GET operation (with cache) -:description: pass:q[Perform the Advanced GET operation where Eventing interacts with the Data service.] += Function: Advanced GET Operation with Cache +:description: pass:q[Perform the Advanced GET operation with cache where Eventing interacts with the Data Service.] :page-edition: Enterprise Edition :tabs: -[.status]#Couchbase Server 7.0.2# -*Goal*: {description} +{description} -* This function *advancedGetOpWithCache* merely demonstrates the Advanced GET operation with Bucket Backed Cache enabled. -* Requires Eventing Storage (or metadata collection) and a "source" collection.
-* Needs a Binding of type "bucket alias" (as documented in the Scriptlet). -* Will operate on any mutation where doc.type === "test_adv_get". -* The optional third parameter to couchbase.get of *{"cache": true}* enables caching of documents for up to 1 second. -* This RYOW caching is 18X-25X faster than reading near static data directly from the Data Service (or KV). -* For more information refer to xref:eventing-advanced-keyspace-accessors.adoc#advanced-get-op[Advanced GET operation] in the detailed documentation. +The `advancedGetOpWithCache` function: + +* Performs the Advanced GET operation with an enabled bucket-backed cache +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation where `doc.type === "test_adv_get"` +* Has an optional parameter to `couchbase.get` called `{ "cache": true }`, which enables caching of documents for up to 1 second + +For more information about the optional `{ "cache": true }` parameter, see xref:eventing-advanced-keyspace-accessors.adoc#optional-cache-true-parameter[Optional `{ "cache": true }` Parameter].
[{tabs}] ==== @@ -21,7 +22,7 @@ advancedGetOpWithCache:: -- [source,javascript] ---- -// To run configure the settings for this Function, advancedGetOpWithCache, as follows: +// Configure the settings for the advancedGetOpWithCache function as follows: // // Version 7.1+ // "Function Scope" @@ -54,7 +55,7 @@ function OnUpdate(doc, meta) { ---- -- -Input Data/Mutation:: +Input data:: + -- [source,json] @@ -69,7 +70,7 @@ INPUT: KEY test_adv_get::1 ---- -- -Output Data/Logged:: +Output data:: + -- [source,json] @@ -111,4 +112,4 @@ Output Data/Logged:: ---- -- -==== +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedIncrementOp.adoc b/modules/eventing/pages/eventing-handler-advancedIncrementOp.adoc index f1bfe773..c83cb1ca 100644 --- a/modules/eventing/pages/eventing-handler-advancedIncrementOp.adoc +++ b/modules/eventing/pages/eventing-handler-advancedIncrementOp.adoc @@ -1,15 +1,19 @@ -= Function: Advanced INCREMENT operation -:description: pass:q[Perform the Advanced INCREMENT operation where Eventing interacts with the Data service.] += Function: Advanced INCREMENT Operation +:description: pass:q[Perform the Advanced INCREMENT operation where Eventing interacts with the Data Service.] :page-edition: Enterprise Edition :tabs: -*Goal*: {description} +{description} -* This function *advancedIncrementOp* merely demonstrates the Advanced INCREMENT operation. -* Requires Eventing Storage (or metadata collection) and a "source" collection. -* Needs a Binding of type "bucket alias" (as documented in the Scriptlet). -* Will operate on any mutation and count the mutations subject to DCP dedup. -* For more information refer to xref:eventing-advanced-keyspace-accessors.adoc#advanced-increment-op[Advanced INCREMENT operation] in the detailed documentation. 
+The `advancedIncrementOp` function: + +* Performs the Advanced INCREMENT operation +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation +* Counts the mutations subject to DCP deduplication + +For more information about the Advanced Self-Recursion Parameter, see xref:eventing-advanced-keyspace-accessors.adoc#advanced-increment-op[Advanced INCREMENT Operation]. [{tabs}] ==== @@ -18,7 +22,7 @@ advancedIncrementOp:: -- [source,javascript] ---- -// To run configure the settings for this Function, advancedIncrementOp, as follows: +// Configure the settings for the advancedIncrementOp function as follows: // // Version 7.1+ // "Function Scope" @@ -46,7 +50,7 @@ function OnUpdate(doc, meta) { // log('input meta', meta); // log('input doc ', doc); - // if doc.count doesn't exist it will be created + // Creates doc.count if it does not already exist var ctr_meta = {"id": "my_atomic_counter:1" }; var result = couchbase.increment(src_col,ctr_meta); if (result.success) { @@ -57,7 +61,7 @@ function OnUpdate(doc, meta) { } ---- -- -Input Data/Mutation:: +Input data:: + -- [source,json] @@ -76,9 +80,9 @@ UPSERT INTO `bulk`.`data`.`source` (KEY,VALUE) ---- -- -Output Data:: +Output data:: + -We insert ten (10) documents and count the mutations +Insert 10 documents and count the mutations through incrementing. 
+ -- [source,json] @@ -91,7 +95,7 @@ KEY: my_atomic_counter:1 ---- -- -Output Log:: +Output log:: + -- [source,json] @@ -127,4 +131,4 @@ Output Log:: {"doc":{"count":10},"meta":{"id":"my_atomic_counter:1","cas":"1610137859948412928"},"success":true} ---- -- -==== +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedInsertOp.adoc b/modules/eventing/pages/eventing-handler-advancedInsertOp.adoc index e022382c..d0cac006 100644 --- a/modules/eventing/pages/eventing-handler-advancedInsertOp.adoc +++ b/modules/eventing/pages/eventing-handler-advancedInsertOp.adoc @@ -1,15 +1,18 @@ -= Function: Advanced INSERT operation -:description: pass:q[Perform the Advanced INSERT operation where Eventing interacts with the Data service.]] += Function: Advanced INSERT Operation +:description: pass:q[Perform the Advanced INSERT operation where Eventing interacts with the Data Service.]] :page-edition: Enterprise Edition :tabs: -*Goal*: {description} +{description} -* This function *advancedInsertOp* merely demonstrates the Advanced INSERT operation. -* Requires Eventing Storage (or metadata collection) and a "source" collection. -* Needs a Binding of type "bucket alias" (as documented in the Scriptlet). -* Will operate on any mutation where doc.type === "control_adv_insert". -* For more information refer to xref:eventing-advanced-keyspace-accessors.adoc#advanced-insert-op[Advanced INSERT operation] in the detailed documentation. +The `advancedInsertOp` function: + +* Performs the Advanced INSERT operation +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation where `doc.type === "control_adv_insert"` + +For more information about the Advanced Self-Recursion Parameter, see xref:eventing-advanced-keyspace-accessors.adoc#advanced-insert-op[Advanced INSERT Operation]. 
[{tabs}] ==== @@ -18,7 +21,7 @@ advancedInsertOp:: -- [source,javascript] ---- -// To run configure the settings for this Function, advancedInsertOp, as follows: +// Configure the settings for the advancedInsertOp function as follows: // // Version 7.1+ // "Function Scope" @@ -45,7 +48,7 @@ function OnUpdate(doc, meta) { if (!meta.id.startsWith("control_adv_insert")) return; log('input meta', meta); log('input doc ', doc); - // two modes typical insert or setting a expiration/TTL + // two modes: typical insert and setting an expiration/TTL var new_meta = {"id":"test_adv_insert:"+doc.ins_id}; if (doc.set_expiry && doc.set_expiry === true) { new_meta = {"id":"test_adv_insert:"+doc.ins_id, expiry_date: new Date(Date.now() + 60 * 1000)}; @@ -60,7 +63,7 @@ function OnUpdate(doc, meta) { } ---- -- -Input Data/Mutation:: +Input data:: + -- [source,json] @@ -100,9 +103,12 @@ INPUT: KEY control_adv_insert::3 ---- -- + -Output Data:: +Output data:: + -We try to insert three (3) documents the first two (2) inserts succeed but the second, test_adv_insert:2, will expire in 60 seconds because we set an expiration. The third insert attempt will fail since test_adv_insert:1 already exists. +The output data inserts 3 documents. +The first 2 insertions are successful. +The `test_adv_insert: 2` has an expiration of 60 seconds. +The third insertion attempt fails because `test_adv_insert: 1` already exists. 
+ -- [source,json] @@ -125,7 +131,7 @@ KEY: test_adv_insert:2 ---- -- + -Output Log:: +Output log:: + -- [source,json] @@ -215,4 +221,4 @@ Logs from Mutation #3 } ---- -- -==== +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedMutateInArray.adoc b/modules/eventing/pages/eventing-handler-advancedMutateInArray.adoc new file mode 100644 index 00000000..17c847be --- /dev/null +++ b/modules/eventing/pages/eventing-handler-advancedMutateInArray.adoc @@ -0,0 +1,84 @@ += Function: Advanced Sub-Document MUTATEIN Array Operation +:description: pass:q[Perform the Advanced Sub-Document MUTATEIN operation on an array where Eventing interacts with the Data Service.] +:page-edition: Enterprise Edition +:tabs: + +{description} + +The `advancedMutateInArray` function: + +* Demonstrates the CAS-free Sub-Document MUTATEIN operation on a document array field +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation where the `meta.id` or KEY is `combine_landmark_names` + +For example, you can generate an input document with the KEY `combine_landmark_names` and the DATA `{ "id": "combine_landmark_names", "landmark_names": [] }`, then set the number of workers in the Eventing Function's setting to 18. +Running the Function adds 4,495 landmark names to an array without conflict and in no particular order. + +For more information about the Advanced Self-Recursion Parameter, see xref:eventing-advanced-keyspace-accessors.adoc#sub-document-mutatein-operation[Sub-Document MUTATEIN Operation]. 
+ +[{tabs}] +==== +advancedMutateInArray:: ++ +-- +[source,javascript] +---- +// Configure the settings for the advancedMutateInArray function as follows: +// +// Version 7.6+ +// "Function Scope" +// *.* (or try bulk.data if non-privileged) +// "Listen to Location" +// travel-sample.inventory.landmark +// "Eventing Storage" +// rr100.eventing.metadata +// Binding(s) +// 1. "binding type", "alias name...", "bucket.scope.collection", "Access" +// "bucket alias", "dst_col", "bulk.data.source", "read and write" + +function OnUpdate(doc, meta) { + var accum_meta = {"id": "combine_landmark_names" }; + couchbase.mutateIn(dst_col, accum_meta, [ + couchbase.MutateInSpec.arrayAppend("landmark_names", doc.name), + ]); +} +---- +-- + +Input data before deployment:: ++ +-- +[source,json] +---- +INPUT: KEY combine_landmark_names + +{ + "id": "combine_landmark_names", + "landmark_names": [] +} +---- +-- + +Output data after deployment:: ++ +-- +[source,json] +---- +OUTPUT: KEY combine_landmark_names + +{ + "id": "combine_landmark_names", + "landmark_names": [ + "Gabriel's Wharf", + "Blue Bear Performance Hall", + "Circle Bar", + *** 4490 lines removed *** + "Quarry Bank Mill & Styal Estate", + "Mad Cat Brewery", + "Casbah Café" + ] +} +---- +-- +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedMutateInField.adoc b/modules/eventing/pages/eventing-handler-advancedMutateInField.adoc new file mode 100644 index 00000000..bd98eea2 --- /dev/null +++ b/modules/eventing/pages/eventing-handler-advancedMutateInField.adoc @@ -0,0 +1,91 @@ += Function: Advanced Sub-Document MUTATEIN Operation +:description: pass:q[Perform the Advanced Sub-Document MUTATEIN operation on a field where Eventing interacts with the Data Service.] 
+:page-edition: Enterprise Edition +:tabs: + +{description} + +The `advancedMutateInField` function: + +* Demonstrates the CAS-free Sub-Document MUTATEIN operation on a document field +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation where the `meta.id` or KEY starts with `mutateinfield:` + +For more information about the Advanced Self-Recursion Parameter, see xref:eventing-advanced-keyspace-accessors.adoc#sub-document-mutatein-operation[Sub-Document MUTATEIN Operation]. + +[{tabs}] +==== +advancedMutateInField:: ++ +-- +[source,javascript] +---- +// Configure the settings for the advancedMutateInField function as follows: +// +// Version 7.6+ +// "Function Scope" +// *.* (or try bulk.data if non-privileged) +// "Listen to Location" +// bulk.data.source +// "Eventing Storage" +// rr100.eventing.metadata +// Binding(s) +// 1. "binding type", "alias name...", "bucket.scope.collection", "Access" +// "bucket alias", "src_col", "bulk.data.source", "read and write" + +function OnUpdate(doc, meta) { + if (meta.id.startsWith("mutateinfield:") === false) return; + + var meta = { "id": meta.id }; + var res; + var opcnt = 1; + + res = + couchbase.mutateIn(src_col, meta, [ + couchbase.MutateInSpec.insert("testField", "insert") + ]); + log(opcnt++,res); + + res = + couchbase.mutateIn(src_col, meta, [ + couchbase.MutateInSpec.replace("testField", "replace") + ]); + log(opcnt++,res); + + res = + couchbase.mutateIn(src_col, meta, [ + couchbase.MutateInSpec.remove("testField") + ]); + log(opcnt++,res); +} +---- +-- + +Input data:: ++ +-- +[source,json] +---- +INPUT: KEY mutateinfield:001 + +{ + "id": "mutateinfield:001", +} + +---- +-- + +Output data:: ++ +-- +[source,json] +---- +2024-03-15T14:42:53.314-07:00 [INFO] 1 {"meta":{"id":"mutateinfield:001","cas":"1710538973313433600"},"success":true} + +2024-03-15T14:42:53.316-07:00 [INFO] 2 
{"meta":{"id":"mutateinfield:001","cas":"1710538973315596288"},"success":true} + +2024-03-15T14:42:53.317-07:00 [INFO] 3 {"meta":{"id":"mutateinfield:001","cas":"1710538973316841472"},"success":true} +---- +-- +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedReplaceOp.adoc b/modules/eventing/pages/eventing-handler-advancedReplaceOp.adoc index f425eae9..8cdb54e9 100644 --- a/modules/eventing/pages/eventing-handler-advancedReplaceOp.adoc +++ b/modules/eventing/pages/eventing-handler-advancedReplaceOp.adoc @@ -1,17 +1,20 @@ -= Function: Advanced REPLACE operation -:description: pass:q[Perform the Advanced REPLACE operation where Eventing interacts with the Data service.] += Function: Advanced REPLACE Operation +:description: pass:q[Perform the Advanced REPLACE operation where Eventing interacts with the Data Service.] :page-edition: Enterprise Edition :tabs: -*Goal*: {description} +{description} -* This function *advancedReplaceOp* merely demonstrates the Advanced REPLACE operation. -* Requires Eventing Storage (or metadata collection) and a "source" collection. -* Needs a Binding of type "bucket alias" (as documented in the Scriptlet). -* Will operate on any mutation where doc.type === "control_adv_replace". -* Always tries to insert the test document, any insert error will be ignored. -* There are 3 modes of operation: no_cas, bad_cas, and good_cas. -* For more information refer to xref:eventing-advanced-keyspace-accessors.adoc#advanced-replace-op[Advanced REPLACE operation] in the detailed documentation. 
+The `advancedReplaceOp` function: + +* Performs the Advanced REPLACE operation +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation where `doc.type === "control_adv_replace"` +* Always tries to insert the test document and ignores insert errors +* Has 3 modes of operation: `no_cas`, `bad_cas`, and `good_cas` + +For more information about the Advanced Self-Recursion Parameter, see xref:eventing-advanced-keyspace-accessors.adoc#advanced-replace-op[Advanced REPLACE Operation]. [{tabs}] ==== @@ -20,7 +23,7 @@ advancedReplaceOp:: -- [source,javascript] ---- -// To run configure the settings for this Function, advancedReplaceOp, as follows: +// Configure the settings for the advancedReplaceOp function as follows: // // Version 7.1+ // "Function Scope" @@ -49,25 +52,25 @@ function OnUpdate(doc, meta) { log('input meta', meta); log('input doc ', doc); - // Setup, make sure we have our doc to "replace", ignore any errors + // Setup, make sure there is a doc to "replace", ignore any errors couchbase.insert(src_col,{"id":"test_adv_replace:" + doc.ins_id},{"a:": 1}); var new_meta; if (doc.mode && doc.mode === "no_cas") { - // Here we pass no CAS it will always succeed + // No CAS is passed - it always succeeds new_meta = {"id":"test_adv_replace:" + doc.ins_id}; - // optional set an expiry 60 seconds in the future + // (Optional) Set an expiry 60 seconds in the future // new_meta.expiry_date = new Date(Date.now() + 60 * 1000); } if (doc.mode && doc.mode === "bad_cas") { - // Here we pass a non-matching CAS it will always fail + // Pass a non-matching CAS - it always fails new_meta = {"id":"test_adv_replace:" + doc.ins_id, "cas":"1111111111111111111"}; } if (doc.mode && doc.mode === "good_cas") { - // Here we will pass the matching or current CAS it will succeed + // Pass the matching or current CAS - it succeeds var tmp_r = couchbase.get(src_col,{"id":"test_adv_replace:" + 
doc.ins_id}); if (tmp_r.success) { - // Here we use the current CAS just read via couchbase.get(...) + // Use the current CAS to read via couchbase.get(...) new_meta = {"id":"test_adv_replace:" + doc.ins_id, "cas": tmp_r.meta.cas}; } else { log('Cannot replace due to no such key',"test_adv_replace:" + doc.ins_id); @@ -84,7 +87,7 @@ function OnUpdate(doc, meta) { } ---- -- -Input Data/Mutation:: +Input data:: + -- [source,json] @@ -124,9 +127,10 @@ INPUT: KEY control_adv_replace::3 ---- -- + -Output Data:: +Output data:: + -We try to replace three (3) documents the first and last replacements succeed but the second, fails because of a CAS missmatch. +Replace 3 documents. +The first and last replacements are successful; the second replacement fails because of a CAS mismatch. + -- [source,json] @@ -141,7 +145,7 @@ KEY: test_adv_replace:10 ---- -- + -Output Log:: +Output log:: + -- [source,json] @@ -230,4 +234,4 @@ Logs from Mutation #3 } ---- -- -==== +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedSelfRecursion.adoc b/modules/eventing/pages/eventing-handler-advancedSelfRecursion.adoc new file mode 100644 index 00000000..613427e5 --- /dev/null +++ b/modules/eventing/pages/eventing-handler-advancedSelfRecursion.adoc @@ -0,0 +1,199 @@ += Function: Advanced Self-Recursion Parameter +:description: pass:q[Perform the Advanced Self-Recursion parameter where Eventing interacts with the Data Service.] 
+:page-edition: Enterprise Edition +:tabs: + +{description} + +The `advancedSelfRecursion` function: + +* Uses the Advanced Self-Recursion parameter +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation where the `meta.id` or KEY starts with `doquery:` + +For more information about the Advanced Self-Recursion Parameter, see xref:eventing-advanced-keyspace-accessors.adoc#optional-params-recursion[Optional `{ "self_recursion": true }` Parameter]. + +The following example shows you how to stop and restart a long-running process like a {sqlpp} query. +It counts the number of hotels that start with a particular letter. + +[{tabs}] +==== +advancedSelfRecursion:: ++ +-- +[source,javascript] +---- +// Configure the settings for the advancedSelfRecursion function as follows: +// +// Version 7.6+ +// "Function Scope" +// *.* (or try bulk.data if non-privileged) +// "Listen to Location" +// bulk.data.source +// "Eventing Storage" +// rr100.eventing.metadata +// Binding(s) +// 1. "binding type", "alias name...", "bucket.scope.collection", "Access" +// "bucket alias", "src_col", "bulk.data.source", "read and write" +// +// You must have the sample dataset travel-sample installed + +function OnUpdate(doc, meta) { + if ( meta.id.startsWith("doquery:") === false ) return; + if (doc.done && doc.done === true) return; + + if (!
doc.continue) { + doc.queryBeg = new Date(); + doc.queryCnt = 0; + doc.currentQueryOffset = 0; + doc.namesProcessed = 0; + doc.letterHash = {}; + log(meta.id,'Query initialized at ' + doc.queryBeg); + } + + var offset = doc.currentQueryOffset; + var results = + SELECT name + FROM `travel-sample`.`inventory`.`hotel` + LIMIT 100 + OFFSET $offset; + + doc.queryCnt++; + doc.currentQueryOffset = doc.currentQueryOffset + 100; + + var loopCnt = 0; + for (var item of results) { + loopCnt++; + doc.namesProcessed++; + var name = item.name; + if (name && name.length > 0) { + // Extract the first character and convert it to lowercase + var firstChar = name[0].toLowerCase(); + + // If the letter exists in the hash, increment its count. Otherwise initialize it to 1. + if (doc.letterHash[firstChar]) { + doc.letterHash[firstChar]++; + } else { + doc.letterHash[firstChar] = 1; + } + } + } + results.close(); + + if (loopCnt < 100) { + // we are done + if (doc.continue) delete doc.continue + doc.done = true; + doc.queryEnd = new Date(); + log(meta.id,'Query cnt complete mutations ' + doc.queryCnt + ' namesProcessed ' + doc.namesProcessed ); + log(meta.id,'Query completed at ' + doc.queryEnd); + log(meta.id,'Result hotels starting with "a" ' + doc.letterHash['a'] + ', hotels starting with "b" ' + doc.letterHash['b'] + ', ...'); + // no self recursion + src_col[meta.id] = doc; + } else { + // we are not done + doc.continue = true; + log(meta.id,'Query cnt in progress mutations ' + doc.queryCnt + ' namesProcessed ' + doc.namesProcessed ); + // using self recursion results in a continuation of the query + couchbase.upsert(src_col, meta, doc, { "self_recursion": true }); + } +} +---- +-- + +Input data:: ++ +-- +[source,json] +---- +INPUT: KEY doquery:001 + +{ + "id": "doquery:001" +} + +---- +-- + +Output data:: ++ +-- +[source,json] +---- +OUTPUT: KEY doquery:001 +{ + "id": "doquery:001", + "queryBeg": "2024-03-15T21:07:38.114Z", + "queryCnt": 10, + "currentQueryOffset": 1000, + 
"namesProcessed": 917, + "letterHash": { + "1": 1, + "5": 2, + "8": 1, + "m": 58, + "t": 127, + "l": 41, + "g": 25, + "w": 27, + "a": 33, + "b": 48, + "r": 35, + "h": 168, + "n": 19, + "o": 15, + "p": 41, + "s": 64, + "c": 84, + "i": 23, + "u": 8, + "k": 15, + "j": 7, + "'": 1, + "e": 16, + "d": 21, + "q": 4, + "f": 16, + "y": 5, + "v": 12 + }, + "done": true, + "queryEnd": "2024-03-15T21:07:38.425Z" +} +---- +-- + +Output log:: ++ +-- +[source,json] +---- +2024-03-15T14:07:38.116-07:00 [INFO] "doquery:001" "Query initialized at Fri Mar 15 2024 14:07:38 GMT-0700 (Pacific Daylight Time)" + +2024-03-15T14:07:38.159-07:00 [INFO] "doquery:001" "Query cnt in progress mutations 1 namesProcessed 100" + +2024-03-15T14:07:38.175-07:00 [INFO] "doquery:001" "Query cnt in progress mutations 2 namesProcessed 200" + +2024-03-15T14:07:38.191-07:00 [INFO] "doquery:001" "Query cnt in progress mutations 3 namesProcessed 300" + +2024-03-15T14:07:38.204-07:00 [INFO] "doquery:001" "Query cnt in progress mutations 4 namesProcessed 400" + +2024-03-15T14:07:38.217-07:00 [INFO] "doquery:001" "Query cnt in progress mutations 5 namesProcessed 500" + +2024-03-15T14:07:38.351-07:00 [INFO] "doquery:001" "Query cnt in progress mutations 6 namesProcessed 600" + +2024-03-15T14:07:38.376-07:00 [INFO] "doquery:001" "Query cnt in progress mutations 7 namesProcessed 700" + +2024-03-15T14:07:38.396-07:00 [INFO] "doquery:001" "Query cnt in progress mutations 8 namesProcessed 800" + +2024-03-15T14:07:38.413-07:00 [INFO] "doquery:001" "Query cnt in progress mutations 9 namesProcessed 900" + +2024-03-15T14:07:38.425-07:00 [INFO] "doquery:001" "Query cnt complete mutations 10 namesProcessed 917" + +2024-03-15T14:07:38.425-07:00 [INFO] "doquery:001" "Query completed at Fri Mar 15 2024 14:07:38 GMT-0700 (Pacific Daylight Time)" + +2024-03-15T14:07:38.425-07:00 [INFO] "doquery:001" "Result hotels starting with \"a\" 33, hotels starting with \"b\" 48, ..." 
+---- +-- +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedTouchOp.adoc b/modules/eventing/pages/eventing-handler-advancedTouchOp.adoc new file mode 100644 index 00000000..06f82849 --- /dev/null +++ b/modules/eventing/pages/eventing-handler-advancedTouchOp.adoc @@ -0,0 +1,119 @@ += Function: Advanced TOUCH Operation +:description: pass:q[Perform the Advanced TOUCH operation where Eventing interacts with the Data Service.] +:page-edition: Enterprise Edition +:tabs: + +{description} + +The `advancedTouchOp` function: + +* Performs the Advanced TOUCH operation +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation where the `meta.id` or KEY starts with `ten_seconds:` +* Does not require that you send the document back to the Data Service to update the TTL + +For more information about the Advanced TOUCH operation, see xref:eventing-advanced-keyspace-accessors.adoc#advanced-touch-op[Advanced Keyspace Accessors]. + +[{tabs}] +==== +advancedTouchOp:: ++ +-- +[source,javascript] +---- +// Configure the settings for the advancedTouchOp function as follows: +// +// Version 7.6+ +// "Function Scope" +// *.* (or try bulk.data if non-privileged) +// "Listen to Location" +// bulk.data.source +// "Eventing Storage" +// rr100.eventing.metadata +// Binding(s) +// 1. "binding type", "alias name...", "bucket.scope.collection", "Access" +// "bucket alias", "src_col", "bulk.data.source", "read and write" + +function OnUpdate(doc, meta) { + if (! meta.id.startsWith("ten_seconds:") ) return; + + log('input meta', meta); + log('input doc', doc); + + var expiry = new Date(); + expiry.setSeconds(expiry.getSeconds() + 10); + + var req = {"id": meta.id, "expiry_date": expiry}; + var result = couchbase.touch(src_col, req); + if (result.success) { + log('success adv. touch: result', result); + } else { + log('failure adv. 
touch: id', req.id, 'result', result); + } +} +---- +-- + +Input Data:: ++ +-- +[source,json] +---- +INPUT: KEY ten_seconds:001 + +{ + "id": "ten_seconds:001", + "type": "Auto-deletes in 10 seconds. Keep refreshing to retrieve documents." +} + +---- +-- + +Output Data:: ++ +-- +[source,json] +---- + +2024-03-15T11:57:51.103-07:00 [INFO] "input doc" +{ + "id": "ten_seconds:001", + "type": "Auto-deletes in 10 seconds. Keep refreshing to retrieve documents." +} + +2024-03-15T11:57:51.103-07:00 [INFO] "input meta" +{ + "cas": "1710529071079817216", + "id": "ten_seconds:001", + "expiration": 0, + "flags": 33554438, + "vb": 679, + "seq": 102, + "datatype": "json", + "keyspace": + { + "bucket_name": "travel-sample", + "scope_name": "tenant_agent_00", + "collection_name": "bookings" + }, + "cid": 18 +} + +2024-03-15T11:57:51.108-07:00 [INFO] "success adv. touch: result" +{ + "meta": + { + "id": "ten_seconds:001", + "cas": "1710529071107276800" + }, + "success": true +} + +2024-03-15T11:58:03.302-07:00 [INFO] "Doc deleted/expired" "ten_seconds:001" +{ + "expired": true +} +---- +-- +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-advancedUpsertOp.adoc b/modules/eventing/pages/eventing-handler-advancedUpsertOp.adoc index 211e4a92..ba5d5420 100644 --- a/modules/eventing/pages/eventing-handler-advancedUpsertOp.adoc +++ b/modules/eventing/pages/eventing-handler-advancedUpsertOp.adoc @@ -1,15 +1,18 @@ -= Function: Advanced UPSERT operation -:description: pass:q[Perform the Advanced UPSERT operation where Eventing interacts with the Data service.] += Function: Advanced UPSERT Operation +:description: pass:q[Perform the Advanced UPSERT operation where Eventing interacts with the Data Service.] :page-edition: Enterprise Edition :tabs: -*Goal*: {description} +{description} -* This function *advancedUpsertOp* merely demonstrates the Advanced UPSERT operation. -* Requires Eventing Storage (or metadata collection) and a "source" collection. 
-* Needs a Binding of type "bucket alias" (as documented in the Scriptlet). -* Will operate on any mutation where doc.type === "control_adv_insert". -* For more information refer to xref:eventing-advanced-keyspace-accessors.adoc#advanced-upsert-op[Advanced UPSERT operation] in the detailed documentation. +The `advancedUpsertOp` function: + +* Performs the Advanced UPSERT operation +* Requires Eventing Storage (or a metadata collection) and a source collection +* Requires a binding of type `bucket alias` +* Operates on any mutation where `doc.type === "control_adv_insert"` + +For more information about the Advanced Self-Recursion Parameter, see xref:eventing-advanced-keyspace-accessors.adoc#advanced-upsert-op[Advanced UPSERT Operation]. [{tabs}] ==== @@ -18,7 +21,7 @@ advancedUpsertOp:: -- [source,javascript] ---- -// To run configure the settings for this Function, advancedGetOp, as follows: +// Configure the settings for the advancedUpsertOp function as follows: // // Version 7.1+ // "Function Scope" @@ -45,8 +48,8 @@ function OnUpdate(doc, meta) { if (!meta.id.startsWith("control_adv_upsert")) return; log('input meta', meta); log('input doc ', doc); - // two modes typical upsert or setting a expiration/TTL - // note CAS if supplied will be ignored (use replace for this) + // two modes: typical upsert and setting an expiration/TTL + // note that CAS, if supplied, is ignored. You can use REPLACE instead. var new_meta = {"id":"test_adv_upsert:"+doc.ins_id}; if (doc.set_expiry && doc.set_expiry === true) { new_meta = {"id":"test_adv_upsert:"+doc.ins_id, expiry_date: new Date(Date.now() + 60 * 1000)}; @@ -61,7 +64,7 @@ function OnUpdate(doc, meta) { } ---- -- -Input Data/Mutation:: +Input data:: + -- [source,json] @@ -101,10 +104,12 @@ INPUT: KEY control_adv_upsert::3 ---- -- + -Output Data:: +Output data:: + -We try to upsert three (3) documents all three (2) upserts succeed but the second, test_adv_upsert:2, will expire in 60 seconds because we set an expiration. 
-Note, third upsert attempt will overwrite test_adv_upsert:1 thus we are left with two documents. +The output data upserts 3 documents. +All 3 upsertions are successful. +The second document, `test_adv_upsert:2`, expires in 60 seconds because it has a set expiration. +The third upsertion overwrites `test_adv_upsert:1`, which leaves 2 documents. + -- [source,json] @@ -127,7 +132,7 @@ KEY: test_adv_upsert:2 ---- -- + -Output Log:: +Output log:: + -- [source,json] @@ -215,4 +220,4 @@ Logs from Mutation #3 } ---- -- -==== +==== \ No newline at end of file diff --git a/modules/eventing/pages/eventing-handler-multiCollectionEventing.adoc b/modules/eventing/pages/eventing-handler-multiCollectionEventing.adoc index 4be311e8..1b3c017c 100644 --- a/modules/eventing/pages/eventing-handler-multiCollectionEventing.adoc +++ b/modules/eventing/pages/eventing-handler-multiCollectionEventing.adoc @@ -1,19 +1,22 @@ = Function: Multi Collection Eventing -:description: pass:q[Show how to access the Data Service when Eventing is listening to multiple collections.] +:description: pass:q[Access the Data Service when Eventing is listening to multiple collections.] :page-edition: Enterprise Edition +{description} -*Goal*: {description} +The `multiCollectionEventing` function: -* This function *multiCollectionEventing* demonstrates how to access the Data Service when using wildcard bindings. -* Requires four (4) keyspaces in two buckets "rr100" and "source" -** rr100.eventing.metadata -** source._default._default -** source.myscope.mycol1 -** source.myscope.mycol2 -* Needs two Bindings of type "bucket alias" (as documented in the Scriptlet). -* Will operate on three test documents. Add them one at a time after the function is deployed.
-* Highlights the use of _meta.keyspace_ +* Demonstrates how to access the Data Service when using `{asterisk}` wildcard bindings +* Requires 2 bindings of type `bucket alias` +* Requires the following 4 keyspaces in two buckets, `rr100` and `source`: +** `rr100.eventing.metadata` +** `source._default._default` +** `source.myscope.mycol1` +** `source.myscope.mycol2` +* Operates on 3 test documents +* Highlights the use of `meta.keyspace` + +For more information about Eventing Functions that listen to multiple collections, see xref:eventing-advanced-keyspace-accessors.adoc#multiple-collection-functions[Eventing Functions that Listen to Multiple Collections]. [tabs] ==== @@ -22,9 +25,9 @@ multiCollectionEventing:: -- [source, javascript] ---- -// To run configure the settings for this Function, multiCollectionEventing, as follows: +// Configure the settings for the multiCollectionEventing function as follows: // -// Setup four (4) required keyspaces in two buckets "rr100" and "source" +// Set up four (4) required keyspaces in two buckets "rr100" and "source" // rr100.eventing.metadata // source._default._default // source.myscope.mycol1 @@ -58,15 +61,15 @@ function OnUpdate(doc, meta) { {"bucket_name": "source","scope_name": "myscope","collection_name": "mycol2"}}); log('>>>B fixed read',"res1", res1); - // protect against reading from something outside the alias + // Protect against reading from something outside the alias if (meta.keyspace.scope_name == "myscope") { - // TEST GET with keyspace form meta + // TEST GET with keyspace from meta var res2 = couchbase.get(alias_ro,meta); log('>>>C read using passed meta (must be myscope)',"res2", res2); if (meta.keyspace.collection_name == "mycol2") { // TEST UPSERT with hardcode keyspace - // add a field to the doc we read in res1 + // Add a field to the document read in res1 res1.doc.random1 = Math.random(); var res3 = couchbase.upsert(alias_rw,{"id": "doc2", "keyspace": {"bucket_name": "source","scope_name": "myscope","collection_name": 
"mycol2"}}, res1.doc) @@ -82,13 +85,13 @@ function OnUpdate(doc, meta) { var res5 = couchbase.get(alias_rw,meta); log('>>>F get (show added fields)',"res5", res5); - // TEST DELETE with hardcode keyspace (so we can TEST the insert) + // TEST DELETE with hardcode keyspace (so the insert can be tested) var res6 = couchbase.delete(alias_rw,{"id": "doc2", "keyspace": {"bucket_name": "source","scope_name": "myscope","collection_name": "mycol2"}}) log('>>>G delete',"res6", res6); // TEST INSERT with hardcode keyspace - // now remove the added items put thnigs back + // Remove the added items delete res1.doc.random1; delete res1.doc.random2; var res7 = couchbase.insert(alias_rw,{"id": "doc2", "keyspace": @@ -100,13 +103,13 @@ function OnUpdate(doc, meta) { ---- -- -Input Data/Mutation:: +Input data:: + -- +Create a test document set of 3 documents using the Query Editor to insert the data items. +You do not need an Index. -We want to create a small test doc set of three documents - -Use the Query Editor to insert the above data items (you do not need an Index) add them one at a time and check the Eventing log each time. +Add one test document at a time. [source,sqlpp] ---- @@ -118,7 +121,7 @@ Use the Query Editor to insert the above data items (you do not need an Index) a ---- -- -Output Log (reverse order)/Mutation:: +Output log in reverse order:: + -- [source,log] @@ -158,4 +161,4 @@ Function Log - multiCollectionEventing ---- -- -==== +==== \ No newline at end of file