
Function: Advanced Keep the Last N User Items


      Goal: Keep the last N user notifications seen for a given user ID (these could be any documents).

      • This function advancedKeepLastN demonstrates how to keep a user record with the last N activities.

      • Requires Eventing Storage (or metadata collection) and a "source" collection.

      • Needs a Binding of type "bucket alias" (as documented in the Scriptlet).

      • Will operate on any mutation with a key starting with "nu:" of the form "nu:#:#".

      • The key "nu:#:#" contains two numbers. The first # is an increasing notification number, the second # is the user ID.

      • Any time we insert a new record, we want to remove the earliest notification record for that user so we keep at most N records per user. We assume that nid always increases over time; as such, we ignore duplicates.

      • For our test we will keep just the three (3) most recent notifications per user ID (a simplified sketch of the key parsing and trimming logic follows this list).

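      Before deploying anything, the two core ideas can be tried in isolation. The following is a simplified, standalone sketch (not part of the deployed Function): it only shows how a "nu:#:#" key is split apart and how a notifications array is trimmed to the newest N entries.

      // Simplified sketch only; the deployed Function below does the real work.
      function parseKey(key) {                     // e.g. "nu:8:1"
          var parts = key.split(':');
          if (parts.length !== 3 || parts[0] !== "nu") return null;
          return { nid: parseInt(parts[1]), user_id: parseInt(parts[2]) };
      }

      function keepLastN(notifications, maxN) {
          // assumes notifications is already sorted by ascending nid
          while (notifications.length > maxN) notifications.shift(); // drop the oldest
          return notifications;
      }

      // parseKey("nu:8:1")                              => { nid: 8, user_id: 1 }
      // keepLastN([{nid:4},{nid:5},{nid:8},{nid:9}], 3) => [{nid:5},{nid:8},{nid:9}]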

      Two variants of this Function are available: a 6.6 version that implements userspace CAS and a 6.6.1+/7.0.0+ version (this Function), which uses true CAS.
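      The practical difference is how a racing writer is detected. With true CAS, the meta returned by couchbase.get() carries the document's CAS value; handing that meta to couchbase.replace() makes the write fail when another writer has updated the document in the meantime, and the operation is simply retried. The following is a distilled sketch of that retry idiom under the 6.6.1+/7.0.0+ Advanced Keyspace Accessors (it assumes a bucket alias binding named src_col); the full Function below applies the same pattern with additional bookkeeping.

      // Sketch of the true-CAS retry loop (assumes the "src_col" bucket alias binding)
      function casRetryUpdate(key, mutateFn) {
          while (true) {
              var res = couchbase.get(src_col, { id: key });
              if (!res.success) return false;             // caller decides how to handle errors
              var new_doc = mutateFn(res.doc);            // modify our private copy
              // res.meta holds the CAS from the get; replace fails if it no longer matches
              var rep = couchbase.replace(src_col, res.meta, new_doc);
              if (rep.success) return true;               // CAS matched, the write landed
              if (!rep.error || !rep.error.cas_mismatch) return false; // unexpected error
              // cas_mismatch: another writer raced us, loop and try again
          }
      }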

      advancedKeepLastN (true CAS)

      // To run configure the settings for this Function, advancedKeepLastN, as follows:
      //
      // Version 7.1+
      //   "Function Scope"
      //     *.* (or try bulk.data if non-privileged)
      // Version 7.0+
      //   "Listen to Location"
      //     bulk.data.source
      //   "Eventing Storage"
      //     rr100.eventing.metadata
      //   Binding(s)
      //    1. "binding type", "alias name...", "bucket.scope.collection", "Access"
      //       "bucket alias", "src_col",       "bulk.data.source",        "read and write"
      //
      // Version 6.6.1
      //   "Source Bucket"
      //     source
      //   "MetaData Bucket"
      //     metadata
      //   Binding(s)
      //    1. "binding type", "alias name...", "bucket", "Access"
      //       "bucket alias", "src_col",       "source", "read and write"
      
      /*
       * Process all mutations; however, updateNotifyArrayInKV(...) will only
       * process data with KEYS like nu:#:#
       */
      function OnUpdate(doc, meta) {
          const MAX_ARRAY = 3; // alter to keep 'N' items
          const DEBUG = false; // if true very verbose
      
          updateNotifyArrayInKV(doc, meta, MAX_ARRAY, DEBUG);
      }
      
      /*
       * manipulates the in-memory document to keep only 'MAX_ARRAY' items
       */
      function addToNtfyArray(user_doc, user_id, insert_json, MAX_ARRAY, DEBUG) {
          var ntfy_id = insert_json.nid;
          if (user_doc.notifications && user_doc.notifications[0] &&
              user_doc.notifications[0].nid >= ntfy_id &&
              user_doc.notifications.length === MAX_ARRAY) {
              // do nothing this is older data, we assume that nid always increases
              return null;
          } else {
              // find insert position
              for (var i = 0; i <= user_doc.notifications.length + 1; i++) {
                  if (i < user_doc.notifications.length && user_doc.notifications[i].nid === ntfy_id) {
                      // do nothing, this is duplicate data we already have; assume no updates to notifications
                      if (DEBUG) log('Ignore DUP ntfy_id', ntfy_id, 'user_id', user_id, 'insert_json', insert_json);
                      return null;
                  }
                  if (i == user_doc.notifications.length || user_doc.notifications[i].nid > ntfy_id) {
                      // add to array middle or end
                      user_doc.notifications.splice(i, 0, insert_json);
                      break;
                  }
              }
          }
          while (user_doc.notifications.length > MAX_ARRAY) {
              // ensure proper size
              user_doc.notifications.shift();
          }
          return user_doc;
      }
      
      /*
       * creates, gets, and updates (via replace) the KV tracking array document
       */
      function updateNotifyArrayInKV(doc, meta, MAX_ARRAY, DEBUG) {
          // will process ALL data like nu:#:#
          var parts = meta.id.split(':');
          if (!parts || parts.length != 3 || parts[0] != "nu") return;
          var ntfy_id = parseInt(parts[1]);
          var user_id = parseInt(parts[2]);
          //log("Doc created/updated " +  meta.id + " ntfy_id " + ntfy_id  + " user_id " + user_id);
      
          var insert_json = {
              "nid": ntfy_id,
              doc
          };
          // In 6.6.1+/7.0.0+ we can use true CAS in Eventing to avoid race conditions
          var res = null;
          var req_id = "user_plus_ntfys:" + user_id;
          var req_meta = {
              id: req_id
          };
          var user_doc = null;
          var user_meta = null;
          while (res === null) {
              res = couchbase.get(src_col, req_meta);
              if (DEBUG) log('couchbase.get(src_col,', req_meta, ') success==' + res.success, res);
              if (res.success) {
                  user_doc = res.doc;
                  // EXAMPLE user_meta
                  // {"id":"user_plus_ntfys:2","cas":"1609965779014844416","data_type":"json"}
                  user_meta = res.meta;
              } else {
                  if (!res.error.key_not_found) {
                      // do nothing this is a big error
                      log("FAILED to insert id: " + meta.id, doc, 'res', res)
                      return;
                  }
                  // here we just need to create our document or initialize it.
                  user_doc = {
                      "type": "user_plus_ntfys",
                      "id": user_id,
                      "notifications": []
                  };
                  res = couchbase.insert(src_col, req_meta, user_doc);
                  if (DEBUG) log('couchbase.insert(src_col,', req_meta, user_doc, ') success==' + res.success, res);
                  // redo the loop to force a couchbase.get (try again or read back our insert)
                  res = null;
              }
              if (res !== null) {
                  // here we had a successful couchbase.get(...) so both 'user_doc' and 'user_meta'
                  // must be valid so we manipulate our copy of the user_doc to keep only MAX_ARRAY
                  var new_doc = addToNtfyArray(user_doc, user_id, insert_json, MAX_ARRAY, DEBUG);
                  if (new_doc == null) {
                      // duplicate data we already have it, just ignore/skip
                      break;
                  }
                  // now try to replace the user_doc with new_doc but pass CAS to test for race conditions
                  res = couchbase.replace(src_col, user_meta, new_doc);
                  if (DEBUG) log('couchbase.replace(src_col,', user_meta, new_doc, ') success==' + res.success, res);
                  if (res.success) {
                      // CAS matched so we are done.
                      break;
                  } else {
                      // redo loop try again
                      res = null;
                  }
              }
          }
      }
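
      Because addToNtfyArray() only manipulates a plain JavaScript object, it can be exercised outside Eventing. The driver below is a hypothetical sketch (not part of the Function): paste it after the addToNtfyArray() definition in any JavaScript engine and it simulates four mutations for user 1 with MAX_ARRAY = 3 and DEBUG = false, so the Eventing-only log() helper is never called.

      // Hypothetical standalone driver for addToNtfyArray() (sketch only)
      var test_doc = { "type": "user_plus_ntfys", "id": 1, "notifications": [] };
      [ { nid: 4, doc: { somekey: "someValue" } },
        { nid: 5, doc: { somekey: "someValue" } },
        { nid: 8, doc: { somekey: "someValue" } },
        { nid: 9, doc: { somekey: "someValue" } } ].forEach(function (insert_json) {
          addToNtfyArray(test_doc, 1, insert_json, 3, false);
      });
      // test_doc.notifications now holds nids 5, 8 and 9 (the oldest, nid 4, was shifted off)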

      We want to create a test doc set:

      key        data
      nu:1:1     {"somekey":"someValue"}
      nu:2:2     {"somekey":"someValue"}
      nu:3:1     {"somekey":"someValue"}
      nu:4:1     {"somekey":"someValue"}
      nu:5:1     {"somekey":"someValue"}
      nu:6:2     {"somekey":"someValue"}
      nu:7:2     {"somekey":"someValue"}
      nu:8:1     {"somekey":"someValue"}
      nu:9:2     {"somekey":"someValue"}
      nu:10:2    {"somekey":"someValue"}

      Use the Query Editor to insert the above data items (you do not need an Index):

        UPSERT INTO `bulk`.`data`.`source` (KEY,VALUE)
        VALUES ( "nu:1:1",  {"somekey":"someValue"} ),
        VALUES ( "nu:2:2",  {"somekey":"someValue"} ),
        VALUES ( "nu:3:1",  {"somekey":"someValue"} ),
        VALUES ( "nu:4:1",  {"somekey":"someValue"} ),
        VALUES ( "nu:5:1",  {"somekey":"someValue"} ),
        VALUES ( "nu:6:2",  {"somekey":"someValue"} ),
        VALUES ( "nu:7:2",  {"somekey":"someValue"} ),
        VALUES ( "nu:8:1",  {"somekey":"someValue"} ),
        VALUES ( "nu:9:2",  {"somekey":"someValue"} ),
        VALUES ( "nu:10:2", {"somekey":"someValue"} );
      NEW/OUTPUT: KEY user_plus_ntfys:1
      
      {
        "type": "user_plus_ntfys",
        "id": 1,
        "notifications": [{
          "nid": 4,
          "doc": {
            "somekey": "someValue"
          }
        }, {
          "nid": 5,
          "doc": {
            "somekey": "someValue"
          }
        }, {
          "nid": 8,
          "doc": {
            "somekey": "someValue"
          }
        }]
      }
      
      NEW/OUTPUT: KEY user_plus_ntfys:2
      
      {
        "type": "user_plus_ntfys",
        "id": 2,
        "notifications": [{
          "nid": 7,
          "doc": {
            "somekey": "someValue"
          }
        }, {
          "nid": 9,
          "doc": {
            "somekey": "someValue"
          }
        }, {
          "nid": 10,
          "doc": {
            "somekey": "someValue"
          }
        }]
      }