diff --git a/R/build.gradle b/R/build.gradle
index 96c00db6d86..83f26e670e7 100644
--- a/R/build.gradle
+++ b/R/build.gradle
@@ -42,6 +42,7 @@ def buildRClient = Docker.registerDockerTask(project, 'rClient') {
             include 'rdeephaven/R/**'
             include 'rdeephaven/src/*.cpp'
             include 'rdeephaven/src/Makevars'
+            include 'rdeephaven/vignettes/*.Rmd'
         }
     }
     dockerfile {
@@ -64,6 +65,7 @@ def buildRClient = Docker.registerDockerTask(project, 'rClient') {
         copyFile('rdeephaven/R/', "${prefix}/src/rdeephaven/R/")
         copyFile('rdeephaven/src/*.cpp', "${prefix}/src/rdeephaven/src/")
         copyFile('rdeephaven/src/Makevars', "${prefix}/src/rdeephaven/src/")
+        copyFile('rdeephaven/vignettes/*.Rmd', "${prefix}/src/rdeephaven/vignettes/")
         copyFile('r-build.sh', "${prefix}/bin/rdeephaven")
         runCommand("PREFIX=${prefix}; " +
                   '''set -eux ; \
@@ -155,5 +157,43 @@ def rClientDoc = Docker.registerDockerTask(project, 'rClientDoc') {
     containerOutPath = "${prefix}/src/rdeephaven/man"
 }
 
+def rClientSite = Docker.registerDockerTask(project, 'rClientSite') {
+    // Only tested on x86-64, and we only build dependencies for x86-64
+    platform = 'linux/amd64'
+    copyIn {
+        from(layout.projectDirectory) {
+            include 'r-site.sh'
+            include 'rdeephaven/man/**'
+        }
+    }
+    copyOut {
+        into layout.projectDirectory.dir('rdeephaven/docs')
+    }
+    dockerfile {
+        from('deephaven/r-client-doc:local-build')
+        // We need the contents of 'man' to build the docsite
+        copyFile('rdeephaven/man/**', "${prefix}/src/rdeephaven/man/")
+        runCommand("mkdir -p ${prefix}/src/rdeephaven/docs")
+        runCommand('''echo "status = tryCatch(" \
+                           "   {" \
+                           "      install.packages('pkgdown', repos='http://cran.us.r-project.org'); " \
+                           "      0" \
+                           "   }," \
+                           "  error=function(e) 1," \
+                           "  warning=function(w) 2" \
+                           ");" \
+                           "print(paste0('status=', status));" \
+                           "quit(save='no', status=status)" | \
+                        MAKE="make -j`getconf _NPROCESSORS_ONLN`" R --no-save --no-restore
+                   ''')
+        // Keep this after the package installs above;
+        // it likely changes more frequently.
+        copyFile('r-site.sh', "${prefix}/bin/rdeephaven")
+    }
+    parentContainers = [ project.tasks.getByName('rClientDoc') ]
+    entrypoint = ["${prefix}/bin/rdeephaven/r-site.sh"]
+    containerOutPath = "${prefix}/src/rdeephaven/docs"
+}
+
 deephavenDocker.shouldLogIfTaskFails testRClient
 tasks.check.dependsOn(testRClient)
diff --git a/R/r-build.sh b/R/r-build.sh
index 910e3bb43f5..273f67e37ad 100755
--- a/R/r-build.sh
+++ b/R/r-build.sh
@@ -18,18 +18,14 @@ fi
 trap 'rm -f src/*.o src/*.so' 1 2 15
 rm -f src/*.o src/*.so
 
-MAKE="make -j${NCPUS}" R --no-save --no-restore <<EOF
-status = tryCatch(
-  {
-     install.packages(".", repos=NULL, type="source")
-     0
-  },
-  error=function(e) 1,
-  warning=function(w) 2
-)
-print(paste0('status=', status))
-quit(save='no', status=status)
-EOF
+MAKE="make -j${NCPUS}"
+cd .. && \
+  rm -f rdeephaven_*.tar.gz && \
+  R CMD build rdeephaven && \
+  R CMD INSTALL --no-multiarch --with-keep.source rdeephaven_*.tar.gz && \
+  rm -f rdeephaven_*.tar.gz && \
+  cd rdeephaven ||
+  exit 1
 
 rm -f src/*.o src/*.so
 
diff --git a/R/r-site.sh b/R/r-site.sh
new file mode 100644
index 00000000000..26fef767c4e
--- /dev/null
+++ b/R/r-site.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+
+set -euo pipefail
+
+if [ -z "${DH_PREFIX:-}" ]; then
+    echo "$0: Environment variable DH_PREFIX is not set, aborting." 1>&2
+    exit 1
+fi
+
+source $DH_PREFIX/env.sh
+
+cd $DH_PREFIX/src/rdeephaven
+
+R --no-save --no-restore <<EOF
+library('pkgdown')
+status = tryCatch(
+  {
+     pkgdown::build_site()
+     0
+  },
+  error=function(e) 1
+)
+print(paste0('status=', status))
+quit(save='no', status=status)
+EOF
diff --git a/R/rdeephaven/.Rbuildignore b/R/rdeephaven/.Rbuildignore
new file mode 100644
index 00000000000..3cd2a1f8301
--- /dev/null
+++ b/R/rdeephaven/.Rbuildignore
@@ -0,0 +1,5 @@
+^_pkgdown\.yml$
+^docs$
+^pkgdown$
+^doc$
+^Meta$
diff --git a/R/rdeephaven/.gitignore b/R/rdeephaven/.gitignore
index ad4856e82ab..dd54a26f35b 100644
--- a/R/rdeephaven/.gitignore
+++ b/R/rdeephaven/.gitignore
@@ -5,3 +5,7 @@ lib/cpp-dependencies/src
 lib/cpp-dependencies/env.sh
 *.o
 *.so
+docs
+inst/doc
+/doc/
+/Meta/
diff --git a/R/rdeephaven/DESCRIPTION b/R/rdeephaven/DESCRIPTION
index b3bdc2cb3b4..bdb670b86da 100644
--- a/R/rdeephaven/DESCRIPTION
+++ b/R/rdeephaven/DESCRIPTION
@@ -16,6 +16,12 @@ License: Apache License (== 2.0)
 Depends: R (>= 3.5.3)
 Imports: Rcpp (>= 1.0.10), arrow (>= 12.0.0), R6 (>= 2.5.0), dplyr (>= 1.1.0), utils (>= 3.5.3)
 LinkingTo: Rcpp
-Suggests: testthat (>= 3.0.0), lubridate (>= 1.9.0), zoo (>= 1.8-0)
+Suggests: 
+    testthat (>= 3.0.0),
+    lubridate (>= 1.9.0),
+    zoo (>= 1.8-0),
+    knitr,
+    rmarkdown
 Config/testthat/edition: 3
 RoxygenNote: 7.2.3
+VignetteBuilder: knitr
diff --git a/R/rdeephaven/R/agg_ops_wrapper.R b/R/rdeephaven/R/agg_ops_wrapper.R
index 744fe57ac60..373e3292826 100644
--- a/R/rdeephaven/R/agg_ops_wrapper.R
+++ b/R/rdeephaven/R/agg_ops_wrapper.R
@@ -1,118 +1,11 @@
-# An AggOp represents an aggregation operator that can be passed to agg_by() or agg_all_by(). This is the return type
-# of all of the agg functions. It is a wrapper around an Rcpp_INTERNAL_AggOp, which itself is a wrapper around a
-# C++ AggregateWrapper, which is finally a wrapper around a C++ Aggregate. See rdeephaven/src/client.cpp for details.
-# Note that AggOps should not be instantiated directly by user code, but rather by provided agg functions.
-
-
-#' @name
-#' AggBy
-#' @title
-#' Aggregations in Deephaven
-#' @md
-#' @usage NULL
-#' @format NULL
-#' @docType class
-#'
-#' @description
-#' Table aggregations are a quintessential feature of Deephaven. You can apply as many aggregations as
-#' needed to static tables _or_ streaming tables, and if the parent tables are streaming, the resulting aggregated
-#' tables will update alongside their parent tables. It is also very easy to perform _grouped_ aggregations, which
-#' allow you to aggregate tables on a per-group basis.
-#'
-#' @section
-#' Apply aggregations to a table:
-#' There are two methods for performing aggregations on a table, `agg_by()` and `agg_all_by()`. `agg_by()` allows you to
-#' perform many aggregations on specified columns, while `agg_all_by()` allows you to perform a single aggregation to
-#' every non-grouping column in the table. Both methods have an optional `by` parameter that is used to specify grouping columns.
-#' Here are some details on each method:
-#'
-#' - `TableHandle$agg_by(aggs, by)`: Creates a new table containing grouping columns and grouped data.
-#'   The resulting grouped data is defined by the aggregation(s) specified.
-#' - `TableHandle$agg_all_by(agg, by)`: Creates a new table containing grouping columns and grouped data.
-#'   The resulting grouped data is defined by the aggregation specified. This method applies the aggregation to all
-#'   non-grouping columns of the table, so it can only accept one aggregation at a time.
-#'
-#' The `agg_by()` and `agg_all_by()` methods themselves do not know anything about the columns on which you want to
-#' perform aggregations. Rather, the desired columns are passed to individual `agg` functions, enabling you to apply
-#' various kinds of aggregations to different columns or groups of columns as needed.
-#'
-#' @section
-#' `agg` functions:
-#' `agg` functions are used to perform aggregation calculations on grouped data by passing them to `agg_by()` or
-#' `agg_all_by()`. These functions are _generators_, meaning they return _functions_ that the Deephaven engine knows
-#' how to interpret. We call the functions that they return [`AggOp`][AggOp]s. These `AggOp`s are not R-level functions,
-#' but Deephaven-specific data types that perform all of the intensive calculations. Here is a list of all `agg` functions
-#' available in Deephaven:
-#'
-#' - [`agg_first()`][agg_first]
-#' - [`agg_last()`][agg_last]
-#' - [`agg_min()`][agg_min]
-#' - [`agg_max()`][agg_max]
-#' - [`agg_sum()`][agg_sum]
-#' - [`agg_abs_sum()`][agg_abs_sum]
-#' - [`agg_avg()`][agg_avg]
-#' - [`agg_w_avg()`][agg_w_avg]
-#' - [`agg_median()`][agg_median]
-#' - [`agg_var()`][agg_var]
-#' - [`agg_std()`][agg_std]
-#' - [`agg_percentile()`][agg_percentile]
-#' - [`agg_count()`][agg_count]
-#'
-#' For more details on each aggregation function, click on one of the methods above or see the reference documentation
-#' by running `?agg_first`, `?agg_last`, etc.
-#'
-#' @examples
-#' \dontrun{
-#' library(rdeephaven)
-#'
-#' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
-#'
-#' # create data frame, push to server, retrieve TableHandle
-#' df <- data.frame(
-#'   X = c("A", "B", "A", "C", "B", "A", "B", "B", "C"),
-#'   Y = c("M", "N", "O", "N", "P", "M", "O", "P", "M"),
-#'   Number1 = c(100, -44, 49, 11, -66, 50, 29, 18, -70),
-#'   Number2 = c(-55, 76, 20, 130, 230, -50, 73, 137, 214)
-#' )
-#' th <- client$import_table(df)
-#'
-#' # get first and last elements of each column
-#' th1 <- th$
-#'   agg_by(agg_first(c("XFirst = X", "YFirst = Y", "Number1First = Number1", "Number2First = Number2")),
-#'          agg_last(c("XLast = X", "YLast = Y", "Number1Last = Number1", "Number2Last = Number2")))
-#'
-#' # compute mean and standard deviation of Number1 and Number2, grouped by X
-#' th2 <- th$
-#'   agg_by(
-#'     c(agg_avg(c("Number1Avg = Number1", "Number2Avg = Number2")),
-#'       agg_std(c("Number1Std = Number1", "Number2Std = Number2"))),
-#'     by="X")
-#'
-#' # compute maximum of all non-grouping columns, grouped by X and Y
-#' th3 <- th$
-#'   agg_all_by(agg_max(), by=c("X", "Y"))
-#'
-#' # compute minimum and maximum of Number1 and Number2 respectively grouped by Y
-#' th4 <- th$
-#'   agg_by(
-#'     c(agg_min("Number1Min = Number1"),
-#'       agg_max("Number2Max = Number2")),
-#'     by="Y")
-#'
-#' client$close()
-#' }
-#'
-NULL
-
-
-#' Name AggOp
+#' @name AggOp
 #' @title Deephaven AggOps
 #' @md
 #' @description
-#' An `AggOp` is the return type of one of Deephaven's [`agg`][AggBy] functions. It is a function that performs the
+#' An `AggOp` is the return type of one of Deephaven's `agg` functions. It is a function that performs the
 #' computation specified by the `agg` function. These are intended to be passed directly to `agg_by()` or `agg_all_by()`,
-#' and should never be instantiated directly be user code.
+#' and should never be instantiated directly by user code. For more information, see the
+#' vignette on `agg` functions with `vignette("agg_by")`.
 #'
 #' If multiple tables have the same schema and the same aggregations need to be applied to each table, saving these
 #' objects directly in a variable may be useful to avoid having to re-create them each time:
@@ -123,7 +16,7 @@ NULL
 #' result1 <- th1$agg_by(aggregations, by="Group")
 #' result2 <- th2$agg_by(aggregations, by="Group")
 #' ```
-#' In this example, `aggregations` would be a vector of two `AggOp`s that can be reused in multiple calls to `agg_by()`.
+#' In this example, `aggregations` would be a vector of two AggOps that can be reused in multiple calls to `agg_by()`.
 #'
 #' @usage NULL
 #' @format NULL
@@ -166,17 +59,20 @@ AggOp <- R6Class("AggOp",
 #' function called an [`AggOp`][AggOp] intended to be used in a call to `agg_by()` or `agg_all_by()`. This detail is
 #' typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 #' as the output of an `agg` function can otherwise seem unexpected.
+#' 
+#' For more information, see the vignette on `agg` functions by running
+#' `vignette("agg_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to aggregate. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to aggregate all non-grouping columns, which is only valid in the `agg_all_by()` operation.
-#' @return `AggOp` to be used in a call to `agg_by()` or `agg_all_by()`.
+#' @return [`AggOp`][AggOp] to be used in a call to `agg_by()` or `agg_all_by()`.
 #'
 #' @examples
 #' \dontrun{
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
@@ -193,11 +89,11 @@ AggOp <- R6Class("AggOp",
 #'
 #' # get first elements of Y, Number1, and Number2 grouped by X
 #' th2 <- th$
-#'   agg_by(agg_first(c("Y", "Number1", "Number2")), by="X")
+#'   agg_by(agg_first(c("Y", "Number1", "Number2")), by = "X")
 #'
 #' # get first elements of Number1 and Number2 grouped by X and Y
 #' th3 <- th
-#'   agg_by(agg_first(c("Number1", "Number2")), by=c("X", "Y"))
+#' agg_by(agg_first(c("Number1", "Number2")), by = c("X", "Y"))
 #'
 #' client$close()
 #' }
@@ -205,7 +101,7 @@ AggOp <- R6Class("AggOp",
 #' @export
 agg_first <- function(cols = character()) {
   verify_string("cols", cols, FALSE)
-  return(AggOp$new(INTERNAL_agg_first, "agg_first", cols=cols))
+  return(AggOp$new(INTERNAL_agg_first, "agg_first", cols = cols))
 }
 
 #' @name
@@ -227,17 +123,20 @@ agg_first <- function(cols = character()) {
 #' function called an [`AggOp`][AggOp] intended to be used in a call to `agg_by()` or `agg_all_by()`. This detail is
 #' typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 #' as the output of an `agg` function can otherwise seem unexpected.
+#' 
+#' For more information, see the vignette on `agg` functions by running
+#' `vignette("agg_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to aggregate. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to aggregate all non-grouping columns, which is only valid in the `agg_all_by()` operation.
-#' @return `AggOp` to be used in a call to `agg_by()` or `agg_all_by()`.
+#' @return [`AggOp`][AggOp] to be used in a call to `agg_by()` or `agg_all_by()`.
 #'
 #' @examples
 #' \dontrun{
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
@@ -254,11 +153,11 @@ agg_first <- function(cols = character()) {
 #'
 #' # get last elements of Y, Number1, and Number2 grouped by X
 #' th2 <- th$
-#'   agg_by(agg_last(c("Y", "Number1", "Number2")), by="X")
+#'   agg_by(agg_last(c("Y", "Number1", "Number2")), by = "X")
 #'
 #' # get last elements of Number1 and Number2 grouped by X and Y
 #' th3 <- th$
-#'   agg_by(agg_last(c("Number1", "Number2")), by=c("X", "Y"))
+#'   agg_by(agg_last(c("Number1", "Number2")), by = c("X", "Y"))
 #'
 #' client$close()
 #' }
@@ -266,7 +165,7 @@ agg_first <- function(cols = character()) {
 #' @export
 agg_last <- function(cols = character()) {
   verify_string("cols", cols, FALSE)
-  return(AggOp$new(INTERNAL_agg_last, "agg_last", cols=cols))
+  return(AggOp$new(INTERNAL_agg_last, "agg_last", cols = cols))
 }
 
 #' @name
@@ -288,17 +187,20 @@ agg_last <- function(cols = character()) {
 #' function called an [`AggOp`][AggOp] intended to be used in a call to `agg_by()` or `agg_all_by()`. This detail is
 #' typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 #' as the output of an `agg` function can otherwise seem unexpected.
+#' 
+#' For more information, see the vignette on `agg` functions by running
+#' `vignette("agg_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to aggregate. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to aggregate all non-grouping columns, which is only valid in the `agg_all_by()` operation.
-#' @return `AggOp` to be used in a call to `agg_by()` or `agg_all_by()`.
+#' @return [`AggOp`][AggOp] to be used in a call to `agg_by()` or `agg_all_by()`.
 #'
 #' @examples
 #' \dontrun{
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
@@ -315,11 +217,11 @@ agg_last <- function(cols = character()) {
 #'
 #' # get minimum elements of Number1 and Number2 grouped by X
 #' th2 <- th$
-#'   agg_by(agg_min(c("Number1", "Number2")), by="X")
+#'   agg_by(agg_min(c("Number1", "Number2")), by = "X")
 #'
 #' # get minimum elements of Number1 and Number2 grouped by X and Y
 #' th3 <- th$
-#'   agg_by(agg_min(c("Number1", "Number2")), by=c("X", "Y"))
+#'   agg_by(agg_min(c("Number1", "Number2")), by = c("X", "Y"))
 #'
 #' client$close()
 #' }
@@ -327,7 +229,7 @@ agg_last <- function(cols = character()) {
 #' @export
 agg_min <- function(cols = character()) {
   verify_string("cols", cols, FALSE)
-  return(AggOp$new(INTERNAL_agg_min, "agg_min", cols=cols))
+  return(AggOp$new(INTERNAL_agg_min, "agg_min", cols = cols))
 }
 
 #' @name
@@ -349,17 +251,20 @@ agg_min <- function(cols = character()) {
 #' function called an [`AggOp`][AggOp] intended to be used in a call to `agg_by()` or `agg_all_by()`. This detail is
 #' typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 #' as the output of an `agg` function can otherwise seem unexpected.
+#' 
+#' For more information, see the vignette on `agg` functions by running
+#' `vignette("agg_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to aggregate. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to aggregate all non-grouping columns, which is only valid in the `agg_all_by()` operation.
-#' @return `AggOp` to be used in a call to `agg_by()` or `agg_all_by()`.
+#' @return [`AggOp`][AggOp] to be used in a call to `agg_by()` or `agg_all_by()`.
 #'
 #' @examples
 #' \dontrun{
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
@@ -376,11 +281,11 @@ agg_min <- function(cols = character()) {
 #'
 #' # get maximum elements of Number1 and Number2 grouped by X
 #' th2 <- th$
-#'   agg_by(agg_max(c("Number1", "Number2")), by="X")
+#'   agg_by(agg_max(c("Number1", "Number2")), by = "X")
 #'
 #' # get maximum elements of Number1 and Number2 grouped by X and Y
 #' th3 <- th$
-#'   agg_by(agg_max(c("Number1", "Number2")), by=c("X", "Y"))
+#'   agg_by(agg_max(c("Number1", "Number2")), by = c("X", "Y"))
 #'
 #' client$close()
 #' }
@@ -388,7 +293,7 @@ agg_min <- function(cols = character()) {
 #' @export
 agg_max <- function(cols = character()) {
   verify_string("cols", cols, FALSE)
-  return(AggOp$new(INTERNAL_agg_max, "agg_max", cols=cols))
+  return(AggOp$new(INTERNAL_agg_max, "agg_max", cols = cols))
 }
 
 #' @name
@@ -411,16 +316,19 @@ agg_max <- function(cols = character()) {
 #' typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 #' as the output of an `agg` function can otherwise seem unexpected.
 #'
+#' For more information, see the vignette on `agg` functions by running
+#' `vignette("agg_by")`.
+#'
 #' @param cols String or list of strings denoting the column(s) to aggregate. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to aggregate all non-grouping columns, which is only valid in the `agg_all_by()` operation.
-#' @return `AggOp` to be used in a call to `agg_by()` or `agg_all_by()`.
+#' @return [`AggOp`][AggOp] to be used in a call to `agg_by()` or `agg_all_by()`.
 #'
 #' @examples
 #' \dontrun{
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
@@ -437,11 +345,11 @@ agg_max <- function(cols = character()) {
 #'
 #' # compute sum of Number1 and Number2 grouped by X
 #' th2 <- th$
-#'   agg_by(agg_sum(c("Number1", "Number2")), by="X")
+#'   agg_by(agg_sum(c("Number1", "Number2")), by = "X")
 #'
 #' # compute sum of Number1 and Number2 grouped by X and Y
 #' th3 <- th$
-#'   agg_by(agg_sum(c("Number1", "Number2")), by=c("X", "Y"))
+#'   agg_by(agg_sum(c("Number1", "Number2")), by = c("X", "Y"))
 #'
 #' client$close()
 #' }
@@ -449,7 +357,7 @@ agg_max <- function(cols = character()) {
 #' @export
 agg_sum <- function(cols = character()) {
   verify_string("cols", cols, FALSE)
-  return(AggOp$new(INTERNAL_agg_sum, "agg_sum", cols=cols))
+  return(AggOp$new(INTERNAL_agg_sum, "agg_sum", cols = cols))
 }
 
 #' @name
@@ -471,17 +379,20 @@ agg_sum <- function(cols = character()) {
 #' function called an [`AggOp`][AggOp] intended to be used in a call to `agg_by()` or `agg_all_by()`. This detail is
 #' typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 #' as the output of an `agg` function can otherwise seem unexpected.
+#' 
+#' For more information, see the vignette on `agg` functions by running
+#' `vignette("agg_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to aggregate. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to aggregate all non-grouping columns, which is only valid in the `agg_all_by()` operation.
-#' @return `AggOp` to be used in a call to `agg_by()` or `agg_all_by()`.
+#' @return [`AggOp`][AggOp] to be used in a call to `agg_by()` or `agg_all_by()`.
 #'
 #' @examples
 #' \dontrun{
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
@@ -498,11 +409,11 @@ agg_sum <- function(cols = character()) {
 #'
 #' # compute absolute sum of Number1 and Number2 grouped by X
 #' th2 <- th$
-#'   agg_by(agg_abs_sum(c("Number1", "Number2")), by="X")
+#'   agg_by(agg_abs_sum(c("Number1", "Number2")), by = "X")
 #'
 #' # compute absolute sum of Number1 and Number2 grouped by X and Y
 #' th3 <- th$
-#'   agg_by(agg_abs_sum(c("Number1", "Number2")), by=c("X", "Y"))
+#'   agg_by(agg_abs_sum(c("Number1", "Number2")), by = c("X", "Y"))
 #'
 #' client$close()
 #' }
@@ -510,7 +421,7 @@ agg_sum <- function(cols = character()) {
 #' @export
 agg_abs_sum <- function(cols = character()) {
   verify_string("cols", cols, FALSE)
-  return(AggOp$new(INTERNAL_agg_abs_sum, "agg_abs_sum", cols=cols))
+  return(AggOp$new(INTERNAL_agg_abs_sum, "agg_abs_sum", cols = cols))
 }
 
 #' @name
@@ -532,17 +443,20 @@ agg_abs_sum <- function(cols = character()) {
 #' function called an [`AggOp`][AggOp] intended to be used in a call to `agg_by()` or `agg_all_by()`. This detail is
 #' typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 #' as the output of an `agg` function can otherwise seem unexpected.
+#' 
+#' For more information, see the vignette on `agg` functions by running
+#' `vignette("agg_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to aggregate. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to aggregate all non-grouping columns, which is only valid in the `agg_all_by()` operation.
-#' @return `AggOp` to be used in a call to `agg_by()` or `agg_all_by()`.
+#' @return [`AggOp`][AggOp] to be used in a call to `agg_by()` or `agg_all_by()`.
 #'
 #' @examples
 #' \dontrun{
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
@@ -559,11 +473,11 @@ agg_abs_sum <- function(cols = character()) {
 #'
 #' # compute average of Number1 and Number2 grouped by X
 #' th2 <- th$
-#'   agg_by(agg_avg(c("Number1", "Number2")), by="X")
+#'   agg_by(agg_avg(c("Number1", "Number2")), by = "X")
 #'
 #' # compute average of Number1 and Number2 grouped by X and Y
 #' th3 <- th$
-#'   agg_by(agg_avg(c("Number1", "Number2")), by=c("X", "Y"))
+#'   agg_by(agg_avg(c("Number1", "Number2")), by = c("X", "Y"))
 #'
 #' client$close()
 #' }
@@ -571,7 +485,7 @@ agg_abs_sum <- function(cols = character()) {
 #' @export
 agg_avg <- function(cols = character()) {
   verify_string("cols", cols, FALSE)
-  return(AggOp$new(INTERNAL_agg_avg, "agg_avg", cols=cols))
+  return(AggOp$new(INTERNAL_agg_avg, "agg_avg", cols = cols))
 }
 
 #' @name
@@ -593,18 +507,21 @@ agg_avg <- function(cols = character()) {
 #' function called an [`AggOp`][AggOp] intended to be used in a call to `agg_by()` or `agg_all_by()`. This detail is
 #' typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 #' as the output of an `agg` function can otherwise seem unexpected.
+#' 
+#' For more information, see the vignette on `agg` functions by running
+#' `vignette("agg_by")`.
 #'
 #' @param wcol String denoting the column to use for weights. This must be a numeric column.
 #' @param cols String or list of strings denoting the column(s) to aggregate. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to aggregate all non-grouping columns, which is only valid in the `agg_all_by()` operation.
-#' @return `AggOp` to be used in a call to `agg_by()` or `agg_all_by()`.
+#' @return [`AggOp`][AggOp] to be used in a call to `agg_by()` or `agg_all_by()`.
 #'
 #' @examples
 #' \dontrun{
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
@@ -617,15 +534,15 @@ agg_avg <- function(cols = character()) {
 #'
 #' # compute weighted average of Number1, weighted by Number2
 #' th1 <- th$
-#'   agg_by(agg_w_avg(wcol="Number2", cols="Number1"))
+#'   agg_by(agg_w_avg(wcol = "Number2", cols = "Number1"))
 #'
 #' # compute weighted average of Number1, weighted by Number2, grouped by X
 #' th2 <- th$
-#'   agg_by(agg_w_avg(wcol="Number2", cols="Number1", by="X"))
+#'   agg_by(agg_w_avg(wcol = "Number2", cols = "Number1", by = "X"))
 #'
 #' # compute weighted average of Number1, weighted by Number2, grouped by X and Y
 #' th3 <- th$
-#'   agg_by(agg_w_avg(wcol="Number2", cols="Number1", by=c("X", "Y")))
+#'   agg_by(agg_w_avg(wcol = "Number2", cols = "Number1", by = c("X", "Y")))
 #'
 #' client$close()
 #' }
@@ -634,7 +551,7 @@ agg_avg <- function(cols = character()) {
 agg_w_avg <- function(wcol, cols = character()) {
   verify_string("wcol", wcol, TRUE)
   verify_string("cols", cols, FALSE)
-  return(AggOp$new(INTERNAL_agg_w_avg, "agg_w_avg", wcol=wcol, cols=cols))
+  return(AggOp$new(INTERNAL_agg_w_avg, "agg_w_avg", wcol = wcol, cols = cols))
 }
 
 #' @name
@@ -656,17 +573,20 @@ agg_w_avg <- function(wcol, cols = character()) {
 #' function called an [`AggOp`][AggOp] intended to be used in a call to `agg_by()` or `agg_all_by()`. This detail is
 #' typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 #' as the output of an `agg` function can otherwise seem unexpected.
+#' 
+#' For more information, see the vignette on `agg` functions by running
+#' `vignette("agg_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to aggregate. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to aggregate all non-grouping columns, which is only valid in the `agg_all_by()` operation.
-#' @return `AggOp` to be used in a call to `agg_by()` or `agg_all_by()`.
+#' @return [`AggOp`][AggOp] to be used in a call to `agg_by()` or `agg_all_by()`.
 #'
 #' @examples
 #' \dontrun{
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
@@ -683,11 +603,11 @@ agg_w_avg <- function(wcol, cols = character()) {
 #'
 #' # compute median of Number1 and Number2 grouped by X
 #' th2 <- th$
-#'   agg_by(agg_median(c("Number1", "Number2")), by="X")
+#'   agg_by(agg_median(c("Number1", "Number2")), by = "X")
 #'
 #' # compute median of Number1 and Number2 grouped by X and Y
 #' th3 <- th$
-#'   agg_by(agg_median(c("Number1", "Number2")), by=c("X", "Y"))
+#'   agg_by(agg_median(c("Number1", "Number2")), by = c("X", "Y"))
 #'
 #' client$close()
 #' }
@@ -695,7 +615,7 @@ agg_w_avg <- function(wcol, cols = character()) {
 #' @export
 agg_median <- function(cols = character()) {
   verify_string("cols", cols, FALSE)
-  return(AggOp$new(INTERNAL_agg_median, "agg_median", cols=cols))
+  return(AggOp$new(INTERNAL_agg_median, "agg_median", cols = cols))
 }
 
 #' @name
@@ -717,17 +637,20 @@ agg_median <- function(cols = character()) {
 #' function called an [`AggOp`][AggOp] intended to be used in a call to `agg_by()` or `agg_all_by()`. This detail is
 #' typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 #' as the output of an `agg` function can otherwise seem unexpected.
+#' 
+#' For more information, see the vignette on `agg` functions by running
+#' `vignette("agg_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to aggregate. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to aggregate all non-grouping columns, which is only valid in the `agg_all_by()` operation.
-#' @return `AggOp` to be used in a call to `agg_by()` or `agg_all_by()`.
+#' @return [`AggOp`][AggOp] to be used in a call to `agg_by()` or `agg_all_by()`.
 #'
 #' @examples
 #' \dontrun{
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
@@ -744,11 +667,11 @@ agg_median <- function(cols = character()) {
 #'
 #' # compute variance of Number1 and Number2 grouped by X
 #' th2 <- th$
-#'   agg_by(agg_var(c("Number1", "Number2")), by="X")
+#'   agg_by(agg_var(c("Number1", "Number2")), by = "X")
 #'
 #' # compute variance of Number1 and Number2 grouped by X and Y
 #' th3 <- th$
-#'   agg_by(agg_var(c("Number1", "Number2")), by=c("X", "Y"))
+#'   agg_by(agg_var(c("Number1", "Number2")), by = c("X", "Y"))
 #'
 #' client$close()
 #' }
@@ -756,7 +679,7 @@ agg_median <- function(cols = character()) {
 #' @export
 agg_var <- function(cols = character()) {
   verify_string("cols", cols, FALSE)
-  return(AggOp$new(INTERNAL_agg_var, "agg_var", cols=cols))
+  return(AggOp$new(INTERNAL_agg_var, "agg_var", cols = cols))
 }
 
 #' @name
@@ -778,17 +701,20 @@ agg_var <- function(cols = character()) {
 #' function called an [`AggOp`][AggOp] intended to be used in a call to `agg_by()` or `agg_all_by()`. This detail is
 #' typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 #' as the output of an `agg` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `agg` functions by running
+#' `vignette("agg_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to aggregate. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to aggregate all non-grouping columns, which is only valid in the `agg_all_by()` operation.
-#' @return `AggOp` to be used in a call to `agg_by()` or `agg_all_by()`.
+#' @return [`AggOp`][AggOp] to be used in a call to `agg_by()` or `agg_all_by()`.
 #'
 #' @examples
 #' \dontrun{
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
@@ -805,11 +731,11 @@ agg_var <- function(cols = character()) {
 #'
 #' # compute standard deviation of Number1 and Number2 grouped by X
 #' th2 <- th$
-#'   agg_by(agg_std(c("Number1", "Number2")), by="X")
+#'   agg_by(agg_std(c("Number1", "Number2")), by = "X")
 #'
 #' # compute standard deviation of Number1 and Number2 grouped by X and Y
 #' th3 <- th$
-#'   agg_by(agg_std(c("Number1", "Number2")), by=c("X", "Y"))
+#'   agg_by(agg_std(c("Number1", "Number2")), by = c("X", "Y"))
 #'
 #' client$close()
 #' }
@@ -817,7 +743,7 @@ agg_var <- function(cols = character()) {
 #' @export
 agg_std <- function(cols = character()) {
   verify_string("cols", cols, FALSE)
-  return(AggOp$new(INTERNAL_agg_std, "agg_std", cols=cols))
+  return(AggOp$new(INTERNAL_agg_std, "agg_std", cols = cols))
 }
 
 #' @name
@@ -839,18 +765,21 @@ agg_std <- function(cols = character()) {
 #' function called an [`AggOp`][AggOp] intended to be used in a call to `agg_by()` or `agg_all_by()`. This detail is
 #' typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 #' as the output of an `agg` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `agg` functions by running
+#' `vignette("agg_by")`.
 #'
 #' @param percentile Numeric scalar between 0 and 1 denoting the percentile to compute.
 #' @param cols String or list of strings denoting the column(s) to aggregate. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to aggregate all non-grouping columns, which is only valid in the `agg_all_by()` operation.
-#' @return `AggOp` to be used in a call to `agg_by()` or `agg_all_by()`.
+#' @return [`AggOp`][AggOp] to be used in a call to `agg_by()` or `agg_all_by()`.
 #'
 #' @examples
 #' \dontrun{
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
@@ -863,15 +792,15 @@ agg_std <- function(cols = character()) {
 #'
 #' # compute 20th percentile of Number1 and Number2
 #' th1 <- th$
-#'   agg_by(agg_percentile(percentile=0.2, cols=c("Number1", "Number2")))
+#'   agg_by(agg_percentile(percentile = 0.2, cols = c("Number1", "Number2")))
 #'
 #' # compute 50th percentile of Number1 and Number2 grouped by X
 #' th2 <- th$
-#'   agg_by(agg_percentile(percentile=0.5, cols=c("Number1", "Number2")), by="X")
+#'   agg_by(agg_percentile(percentile = 0.5, cols = c("Number1", "Number2")), by = "X")
 #'
 #' # compute 75th percentile of Number1 and Number2 grouped by X and Y
 #' th3 <- th$
-#'   agg_by(agg_percentile(percentile=0.75, cols=c("Number1", "Number2")), by=c("X", "Y"))
+#'   agg_by(agg_percentile(percentile = 0.75, cols = c("Number1", "Number2")), by = c("X", "Y"))
 #'
 #' client$close()
 #' }
@@ -880,7 +809,7 @@ agg_std <- function(cols = character()) {
 agg_percentile <- function(percentile, cols = character()) {
   verify_in_unit_interval("percentile", percentile, TRUE)
   verify_string("cols", cols, FALSE)
-  return(AggOp$new(INTERNAL_agg_percentile, "agg_percentile", percentile=percentile, cols=cols))
+  return(AggOp$new(INTERNAL_agg_percentile, "agg_percentile", percentile = percentile, cols = cols))
 }
 
 #' @name
@@ -902,18 +831,21 @@ agg_percentile <- function(percentile, cols = character()) {
 #' function called an [`AggOp`][AggOp] intended to be used in a call to `agg_by()` or `agg_all_by()`. This detail is
 #' typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 #' as the output of an `agg` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `agg` functions by running
+#' `vignette("agg_by")`.
 #'
 #' Note that this operation is not supported in `agg_all_by()`.
 #'
 #' @param col String denoting the name of the new column to hold the counts of each aggregation group.
-#' @return `AggOp` to be used in a call to `agg_by()`.
+#' @return [`AggOp`][AggOp] to be used in a call to `agg_by()`.
 #'
 #' @examples
 #' \dontrun{
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
@@ -926,11 +858,11 @@ agg_percentile <- function(percentile, cols = character()) {
 #'
 #' # count number of elements in each group when grouped by X, name resulting column "count"
 #' th1 <- th$
-#'   agg_by(agg_count("count"), by="X")
+#'   agg_by(agg_count("count"), by = "X")
 #'
 #' # count number of elements in each group when grouped by X and Y, name resulting column "CountingCol"
 #' th2 <- th$
-#'   agg_by(agg_count("CountingCol"), by=c("X", "Y"))
+#'   agg_by(agg_count("CountingCol"), by = c("X", "Y"))
 #'
 #' client$close()
 #' }
@@ -938,5 +870,5 @@ agg_percentile <- function(percentile, cols = character()) {
 #' @export
 agg_count <- function(col) {
   verify_string("col", col, TRUE)
-  return(AggOp$new(INTERNAL_agg_count, "agg_count", col=col))
-}
\ No newline at end of file
+  return(AggOp$new(INTERNAL_agg_count, "agg_count", col = col))
+}
diff --git a/R/rdeephaven/R/client_wrapper.R b/R/rdeephaven/R/client_wrapper.R
index 3d91ce9bc7a..538bede0b92 100644
--- a/R/rdeephaven/R/client_wrapper.R
+++ b/R/rdeephaven/R/client_wrapper.R
@@ -13,7 +13,7 @@
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create a data frame and push it to the server, retrieve a reference to it as a TableHandle
 #' df <- data.frame(
@@ -66,25 +66,26 @@ Client <- R6Class("Client",
       args <- list(...)
       if (length(args) == 1) {
         first_arg <- args[[1]]
-        first_arg_class = first_class(first_arg)
+        first_arg_class <- first_class(first_arg)
         if (first_arg_class != "character" && first_arg_class != "list") {
           if (first_arg_class != "externalptr") {
             stop(paste0(
               "Client initialize first argument must be ",
-              "either a string or an Rcpp::XPtr object."))
+              "either a string or an Rcpp::XPtr object."
+            ))
           }
           return(self$initialize_for_xptr(first_arg))
         }
       }
       return(do.call(self$initialize_for_target, args))
     },
-    
+
     #' @description
     #' Initializes a Client object using a pointer to an existing client connection.
     #' @param xptr External pointer to an existing client connection.
     initialize_for_xptr = function(xptr) {
       verify_type("xptr", xptr, TRUE, "externalptr", "an XPtr")
-      self$.internal_rcpp_object = new(INTERNAL_Client, xptr)
+      self$.internal_rcpp_object <- new(INTERNAL_Client, xptr)
     },
 
     #' @description
@@ -112,18 +113,17 @@ Client <- R6Class("Client",
     #' grpc channel creation. Defaults to an empty list, which implies not using any channel options.
     #' @param extra_headers List of name-value pairs for additional headers and values
     #' to add to server requests. Defaults to an empty list, which implies not using any extra headers.
-    initialize_for_target = function(
-                          target,
-                          auth_type = "anonymous",
-                          username = "",
-                          password = "",
-                          auth_token = "",
-                          session_type = "python",
-                          use_tls = FALSE,
-                          tls_root_certs = "",
-                          int_options = list(),
-                          string_options = list(),
-                          extra_headers = list()) {
+    initialize_for_target = function(target,
+                                     auth_type = "anonymous",
+                                     username = "",
+                                     password = "",
+                                     auth_token = "",
+                                     session_type = "python",
+                                     use_tls = FALSE,
+                                     tls_root_certs = "",
+                                     int_options = list(),
+                                     string_options = list(),
+                                     extra_headers = list()) {
       options <- new(INTERNAL_ClientOptions)
 
       verify_string("target", target, TRUE)
diff --git a/R/rdeephaven/R/exports.R b/R/rdeephaven/R/exports.R
index 2fbe0011cb2..81792ad68c5 100644
--- a/R/rdeephaven/R/exports.R
+++ b/R/rdeephaven/R/exports.R
@@ -1,107 +1 @@
-#' @import Rcpp
-#' @useDynLib rdeephaven, .registration = TRUE
-#' @importFrom Rcpp evalCpp
-#'
-#' @importFrom arrow arrow_table as_arrow_table as_record_batch_reader
-#' @importFrom R6 R6Class
-#' @importFrom dplyr as_tibble as_data_frame
-#' @importFrom utils head tail
-
-#' @name
-#' rdeephaven
-#' @title
-#' The Deephaven Community R Client
-#' @md
-#' @usage NULL
-#' @format NULL
-#'
-#' @description
-#' The Deephaven Community R Client provides an R interface to Deephaven's powerful real-time data engine, [_Deephaven Core_](https://deephaven.io/community/).
-#' To use this package, you must have a Deephaven server running and be able to connect to it. For more information on
-#' how to set up a Deephaven server, see the documentation [here](https://deephaven.io/core/docs/tutorials/quickstart/).
-#'
-#' @section
-#' Building blocks of the Deephaven R Client:
-#' There are two primary R classes that make up the Deephaven R Client, the [`Client`][Client] class and the
-#' [`TableHandle`][TableHandle] class. The `Client` class is used to establish a connection to the Deephaven server with
-#' its constructor `Client$new()`, and to send server requests, such as running a script via `run_script()`, or pushing
-#' local data to the server via `import_table()`. Basic usage of the `Client` class may look something like this:
-#' ```r
-#' library(rdeephaven)
-#'
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
-#' ```
-#' Many of these server requests end up creating or modifying tables that live on the server. To keep track of these
-#' tables, the R client retrieves references to them, and wraps these references in `TableHandle` objects. These
-#' TableHandles have a host of methods that mirror server-side table operations, such as `head()`, `tail()`, `update()`,
-#' and so on. So, you can typically use TableHandles _as if_ they are tables themselves, and all of the corresponding
-#' methods that you call on them will be executed on the server. Here is a simple example of pushing data to the server,
-#' retrieving a TableHandle to the resulting table, and applying some basic table operations to the table:
-#' ```r
-#' df1 <- data.frame(x=1:10, y=11:20)
-#' th1 <- client$import_table(df1)
-#'
-#' th2 <- th1$
-#'   update("z = x + y")$
-#'   where("z % 4 == 0")
-#' ```
-#' TableHandles also support common functional methods for converting server-side Deephaven tables to R objects stored in
-#' local memory such as `as.data.frame()`, `as_tibble()`, and `as_arrow_table()`. Here's an example of converting the
-#' table created above to an R data frame and verifying that other functional methods work as expected:
-#' ```r
-#' df2 <- as.data.frame(th2)
-#'
-#' print(nrow(th2) == nrow(df2))
-#' print(ncol(th2) == ncol(df2))
-#' print(dim(th2) == dim(df2))
-#' print(all(as.data.frame(head(th2, 2)) == head(df2, 2)))
-#' print(all(as.data.frame(tail(th2, 2)) == tail(df2, 2)))
-#' ```
-#' For more information on these classes and all of their methods, see the reference documentation for [`Client`][Client]
-#' and [`TableHandle`][TableHandle] by clicking on their class names, or by running `?Client` or `?TableHandle`.
-#'
-#' @section
-#' Real-time data analysis:
-#' Since TableHandles are references to tables living on the Deephaven server, they may refer to streaming tables, or
-#' tables that are receiving new data periodically (typically once per second). Here's a simple example of creating a
-#' table that adds a new row every second:
-#' ```r
-#' th3 <- client$time_table("PT1s")$
-#'   update(c("X = ii", "Y = sin(X)"))
-#' ```
-#' R objects like data frames or Dplyr tibbles do not have this streaming property - they are always static objects
-#' stored in memory. However, a TableHandle referring to a streaming table may be converted to a data frame or tibble at
-#' any time, and the resulting object will be a snapshot of the table at the time of conversion. This means that you can
-#' use the Deephaven R Client to perform real-time data analysis on streaming data! Here, we make a simple plot of the
-#' ticking table, and call it three times to demonstrate the dynamic nature of the table:
-#' ```r
-#' plot(as.data.frame(th3)$X, as.data.frame(th3)$Y, type="l")
-#' Sys.sleep(5)
-#' plot(as.data.frame(th3)$X, as.data.frame(th3)$Y, type="l")
-#' Sys.sleep(5)
-#' plot(as.data.frame(th3)$X, as.data.frame(th3)$Y, type="l")
-#' ```
-#' There are performance and memory considerations when pulling data from the server, so it is best to use the provided
-#' TableHandle methods to perform as much of your analysis as possible on the server, and to only pull the data when
-#' something _must_ be done in R, like plotting or writing to a local file.
-#'
-#' @section
-#' Powerful table operations:
-#' Much of the power of Deephaven's suite of table operations is achieved through the use of the [`update_by()`][UpdateBy]
-#' and [`agg_by()`][AggBy] methods. These table methods are important enough to warrant their own documentation pages, accessible
-#' by clicking on their names, or by running `?UpdateBy` or `?AggBy`. These methods come with their own suites of functions,
-#' prefixed with `agg_` and `uby_` respectively, that are discoverable from their documentation pages. Running `ls("package:rdeephaven")`
-#' will reveal that most of the functions included in this package are for these methods, so it is important to get acquainted
-#' with them.
-#'
-#' @section
-#' Getting help:
-#' While we've done our best to provide good documentation for this package, you may find you need more help than what
-#' this documentation has to offer. Please visit the official Deephaven Community Core [documentation](https://deephaven.io/core/docs/tutorials/quickstart/)
-#' to learn more about Deephaven and to find comprehensive examples. Additionally, feel free to reach out to us on
-#' the Deephaven [Community Slack channel](https://deephaven.io/slack) with any questions.
-#' We hope you find real-time data analysis in R to be as easy as possible.
-#'
-NULL
-
 loadModule("DeephavenInternalModule", TRUE)
diff --git a/R/rdeephaven/R/operation_control.R b/R/rdeephaven/R/operation_control.R
index 083085acc78..f376a79a1ca 100644
--- a/R/rdeephaven/R/operation_control.R
+++ b/R/rdeephaven/R/operation_control.R
@@ -94,8 +94,7 @@ OperationControl <- R6Class("OperationControl",
 #' print("hello!")
 #'
 #' @export
-op_control <- function(on_null="skip", on_nan="skip", big_value_context="decimal128") {
-
+op_control <- function(on_null = "skip", on_nan = "skip", big_value_context = "decimal128") {
   if (!(on_null %in% c("poison", "reset", "skip", "throw"))) {
     stop(paste0("'on_null' must be one of 'poison', 'reset', 'skip', or 'throw'. Got '", on_null, "'."))
   }
diff --git a/R/rdeephaven/R/table_handle_wrapper.R b/R/rdeephaven/R/table_handle_wrapper.R
index cb474579049..7be2a601b90 100644
--- a/R/rdeephaven/R/table_handle_wrapper.R
+++ b/R/rdeephaven/R/table_handle_wrapper.R
@@ -56,12 +56,12 @@
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create a data frame, push it to the server, and retrieve a TableHandle referencing the new table
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:50],
-#'   boolCol = sample(c(TRUE,FALSE), 50, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 50, TRUE),
 #'   col1 = sample(1000, size = 50, replace = TRUE),
 #'   col2 = sample(1000, size = 50, replace = TRUE),
 #'   col3 = 1:50
diff --git a/R/rdeephaven/R/update_by_ops_wrapper.R b/R/rdeephaven/R/update_by_ops_wrapper.R
index 2ca32ef7411..a27af272b62 100644
--- a/R/rdeephaven/R/update_by_ops_wrapper.R
+++ b/R/rdeephaven/R/update_by_ops_wrapper.R
@@ -3,126 +3,14 @@
 # C++ UpdateByOpWrapper, which is finally a wrapper around a C++ UpdateByOperation. See rdeephaven/src/client.cpp for details.
 # Note that UpdateByOps should not be instantiated directly by user code, but rather by provided uby functions.
 
-
-#' @name
-#' UpdateBy
-#' @title
-#' Deephaven's UpdateBy Operations
-#' @md
-#' @usage NULL
-#' @format NULL
-#' @docType class
-#'
-#' @description
-#' Deephaven's `update_by()` table method and suite of `uby` functions enable cumulative and moving calculations
-#' on static _and_ streaming tables. Complex operations like cumulative minima and maxima, exponential moving averages,
-#' and rolling standard deviations are all possible and effortless to execute. As always in Deephaven,
-#' the results of these calculations will continue to update as their parent tables are updated. Additionally, it's easy
-#' to group data by one or more columns, enabling complex group-wise calculations with a single line of code.
-#'
-#' @section
-#' Applying UpdateBy operations to a table:
-#' The table method `update_by()` is the entry point for UpdateBy operations. It takes two arguments: the first is an
-#' [`UpdateByOp`][UpdateByOp] or a list of `UpdateByOp`s denoting the calculations to perform on specific columns of the
-#' table. Then, it takes a column name or a list of column names that define the groups on which to perform the calculations.
-#' If you don't want grouped calculations, omit this argument.
-#'
-#' The `update_by()` method itself does not know anything about the columns on which you want to perform calculations.
-#' Rather, the desired columns are passed to individual `uby` functions, enabling a massive amount of flexibility.
-#'
-#' @section
-#' `uby` functions:
-#' `uby` functions are the workers that actually execute the complex UpdateBy calculations. These functions are
-#' _generators_, meaning they return _functions_ that the Deephaven engine knows how to interpret. We call the functions
-#' that they return [`UpdateByOp`][UpdateByOp]s. These `UpdateByOp`s are not R-level functions, but Deephaven-specific
-#' data types that perform all of the intensive calculations. Here is a list of all `uby` functions available in Deephaven:
-#'
-#' - [`uby_cum_min()`][uby_cum_min]
-#' - [`uby_cum_max()`][uby_cum_max]
-#' - [`uby_cum_sum()`][uby_cum_sum]
-#' - [`uby_cum_prod()`][uby_cum_prod]
-#' - [`uby_forward_fill()`][uby_forward_fill]
-#' - [`uby_delta()`][uby_delta]
-#' - [`uby_emmin_tick()`][uby_emmin_tick]
-#' - [`uby_emmin_time()`][uby_emmin_time]
-#' - [`uby_emmax_tick()`][uby_emmax_tick]
-#' - [`uby_emmax_time()`][uby_emmax_time]
-#' - [`uby_ems_tick()`][uby_ems_tick]
-#' - [`uby_ems_time()`][uby_ems_time]
-#' - [`uby_ema_tick()`][uby_ema_tick]
-#' - [`uby_ema_time()`][uby_ema_time]
-#' - [`uby_emstd_tick()`][uby_emstd_tick]
-#' - [`uby_emstd_time()`][uby_emstd_time]
-#' - [`uby_rolling_count_tick()`][uby_rolling_count_tick]
-#' - [`uby_rolling_count_time()`][uby_rolling_count_time]
-#' - [`uby_rolling_group_tick()`][uby_rolling_group_tick]
-#' - [`uby_rolling_group_time()`][uby_rolling_group_time]
-#' - [`uby_rolling_min_tick()`][uby_rolling_min_tick]
-#' - [`uby_rolling_min_time()`][uby_rolling_min_time]
-#' - [`uby_rolling_max_tick()`][uby_rolling_max_tick]
-#' - [`uby_rolling_max_time()`][uby_rolling_max_time]
-#' - [`uby_rolling_sum_tick()`][uby_rolling_sum_tick]
-#' - [`uby_rolling_sum_time()`][uby_rolling_sum_time]
-#' - [`uby_rolling_prod_tick()`][uby_rolling_prod_tick]
-#' - [`uby_rolling_prod_time()`][uby_rolling_prod_time]
-#' - [`uby_rolling_avg_tick()`][uby_rolling_avg_tick]
-#' - [`uby_rolling_avg_time()`][uby_rolling_avg_time]
-#' - [`uby_rolling_wavg_tick()`][uby_rolling_wavg_tick]
-#' - [`uby_rolling_wavg_time()`][uby_rolling_wavg_time]
-#' - [`uby_rolling_std_tick()`][uby_rolling_std_tick]
-#' - [`uby_rolling_std_time()`][uby_rolling_std_time]
-#'
-#' For more details on each aggregation function, click on one of the methods above or see the reference documentation
-#' by running `?uby_cum_min`, `?uby_delta`, etc.
-#'
-#' @examples
-#' \dontrun{
-#' library(rdeephaven)
-#'
-#' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
-#'
-#' # create data frame, push to server, retrieve TableHandle
-#' df <- data.frame(
-#'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
-#'   col1 = sample(10000, size = 500, replace = TRUE),
-#'   col2 = sample(10000, size = 500, replace = TRUE),
-#'   col3 = 1:500
-#' )
-#' th <- client$import_table(df)
-#'
-#' # compute 10-row exponential weighted moving average of col1 and col2, grouped by boolCol
-#' th1 <- th$
-#'   update_by(uby_ema_tick(decay_ticks=10, cols=c("col1Ema = col1", "col2Ema = col2")), by="boolCol")
-#'
-#' # compute rolling 10-second weighted average and standard deviation of col1 and col2, weighted by col3
-#' th2 <- th$
-#'   update_by(
-#'     c(uby_rolling_wavg_time(ts_col="timeCol", wcol="col3", cols=c("col1WAvg = col1", "col2WAvg = col2"), rev_time="PT10s"),
-#'       uby_rolling_std_time(ts_col="timeCol", cols=c("col1Std = col1", "col2Std = col2"), rev_time="PT10s")))
-#'
-#' # compute cumulative minimum and maximum of col1 and col2 respectively, and the rolling 20-row sum of col3, grouped by boolCol
-#' th3 <- th$
-#'   update_by(
-#'     c(uby_cum_min(cols="col1"),
-#'       uby_cum_max(cols="col2"),
-#'       uby_rolling_sum_tick(cols="col3", rev_ticks=20)),
-#'     by="boolCol")
-#'
-#' client$close()
-#' }
-#'
-NULL
-
-
 #' @name UpdateByOp
 #' @title Deephaven UpdateByOps
 #' @md
 #' @description
 #' An `UpdateByOp` is the return type of one of Deephaven's [`uby`][UpdateBy] functions. It is a function that performs
 #' the computation specified by the `uby` function. These are intended to be passed directly to `update_by()`,
-#' and should never be instantiated directly be user code.
+#' and should never be instantiated directly by user code. For more information, see the vignette on
+#' `uby` functions by running `vignette("update_by")`.
 #'
 #' If multiple tables have the same schema and the same UpdateBy operations need to be applied to each table, saving
 #' these objects directly in a variable may be useful to avoid having to re-create them each time:
@@ -172,6 +60,9 @@ UpdateByOp <- R6Class("UpdateByOp",
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to compute the cumulative sum for all non-grouping columns.
@@ -182,12 +73,12 @@ UpdateByOp <- R6Class("UpdateByOp",
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -200,12 +91,12 @@ UpdateByOp <- R6Class("UpdateByOp",
 #'
 #' # compute cumulative sum of col1 and col2, grouped by boolCol
 #' th2 <- th$
-#'   update_by(uby_cum_sum(c("col1CumSum = col1", "col2CumSum = col2")), by="boolCol")
+#'   update_by(uby_cum_sum(c("col1CumSum = col1", "col2CumSum = col2")), by = "boolCol")
 #'
 #' # compute cumulative sum of col1 and col2, grouped by boolCol and parity of col3
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_cum_sum(c("col1CumSum = col1", "col2CumSum = col2")), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_cum_sum(c("col1CumSum = col1", "col2CumSum = col2")), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -235,6 +126,9 @@ uby_cum_sum <- function(cols = character()) {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to compute the cumulative product for all non-grouping columns.
@@ -245,12 +139,12 @@ uby_cum_sum <- function(cols = character()) {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -263,12 +157,12 @@ uby_cum_sum <- function(cols = character()) {
 #'
 #' # compute cumulative product of col1 and col2, grouped by boolCol
 #' th2 <- th$
-#'   update_by(uby_cum_prod(c("col1CumProd = col1", "col2CumProd = col2")), by="boolCol")
+#'   update_by(uby_cum_prod(c("col1CumProd = col1", "col2CumProd = col2")), by = "boolCol")
 #'
 #' # compute cumulative product of col1 and col2, grouped by boolCol and parity of col3
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_cum_prod(c("col1CumProd = col1", "col2CumProd = col2")), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_cum_prod(c("col1CumProd = col1", "col2CumProd = col2")), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -298,6 +192,9 @@ uby_cum_prod <- function(cols = character()) {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to compute the cumulative minimum for all non-grouping columns.
@@ -308,12 +205,12 @@ uby_cum_prod <- function(cols = character()) {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -326,12 +223,12 @@ uby_cum_prod <- function(cols = character()) {
 #'
 #' # compute cumulative minimum of col1 and col2, grouped by boolCol
 #' th2 <- th$
-#'   update_by(uby_cum_min(c("col1CumMin = col1", "col2CumMin = col2")), by="boolCol")
+#'   update_by(uby_cum_min(c("col1CumMin = col1", "col2CumMin = col2")), by = "boolCol")
 #'
 #' # compute cumulative minimum of col1 and col2, grouped by boolCol and parity of col3
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_cum_min(c("col1CumMin = col1", "col2CumMin = col2")), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_cum_min(c("col1CumMin = col1", "col2CumMin = col2")), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -361,6 +258,9 @@ uby_cum_min <- function(cols = character()) {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to compute the cumulative maximum for all non-grouping columns.
@@ -371,12 +271,12 @@ uby_cum_min <- function(cols = character()) {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -389,12 +289,12 @@ uby_cum_min <- function(cols = character()) {
 #'
 #' # compute cumulative maximum of col1 and col2, grouped by boolCol
 #' th2 <- th$
-#'   update_by(uby_cum_max(c("col1CumMax = col1", "col2CumMax = col2")), by="boolCol")
+#'   update_by(uby_cum_max(c("col1CumMax = col1", "col2CumMax = col2")), by = "boolCol")
 #'
 #' # compute cumulative maximum of col1 and col2, grouped by boolCol and parity of col3
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_cum_max(c("col1CumMax = col1", "col2CumMax = col2")), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_cum_max(c("col1CumMax = col1", "col2CumMax = col2")), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -425,6 +325,9 @@ uby_cum_max <- function(cols = character()) {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to perform a forward fill on all non-grouping columns.
@@ -435,12 +338,12 @@ uby_cum_max <- function(cols = character()) {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = replace(sample(10000, size = 500, replace = TRUE), sample(500, 100), NA),
 #'   col2 = replace(sample(10000, size = 500, replace = TRUE), sample(500, 100), NA),
 #'   col3 = replace(1:500, sample(500, 100), NA)
@@ -453,13 +356,13 @@ uby_cum_max <- function(cols = character()) {
 #'
 #' # forward fill col1 and col2, grouped by boolCol
 #' th2 <- th$
-#'  update_by(uby_forward_fill(c("col1", "col2")), by="boolCol")
+#'   update_by(uby_forward_fill(c("col1", "col2")), by = "boolCol")
 #'
 #' # forward fill col3, compute parity of col3, and forward fill col1 and col2, grouped by boolCol and parity of col3
 #' th3 <- th$
 #'   update_by(uby_forward_fill("col3"))$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_forward_fill(c("col1", "col2")), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_forward_fill(c("col1", "col2")), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -497,6 +400,9 @@ uby_forward_fill <- function(cols = character()) {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to apply the delta operation to all non-grouping columns.
@@ -508,12 +414,12 @@ uby_forward_fill <- function(cols = character()) {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -526,12 +432,12 @@ uby_forward_fill <- function(cols = character()) {
 #'
 #' # compute consecutive differences of col1 and col2, grouped by boolCol
 #' th2 <- th$
-#'   update_by(uby_delta(c("col1Delta = col1", "col2Delta = col2")), by="boolCol")
+#'   update_by(uby_delta(c("col1Delta = col1", "col2Delta = col2")), by = "boolCol")
 #'
 #' # compute consecutive differences of col1 and col2, grouped by boolCol and parity of col3
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_delta(c("col1Delta = col1", "col2Delta = col2")), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_delta(c("col1Delta = col1", "col2Delta = col2")), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -575,6 +481,9 @@ uby_delta <- function(cols = character(), delta_control = "null_dominates") {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param decay_ticks Numeric scalar denoting the decay rate in ticks.
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
@@ -587,12 +496,12 @@ uby_delta <- function(cols = character(), delta_control = "null_dominates") {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -601,16 +510,16 @@ uby_delta <- function(cols = character(), delta_control = "null_dominates") {
 #'
 #' # compute 10-row exponential moving average of col1 and col2
 #' th1 <- th$
-#'   update_by(uby_ema_tick(decay_ticks=10, cols=c("col1Ema = col1", "col2Ema = col2")))
+#'   update_by(uby_ema_tick(decay_ticks = 10, cols = c("col1Ema = col1", "col2Ema = col2")))
 #'
 #' # compute 5-row exponential moving average of col1 and col2, grouped by boolCol
 #' th2 <- th$
-#'   update_by(uby_ema_tick(decay_ticks=5, cols=c("col1Ema = col1", "col2Ema = col2")), by="boolCol")
+#'   update_by(uby_ema_tick(decay_ticks = 5, cols = c("col1Ema = col1", "col2Ema = col2")), by = "boolCol")
 #'
 #' # compute 20-row exponential moving average of col1 and col2, grouped by boolCol and parity of col3
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_ema_tick(decay_ticks=20, cols=c("col1Ema = col1", "col2Ema = col2")), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_ema_tick(decay_ticks = 20, cols = c("col1Ema = col1", "col2Ema = col2")), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -654,6 +563,9 @@ uby_ema_tick <- function(decay_ticks, cols = character(), operation_control = op
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param ts_col String denoting the column to use as the timestamp.
 #' @param decay_time ISO-8601-formatted duration string specifying the decay rate.
@@ -667,12 +579,12 @@ uby_ema_tick <- function(decay_ticks, cols = character(), operation_control = op
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -681,16 +593,16 @@ uby_ema_tick <- function(decay_ticks, cols = character(), operation_control = op
 #'
 #' # compute 10-second exponential moving average of col1 and col2
 #' th1 <- th$
-#'   update_by(uby_ema_time(ts_col="timeCol", decay_time="PT10s", cols=c("col1Ema = col1", "col2Ema = col2")))
+#'   update_by(uby_ema_time(ts_col = "timeCol", decay_time = "PT10s", cols = c("col1Ema = col1", "col2Ema = col2")))
 #'
 #' # compute 5-second exponential moving average of col1 and col2, grouped by boolCol
 #' th2 <- th$
-#'   update_by(uby_ema_time(ts_col="timeCol", decay_time="PT5s", cols=c("col1Ema = col1", "col2Ema = col2")), by="boolCol")
+#'   update_by(uby_ema_time(ts_col = "timeCol", decay_time = "PT5s", cols = c("col1Ema = col1", "col2Ema = col2")), by = "boolCol")
 #'
 #' # compute 20-second exponential moving average of col1 and col2, grouped by boolCol and parity of col3
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_ema_time(ts_col="timeCol", decay_time="PT20s", cols=c("col1Ema = col1", "col2Ema = col2")), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_ema_time(ts_col = "timeCol", decay_time = "PT20s", cols = c("col1Ema = col1", "col2Ema = col2")), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -734,6 +646,9 @@ uby_ema_time <- function(ts_col, decay_time, cols = character(), operation_contr
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param decay_ticks Numeric scalar denoting the decay rate in ticks.
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
@@ -746,12 +661,12 @@ uby_ema_time <- function(ts_col, decay_time, cols = character(), operation_contr
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -760,16 +675,16 @@ uby_ema_time <- function(ts_col, decay_time, cols = character(), operation_contr
 #'
 #' # compute 10-row exponential moving sum of col1 and col2
 #' th1 <- th$
-#'   update_by(uby_ems_tick(decay_ticks=10, cols=c("col1Ems = col1", "col2Ems = col2")))
+#'   update_by(uby_ems_tick(decay_ticks = 10, cols = c("col1Ems = col1", "col2Ems = col2")))
 #'
 #' # compute 5-row exponential moving sum of col1 and col2, grouped by boolCol
 #' th2 <- th$
-#'   update_by(uby_ems_tick(decay_ticks=5, cols=c("col1Ems = col1", "col2Ems = col2")), by="boolCol")
+#'   update_by(uby_ems_tick(decay_ticks = 5, cols = c("col1Ems = col1", "col2Ems = col2")), by = "boolCol")
 #'
 #' # compute 20-row exponential moving sum of col1 and col2, grouped by boolCol and parity of col3
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_ems_tick(decay_ticks=20, cols=c("col1Ems = col1", "col2Ems = col2")), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_ems_tick(decay_ticks = 20, cols = c("col1Ems = col1", "col2Ems = col2")), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -813,6 +728,9 @@ uby_ems_tick <- function(decay_ticks, cols = character(), operation_control = op
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param decay_time ISO-8601-formatted duration string specifying the decay rate.
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
@@ -825,12 +743,12 @@ uby_ems_tick <- function(decay_ticks, cols = character(), operation_control = op
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -839,16 +757,16 @@ uby_ems_tick <- function(decay_ticks, cols = character(), operation_control = op
 #'
 #' # compute 10-second exponential moving sum of col1 and col2
 #' th1 <- th$
-#'   update_by(uby_ems_time(ts_col="timeCol", decay_time="PT10s", cols=c("col1Ems = col1", "col2Ems = col2")))
+#'   update_by(uby_ems_time(ts_col = "timeCol", decay_time = "PT10s", cols = c("col1Ems = col1", "col2Ems = col2")))
 #'
 #' # compute 5-second exponential moving sum of col1 and col2, grouped by boolCol
 #' th2 <- th$
-#'   update_by(uby_ems_time(ts_col="timeCol", decay_time="PT5s", cols=c("col1Ems = col1", "col2Ems = col2")), by="boolCol")
+#'   update_by(uby_ems_time(ts_col = "timeCol", decay_time = "PT5s", cols = c("col1Ems = col1", "col2Ems = col2")), by = "boolCol")
 #'
 #' # compute 20-second exponential moving sum of col1 and col2, grouped by boolCol and parity of col3
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_ems_time(ts_col="timeCol", decay_time="PT20s", cols=c("col1Ems = col1", "col2Ems = col2")), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_ems_time(ts_col = "timeCol", decay_time = "PT20s", cols = c("col1Ems = col1", "col2Ems = col2")), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -892,6 +810,9 @@ uby_ems_time <- function(ts_col, decay_time, cols = character(), operation_contr
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param decay_ticks Numeric scalar denoting the decay rate in ticks.
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
@@ -904,12 +825,12 @@ uby_ems_time <- function(ts_col, decay_time, cols = character(), operation_contr
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -918,16 +839,16 @@ uby_ems_time <- function(ts_col, decay_time, cols = character(), operation_contr
 #'
 #' # compute 10-row exponential moving minimum of col1 and col2
 #' th1 <- th$
-#'   update_by(uby_emmin_tick(decay_ticks=10, cols=c("col1Emmin = col1", "col2Emmin = col2")))
+#'   update_by(uby_emmin_tick(decay_ticks = 10, cols = c("col1Emmin = col1", "col2Emmin = col2")))
 #'
 #' # compute 5-row exponential moving minimum of col1 and col2, grouped by boolCol
 #' th2 <- th$
-#'   update_by(uby_emmin_tick(decay_ticks=5, cols=c("col1Emmin = col1", "col2Emmin = col2")), by="boolCol")
+#'   update_by(uby_emmin_tick(decay_ticks = 5, cols = c("col1Emmin = col1", "col2Emmin = col2")), by = "boolCol")
 #'
 #' # compute 20-row exponential moving minimum of col1 and col2, grouped by boolCol and parity of col3
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_emmin_tick(decay_ticks=20, cols=c("col1Emmin = col1", "col2Emmin = col2")), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_emmin_tick(decay_ticks = 20, cols = c("col1Emmin = col1", "col2Emmin = col2")), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -971,6 +892,9 @@ uby_emmin_tick <- function(decay_ticks, cols = character(), operation_control =
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param decay_time ISO-8601-formatted duration string specifying the decay rate.
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
@@ -983,12 +907,12 @@ uby_emmin_tick <- function(decay_ticks, cols = character(), operation_control =
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -997,16 +921,16 @@ uby_emmin_tick <- function(decay_ticks, cols = character(), operation_control =
 #'
 #' # compute 10-second exponential moving minimum of col1 and col2
 #' th1 <- th$
-#'   update_by(uby_emmin_time(ts_col="timeCol", decay_time="PT10s", cols=c("col1Emmin = col1", "col2Emmin = col2")))
+#'   update_by(uby_emmin_time(ts_col = "timeCol", decay_time = "PT10s", cols = c("col1Emmin = col1", "col2Emmin = col2")))
 #'
 #' # compute 5-second exponential moving minimum of col1 and col2, grouped by boolCol
 #' th2 <- th$
-#'   update_by(uby_emmin_time(ts_col="timeCol", decay_time="PT5s", cols=c("col1Emmin = col1", "col2Emmin = col2")), by="boolCol")
+#'   update_by(uby_emmin_time(ts_col = "timeCol", decay_time = "PT5s", cols = c("col1Emmin = col1", "col2Emmin = col2")), by = "boolCol")
 #'
 #' # compute 20-second exponential moving minimum of col1 and col2, grouped by boolCol and parity of col3
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_emmin_time(ts_col="timeCol", decay_time="PT20s", cols=c("col1Emmin = col1", "col2Emmin = col2")), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_emmin_time(ts_col = "timeCol", decay_time = "PT20s", cols = c("col1Emmin = col1", "col2Emmin = col2")), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -1050,6 +974,9 @@ uby_emmin_time <- function(ts_col, decay_time, cols = character(), operation_con
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param decay_ticks Numeric scalar denoting the decay rate in ticks.
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
@@ -1062,12 +989,12 @@ uby_emmin_time <- function(ts_col, decay_time, cols = character(), operation_con
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -1076,16 +1003,16 @@ uby_emmin_time <- function(ts_col, decay_time, cols = character(), operation_con
 #'
 #' # compute 10-row exponential moving maximum of col1 and col2
 #' th1 <- th$
-#'   update_by(uby_emmax_tick(decay_ticks=10, cols=c("col1Emmax = col1", "col2Emmax = col2")))
+#'   update_by(uby_emmax_tick(decay_ticks = 10, cols = c("col1Emmax = col1", "col2Emmax = col2")))
 #'
 #' # compute 5-row exponential moving maximum of col1 and col2, grouped by boolCol
 #' th2 <- th$
-#'   update_by(uby_emmax_tick(decay_ticks=5, cols=c("col1Emmax = col1", "col2Emmax = col2")), by="boolCol")
+#'   update_by(uby_emmax_tick(decay_ticks = 5, cols = c("col1Emmax = col1", "col2Emmax = col2")), by = "boolCol")
 #'
 #' # compute 20-row exponential moving maximum of col1 and col2, grouped by boolCol and parity of col3
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_emmax_tick(decay_ticks=20, cols=c("col1Emmax = col1", "col2Emmax = col2")), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_emmax_tick(decay_ticks = 20, cols = c("col1Emmax = col1", "col2Emmax = col2")), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -1129,6 +1056,9 @@ uby_emmax_tick <- function(decay_ticks, cols = character(), operation_control =
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param decay_time ISO-8601-formatted duration string specifying the decay rate.
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
@@ -1141,12 +1071,12 @@ uby_emmax_tick <- function(decay_ticks, cols = character(), operation_control =
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -1155,16 +1085,16 @@ uby_emmax_tick <- function(decay_ticks, cols = character(), operation_control =
 #'
 #' # compute 10-second exponential moving maximum of col1 and col2
 #' th1 <- th$
-#'   update_by(uby_emmax_time(ts_col="timeCol", decay_time="PT10s", cols=c("col1Emmax = col1", "col2Emmax = col2")))
+#'   update_by(uby_emmax_time(ts_col = "timeCol", decay_time = "PT10s", cols = c("col1Emmax = col1", "col2Emmax = col2")))
 #'
 #' # compute 5-second exponential moving maximum of col1 and col2, grouped by boolCol
 #' th2 <- th$
-#'   update_by(uby_emmax_time(ts_col="timeCol", decay_time="PT5s", cols=c("col1Emmax = col1", "col2Emmax = col2")), by="boolCol")
+#'   update_by(uby_emmax_time(ts_col = "timeCol", decay_time = "PT5s", cols = c("col1Emmax = col1", "col2Emmax = col2")), by = "boolCol")
 #'
 #' # compute 20-second exponential moving maximum of col1 and col2, grouped by boolCol and parity of col3
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_emmax_time(ts_col="timeCol", decay_time="PT20s", cols=c("col1Emmax = col1", "col2Emmax = col2")), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_emmax_time(ts_col = "timeCol", decay_time = "PT20s", cols = c("col1Emmax = col1", "col2Emmax = col2")), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -1213,6 +1143,9 @@ uby_emmax_time <- function(ts_col, decay_time, cols = character(), operation_con
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param decay_ticks Numeric scalar denoting the decay rate in ticks.
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
@@ -1225,12 +1158,12 @@ uby_emmax_time <- function(ts_col, decay_time, cols = character(), operation_con
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -1239,16 +1172,16 @@ uby_emmax_time <- function(ts_col, decay_time, cols = character(), operation_con
 #'
 #' # compute 10-row exponential moving standard deviation of col1 and col2
 #' th1 <- th$
-#'   update_by(uby_emstd_tick(decay_ticks=10, cols=c("col1Emstd = col1", "col2Emstd = col2")))
+#'   update_by(uby_emstd_tick(decay_ticks = 10, cols = c("col1Emstd = col1", "col2Emstd = col2")))
 #'
 #' # compute 5-row exponential moving standard deviation of col1 and col2, grouped by boolCol
 #' th2 <- th$
-#'   update_by(uby_emstd_tick(decay_ticks=5, cols=c("col1Emstd = col1", "col2Emstd = col2")), by="boolCol")
+#'   update_by(uby_emstd_tick(decay_ticks = 5, cols = c("col1Emstd = col1", "col2Emstd = col2")), by = "boolCol")
 #'
 #' # compute 20-row exponential moving standard deviation of col1 and col2, grouped by boolCol and parity of col3
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_emstd_tick(decay_ticks=20, cols=c("col1Emstd = col1", "col2Emstd = col2")), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_emstd_tick(decay_ticks = 20, cols = c("col1Emstd = col1", "col2Emstd = col2")), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -1297,6 +1230,9 @@ uby_emstd_tick <- function(decay_ticks, cols = character(), operation_control =
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param decay_time ISO-8601-formatted duration string specifying the decay rate.
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
@@ -1309,12 +1245,12 @@ uby_emstd_tick <- function(decay_ticks, cols = character(), operation_control =
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -1323,16 +1259,16 @@ uby_emstd_tick <- function(decay_ticks, cols = character(), operation_control =
 #'
 #' # compute 10-second exponential moving standard deviation of col1 and col2
 #' th1 <- th$
-#'   update_by(uby_emstd_time(ts_col="timeCol", decay_time="PT10s", cols=c("col1Emstd = col1", "col2Emstd = col2")))
+#'   update_by(uby_emstd_time(ts_col = "timeCol", decay_time = "PT10s", cols = c("col1Emstd = col1", "col2Emstd = col2")))
 #'
 #' # compute 5-second exponential moving standard deviation of col1 and col2, grouped by boolCol
 #' th2 <- th$
-#'   update_by(uby_emstd_time(ts_col="timeCol", decay_time="PT5s", cols=c("col1Emstd = col1", "col2Emstd = col2")), by="boolCol")
+#'   update_by(uby_emstd_time(ts_col = "timeCol", decay_time = "PT5s", cols = c("col1Emstd = col1", "col2Emstd = col2")), by = "boolCol")
 #'
 #' # compute 20-second exponential moving standard deviation of col1 and col2, grouped by boolCol and parity of col3
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_emstd_time(ts_col="timeCol", decay_time="PT20s", cols=c("col1Emstd = col1", "col2Emstd = col2")), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_emstd_time(ts_col = "timeCol", decay_time = "PT20s", cols = c("col1Emstd = col1", "col2Emstd = col2")), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -1380,6 +1316,9 @@ uby_emstd_time <- function(ts_col, decay_time, cols = character(), operation_con
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#' 
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to compute the rolling sum for all non-grouping columns.
@@ -1392,12 +1331,12 @@ uby_emstd_time <- function(ts_col, decay_time, cols = character(), operation_con
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -1406,16 +1345,16 @@ uby_emstd_time <- function(ts_col, decay_time, cols = character(), operation_con
 #'
 #' # compute rolling sum of col1 and col2, using the previous 5 rows and current row
 #' th1 <- th$
-#'   update_by(uby_rolling_sum_tick(cols=c("col1RollSum = col1", "col2RollSum = col2"), rev_ticks=6))
+#'   update_by(uby_rolling_sum_tick(cols = c("col1RollSum = col1", "col2RollSum = col2"), rev_ticks = 6))
 #'
 #' # compute rolling sum of col1 and col2, grouped by boolCol, using previous 5 rows, current row, and following 5 rows
 #' th2 <- th$
-#'   update_by(uby_rolling_sum_tick(cols=c("col1RollSum = col1", "col2RollSum = col2"), rev_ticks=6, fwd_ticks=5)), by="boolCol")
+#'   update_by(uby_rolling_sum_tick(cols = c("col1RollSum = col1", "col2RollSum = col2"), rev_ticks = 6, fwd_ticks = 5), by = "boolCol")
 #'
 #' # compute rolling sum of col1 and col2, grouped by boolCol and parity of col3, using current row and following 10 rows
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_rolling_sum_tick(cols=c("col1RollSum = col1", "col2RollSum = col2"), rev_ticks=1, fwd_ticks=10)), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_rolling_sum_tick(cols = c("col1RollSum = col1", "col2RollSum = col2"), rev_ticks = 1, fwd_ticks = 10), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -1459,6 +1398,9 @@ uby_rolling_sum_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#' 
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param ts_col String denoting the column to use as the timestamp.
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
@@ -1472,12 +1414,12 @@ uby_rolling_sum_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -1486,16 +1428,16 @@ uby_rolling_sum_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #'
 #' # compute rolling sum of col1 and col2, using the previous 5 seconds
 #' th1 <- th$
-#'   update_by(uby_rolling_sum_time(ts_col="timeCol", cols=c("col1RollSum = col1", "col2RollSum = col2"), rev_time="PT5s"))
+#'   update_by(uby_rolling_sum_time(ts_col = "timeCol", cols = c("col1RollSum = col1", "col2RollSum = col2"), rev_time = "PT5s"))
 #'
 #' # compute rolling sum of col1 and col2, grouped by boolCol, using previous 5 seconds, and following 5 seconds
 #' th2 <- th$
-#'   update_by(uby_rolling_sum_time(ts_col="timeCol", cols=c("col1RollSum = col1", "col2RollSum = col2"), rev_time="PT5s", fwd_ticks="PT5s")), by="boolCol")
+#'   update_by(uby_rolling_sum_time(ts_col = "timeCol", cols = c("col1RollSum = col1", "col2RollSum = col2"), rev_time = "PT5s", fwd_ticks = "PT5s"), by = "boolCol")
 #'
 #' # compute rolling sum of col1 and col2, grouped by boolCol and parity of col3, using following 10 seconds
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_rolling_sum_time(ts_col="timeCol", cols=c("col1RollSum = col1", "col2RollSum = col2"), rev_time="PT0s", fwd_time="PT10s")), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_rolling_sum_time(ts_col = "timeCol", cols = c("col1RollSum = col1", "col2RollSum = col2"), rev_time = "PT0s", fwd_time = "PT10s"), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -1543,6 +1485,9 @@ uby_rolling_sum_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#' 
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to compute the rolling group for all non-grouping columns.
@@ -1555,12 +1500,12 @@ uby_rolling_sum_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -1569,12 +1514,12 @@ uby_rolling_sum_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #'
 #' # compute rolling group of col1 and col2, grouped by boolCol, using previous 5 rows, current row, and following 5 rows
 #' th1 <- th$
-#'   update_by(uby_rolling_group_tick(cols=c("col1RollGroup = col1", "col2RollGroup = col2"), rev_ticks=6, fwd_ticks=5)), by="boolCol")
+#'   update_by(uby_rolling_group_tick(cols = c("col1RollGroup = col1", "col2RollGroup = col2"), rev_ticks = 6, fwd_ticks = 5), by = "boolCol")
 #'
 #' # compute rolling group of col1 and col2, grouped by boolCol and parity of col3, using current row and following 10 rows
 #' th2 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_rolling_group_tick(cols=c("col1RollGroup = col1", "col2RollGroup = col2"), rev_ticks=1, fwd_ticks=10)), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_rolling_group_tick(cols = c("col1RollGroup = col1", "col2RollGroup = col2"), rev_ticks = 1, fwd_ticks = 10), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -1618,6 +1563,9 @@ uby_rolling_group_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#' 
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param ts_col String denoting the column to use as the timestamp.
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
@@ -1631,12 +1579,12 @@ uby_rolling_group_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -1645,12 +1593,12 @@ uby_rolling_group_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #'
 #' # compute rolling group of col1 and col2, grouped by boolCol, using previous 5 seconds, and following 5 seconds
 #' th1 <- th$
-#'   update_by(uby_rolling_group_time(ts_col="timeCol", cols=c("col1RollGroup = col1", "col2RollGroup = col2"), rev_time="PT5s", fwd_ticks="PT5s")), by="boolCol")
+#'   update_by(uby_rolling_group_time(ts_col = "timeCol", cols = c("col1RollGroup = col1", "col2RollGroup = col2"), rev_time = "PT5s", fwd_ticks = "PT5s"), by = "boolCol")
 #'
 #' # compute rolling group of col1 and col2, grouped by boolCol and parity of col3, using following 10 seconds
 #' th2 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_rolling_group_time(ts_col="timeCol", cols=c("col1RollGroup = col1", "col2RollGroup = col2"), rev_time="PT0s", fwd_time="PT10s")), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_rolling_group_time(ts_col = "timeCol", cols = c("col1RollGroup = col1", "col2RollGroup = col2"), rev_time = "PT0s", fwd_time = "PT10s"), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -1698,6 +1646,9 @@ uby_rolling_group_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#' 
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to compute the rolling average for all non-grouping columns.
@@ -1710,12 +1661,12 @@ uby_rolling_group_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -1724,16 +1675,16 @@ uby_rolling_group_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #'
 #' # compute rolling average of col1 and col2, using the previous 5 rows and current row
 #' th1 <- th$
-#'   update_by(uby_rolling_avg_tick(cols=c("col1RollAvg = col1", "col2RollAvg = col2"), rev_ticks=6))
+#'   update_by(uby_rolling_avg_tick(cols = c("col1RollAvg = col1", "col2RollAvg = col2"), rev_ticks = 6))
 #'
 #' # compute rolling average of col1 and col2, grouped by boolCol, using previous 5 rows, current row, and following 5 rows
 #' th2 <- th$
-#'   update_by(uby_rolling_avg_tick(cols=c("col1RollAvg = col1", "col2RollAvg = col2"), rev_ticks=6, fwd_ticks=5)), by="boolCol")
+#'   update_by(uby_rolling_avg_tick(cols = c("col1RollAvg = col1", "col2RollAvg = col2"), rev_ticks = 6, fwd_ticks = 5), by = "boolCol")
 #'
 #' # compute rolling average of col1 and col2, grouped by boolCol and parity of col3, using current row and following 10 rows
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_rolling_avg_tick(cols=c("col1RollAvg = col1", "col2RollAvg = col2"), rev_ticks=1, fwd_ticks=10)), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_rolling_avg_tick(cols = c("col1RollAvg = col1", "col2RollAvg = col2"), rev_ticks = 1, fwd_ticks = 10), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -1777,6 +1728,9 @@ uby_rolling_avg_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#' 
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param ts_col String denoting the column to use as the timestamp.
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
@@ -1790,12 +1744,12 @@ uby_rolling_avg_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -1804,16 +1758,16 @@ uby_rolling_avg_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #'
 #' # compute rolling average of col1 and col2, using the previous 5 seconds
 #' th1 <- th$
-#'   update_by(uby_rolling_avg_time(ts_col="timeCol", cols=c("col1RollAvg = col1", "col2RollAvg = col2"), rev_time="PT5s"))
+#'   update_by(uby_rolling_avg_time(ts_col = "timeCol", cols = c("col1RollAvg = col1", "col2RollAvg = col2"), rev_time = "PT5s"))
 #'
 #' # compute rolling average of col1 and col2, grouped by boolCol, using previous 5 seconds, and following 5 seconds
 #' th2 <- th$
-#'   update_by(uby_rolling_avg_time(ts_col="timeCol", cols=c("col1RollAvg = col1", "col2RollAvg = col2"), rev_time="PT5s", fwd_ticks="PT5s")), by="boolCol")
+#'   update_by(uby_rolling_avg_time(ts_col = "timeCol", cols = c("col1RollAvg = col1", "col2RollAvg = col2"), rev_time = "PT5s", fwd_ticks = "PT5s"), by = "boolCol")
 #'
 #' # compute rolling average of col1 and col2, grouped by boolCol and parity of col3, using following 10 seconds
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_rolling_avg_time(ts_col="timeCol", cols=c("col1RollAvg = col1", "col2RollAvg = col2"), rev_time="PT0s", fwd_time="PT10s")), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_rolling_avg_time(ts_col = "timeCol", cols = c("col1RollAvg = col1", "col2RollAvg = col2"), rev_time = "PT0s", fwd_time = "PT10s"), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -1861,6 +1815,9 @@ uby_rolling_avg_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#' 
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to compute the rolling minimum for all non-grouping columns.
@@ -1873,12 +1830,12 @@ uby_rolling_avg_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -1887,16 +1844,16 @@ uby_rolling_avg_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #'
 #' # compute rolling minimum of col1 and col2, using the previous 5 rows and current row
 #' th1 <- th$
-#'   update_by(uby_rolling_min_tick(cols=c("col1RollMin = col1", "col2RollMin = col2"), rev_ticks=6))
+#'   update_by(uby_rolling_min_tick(cols = c("col1RollMin = col1", "col2RollMin = col2"), rev_ticks = 6))
 #'
 #' # compute rolling minimum of col1 and col2, grouped by boolCol, using previous 5 rows, current row, and following 5 rows
 #' th2 <- th$
-#'   update_by(uby_rolling_min_tick(cols=c("col1RollMin = col1", "col2RollMin = col2"), rev_ticks=6, fwd_ticks=5)), by="boolCol")
+#'   update_by(uby_rolling_min_tick(cols = c("col1RollMin = col1", "col2RollMin = col2"), rev_ticks = 6, fwd_ticks = 5), by = "boolCol")
 #'
 #' # compute rolling minimum of col1 and col2, grouped by boolCol and parity of col3, using current row and following 10 rows
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_rolling_min_tick(cols=c("col1RollMin = col1", "col2RollMin = col2"), rev_ticks=1, fwd_ticks=10)), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_rolling_min_tick(cols = c("col1RollMin = col1", "col2RollMin = col2"), rev_ticks = 1, fwd_ticks = 10), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -1940,6 +1897,9 @@ uby_rolling_min_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#' 
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param ts_col String denoting the column to use as the timestamp.
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
@@ -1953,12 +1913,12 @@ uby_rolling_min_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -1967,16 +1927,16 @@ uby_rolling_min_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #'
 #' # compute rolling minimum of col1 and col2, using the previous 5 seconds
 #' th1 <- th$
-#'   update_by(uby_rolling_min_time(ts_col="timeCol", cols=c("col1RollMin = col1", "col2RollMin = col2"), rev_time="PT5s"))
+#'   update_by(uby_rolling_min_time(ts_col = "timeCol", cols = c("col1RollMin = col1", "col2RollMin = col2"), rev_time = "PT5s"))
 #'
 #' # compute rolling minimum of col1 and col2, grouped by boolCol, using previous 5 seconds, and following 5 seconds
 #' th2 <- th$
-#'   update_by(uby_rolling_min_time(ts_col="timeCol", cols=c("col1RollMin = col1", "col2RollMin = col2"), rev_time="PT5s", fwd_ticks="PT5s")), by="boolCol")
+#'   update_by(uby_rolling_min_time(ts_col = "timeCol", cols = c("col1RollMin = col1", "col2RollMin = col2"), rev_time = "PT5s", fwd_ticks = "PT5s"), by = "boolCol")
 #'
 #' # compute rolling minimum of col1 and col2, grouped by boolCol and parity of col3, using following 10 seconds
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_rolling_min_time(ts_col="timeCol", cols=c("col1RollMin = col1", "col2RollMin = col2"), rev_time="PT0s", fwd_time="PT10s")), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_rolling_min_time(ts_col = "timeCol", cols = c("col1RollMin = col1", "col2RollMin = col2"), rev_time = "PT0s", fwd_time = "PT10s"), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -2024,6 +1984,9 @@ uby_rolling_min_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#' 
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to compute the rolling maximum for all non-grouping columns.
@@ -2036,12 +1999,12 @@ uby_rolling_min_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -2050,16 +2013,16 @@ uby_rolling_min_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #'
 #' # compute rolling maximum of col1 and col2, using the previous 5 rows and current row
 #' th1 <- th$
-#'   update_by(uby_rolling_max_tick(cols=c("col1RollMax = col1", "col2RollMax = col2"), rev_ticks=6))
+#'   update_by(uby_rolling_max_tick(cols = c("col1RollMax = col1", "col2RollMax = col2"), rev_ticks = 6))
 #'
 #' # compute rolling maximum of col1 and col2, grouped by boolCol, using previous 5 rows, current row, and following 5 rows
 #' th2 <- th$
-#'   update_by(uby_rolling_max_tick(cols=c("col1RollMax = col1", "col2RollMax = col2"), rev_ticks=6, fwd_ticks=5)), by="boolCol")
+#'   update_by(uby_rolling_max_tick(cols = c("col1RollMax = col1", "col2RollMax = col2"), rev_ticks = 6, fwd_ticks = 5), by = "boolCol")
 #'
 #' # compute rolling maximum of col1 and col2, grouped by boolCol and parity of col3, using current row and following 10 rows
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_rolling_max_tick(cols=c("col1RollMax = col1", "col2RollMax = col2"), rev_ticks=1, fwd_ticks=10)), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_rolling_max_tick(cols = c("col1RollMax = col1", "col2RollMax = col2"), rev_ticks = 1, fwd_ticks = 10), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -2103,6 +2066,9 @@ uby_rolling_max_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#' 
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param ts_col String denoting the column to use as the timestamp.
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
@@ -2116,12 +2082,12 @@ uby_rolling_max_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -2130,16 +2096,16 @@ uby_rolling_max_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #'
 #' # compute rolling maximum of col1 and col2, using the previous 5 seconds
 #' th1 <- th$
-#'   update_by(uby_rolling_max_time(ts_col="timeCol", cols=c("col1RollMax = col1", "col2RollMax = col2"), rev_time="PT5s"))
+#'   update_by(uby_rolling_max_time(ts_col = "timeCol", cols = c("col1RollMax = col1", "col2RollMax = col2"), rev_time = "PT5s"))
 #'
 #' # compute rolling maximum of col1 and col2, grouped by boolCol, using previous 5 seconds, and following 5 seconds
 #' th2 <- th$
-#'   update_by(uby_rolling_max_time(ts_col="timeCol", cols=c("col1RollMax = col1", "col2RollMax = col2"), rev_time="PT5s", fwd_ticks="PT5s")), by="boolCol")
+#'   update_by(uby_rolling_max_time(ts_col = "timeCol", cols = c("col1RollMax = col1", "col2RollMax = col2"), rev_time = "PT5s", fwd_ticks = "PT5s"), by = "boolCol")
 #'
 #' # compute rolling maximum of col1 and col2, grouped by boolCol and parity of col3, using following 10 seconds
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_rolling_max_time(ts_col="timeCol", cols=c("col1RollMax = col1", "col2RollMax = col2"), rev_time="PT0s", fwd_time="PT10s")), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_rolling_max_time(ts_col = "timeCol", cols = c("col1RollMax = col1", "col2RollMax = col2"), rev_time = "PT0s", fwd_time = "PT10s"), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -2187,6 +2153,9 @@ uby_rolling_max_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#' 
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to compute the rolling product for all non-grouping columns.
@@ -2199,12 +2168,12 @@ uby_rolling_max_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -2213,16 +2182,16 @@ uby_rolling_max_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #'
 #' # compute rolling product of col1 and col2, using the previous 5 rows and current row
 #' th1 <- th$
-#'   update_by(uby_rolling_prod_tick(cols=c("col1RollProd = col1", "col2RollProd = col2"), rev_ticks=6))
+#'   update_by(uby_rolling_prod_tick(cols = c("col1RollProd = col1", "col2RollProd = col2"), rev_ticks = 6))
 #'
 #' # compute rolling product of col1 and col2, grouped by boolCol, using previous 5 rows, current row, and following 5 rows
 #' th2 <- th$
-#'   update_by(uby_rolling_prod_tick(cols=c("col1RollProd = col1", "col2RollProd = col2"), rev_ticks=6, fwd_ticks=5)), by="boolCol")
+#'   update_by(uby_rolling_prod_tick(cols = c("col1RollProd = col1", "col2RollProd = col2"), rev_ticks = 6, fwd_ticks = 5), by = "boolCol")
 #'
 #' # compute rolling product of col1 and col2, grouped by boolCol and parity of col3, using current row and following 10 rows
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_rolling_prod_tick(cols=c("col1RollProd = col1", "col2RollProd = col2"), rev_ticks=1, fwd_ticks=10)), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_rolling_prod_tick(cols = c("col1RollProd = col1", "col2RollProd = col2"), rev_ticks = 1, fwd_ticks = 10), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -2266,6 +2235,9 @@ uby_rolling_prod_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param ts_col String denoting the column to use as the timestamp.
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
@@ -2279,12 +2251,12 @@ uby_rolling_prod_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -2293,16 +2265,16 @@ uby_rolling_prod_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #'
 #' # compute rolling product of col1 and col2, using the previous 5 seconds
 #' th1 <- th$
-#'   update_by(uby_rolling_prod_time(ts_col="timeCol", cols=c("col1RollProd = col1", "col2RollProd = col2"), rev_time="PT5s"))
+#'   update_by(uby_rolling_prod_time(ts_col = "timeCol", cols = c("col1RollProd = col1", "col2RollProd = col2"), rev_time = "PT5s"))
 #'
 #' # compute rolling product of col1 and col2, grouped by boolCol, using previous 5 seconds, and following 5 seconds
 #' th2 <- th$
-#'   update_by(uby_rolling_prod_time(ts_col="timeCol", cols=c("col1RollProd = col1", "col2RollProd = col2"), rev_time="PT5s", fwd_ticks="PT5s")), by="boolCol")
+#'   update_by(uby_rolling_prod_time(ts_col = "timeCol", cols = c("col1RollProd = col1", "col2RollProd = col2"), rev_time = "PT5s", fwd_time = "PT5s"), by = "boolCol")
 #'
 #' # compute rolling product of col1 and col2, grouped by boolCol and parity of col3, using following 10 seconds
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_rolling_prod_time(ts_col="timeCol", cols=c("col1RollProd = col1", "col2RollProd = col2"), rev_time="PT0s", fwd_time="PT10s")), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_rolling_prod_time(ts_col = "timeCol", cols = c("col1RollProd = col1", "col2RollProd = col2"), rev_time = "PT0s", fwd_time = "PT10s"), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -2350,6 +2322,9 @@ uby_rolling_prod_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to compute the rolling count for all non-grouping columns.
@@ -2362,12 +2337,12 @@ uby_rolling_prod_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -2376,16 +2351,16 @@ uby_rolling_prod_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #'
 #' # compute rolling count of col1 and col2, using the previous 5 rows and current row
 #' th1 <- th$
-#'   update_by(uby_rolling_count_tick(cols=c("col1RollCount = col1", "col2RollCount = col2"), rev_ticks=6))
+#'   update_by(uby_rolling_count_tick(cols = c("col1RollCount = col1", "col2RollCount = col2"), rev_ticks = 6))
 #'
 #' # compute rolling count of col1 and col2, grouped by boolCol, using previous 5 rows, current row, and following 5 rows
 #' th2 <- th$
-#'   update_by(uby_rolling_count_tick(cols=c("col1RollCount = col1", "col2RollCount = col2"), rev_ticks=6, fwd_ticks=5), by="boolCol")
+#'   update_by(uby_rolling_count_tick(cols = c("col1RollCount = col1", "col2RollCount = col2"), rev_ticks = 6, fwd_ticks = 5), by = "boolCol")
 #'
 #' # compute rolling count of col1 and col2, grouped by boolCol and parity of col3, using current row and following 10 rows
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_rolling_count_tick(cols=c("col1RollCount = col1", "col2RollCount = col2"), rev_ticks=1, fwd_ticks=10), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_rolling_count_tick(cols = c("col1RollCount = col1", "col2RollCount = col2"), rev_ticks = 1, fwd_ticks = 10), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -2429,6 +2404,9 @@ uby_rolling_count_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param ts_col String denoting the column to use as the timestamp.
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
@@ -2442,12 +2420,12 @@ uby_rolling_count_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -2456,16 +2434,16 @@ uby_rolling_count_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #'
 #' # compute rolling count of col1 and col2, using the previous 5 seconds
 #' th1 <- th$
-#'   update_by(uby_rolling_count_time(ts_col="timeCol", cols=c("col1RollCount = col1", "col2RollCount = col2"), rev_time="PT5s"))
+#'   update_by(uby_rolling_count_time(ts_col = "timeCol", cols = c("col1RollCount = col1", "col2RollCount = col2"), rev_time = "PT5s"))
 #'
 #' # compute rolling count of col1 and col2, grouped by boolCol, using previous 5 seconds, and following 5 seconds
 #' th2 <- th$
-#'   update_by(uby_rolling_count_time(ts_col="timeCol", cols=c("col1RollCount = col1", "col2RollCount = col2"), rev_time="PT5s", fwd_ticks="PT5s"), by="boolCol")
+#'   update_by(uby_rolling_count_time(ts_col = "timeCol", cols = c("col1RollCount = col1", "col2RollCount = col2"), rev_time = "PT5s", fwd_time = "PT5s"), by = "boolCol")
 #'
 #' # compute rolling count of col1 and col2, grouped by boolCol and parity of col3, using following 10 seconds
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_rolling_count_time(ts_col="timeCol", cols=c("col1RollCount = col1", "col2RollCount = col2"), rev_time="PT0s", fwd_time="PT10s"), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_rolling_count_time(ts_col = "timeCol", cols = c("col1RollCount = col1", "col2RollCount = col2"), rev_time = "PT0s", fwd_time = "PT10s"), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -2513,6 +2491,9 @@ uby_rolling_count_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
 #' Default is to compute the rolling standard deviation for all non-grouping columns.
@@ -2525,12 +2506,12 @@ uby_rolling_count_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -2539,16 +2520,16 @@ uby_rolling_count_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #'
 #' # compute rolling standard deviation of col1 and col2, using the previous 5 rows and current row
 #' th1 <- th$
-#'   update_by(uby_rolling_std_tick(cols=c("col1RollStd = col1", "col2RollStd = col2"), rev_ticks=6))
+#'   update_by(uby_rolling_std_tick(cols = c("col1RollStd = col1", "col2RollStd = col2"), rev_ticks = 6))
 #'
 #' # compute rolling standard deviation of col1 and col2, grouped by boolCol, using previous 5 rows, current row, and following 5 rows
 #' th2 <- th$
-#'   update_by(uby_rolling_std_tick(cols=c("col1RollStd = col1", "col2RollStd = col2"), rev_ticks=6, fwd_ticks=5), by="boolCol")
+#'   update_by(uby_rolling_std_tick(cols = c("col1RollStd = col1", "col2RollStd = col2"), rev_ticks = 6, fwd_ticks = 5), by = "boolCol")
 #'
 #' # compute rolling standard deviation of col1 and col2, grouped by boolCol and parity of col3, using current row and following 10 rows
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_rolling_std_tick(cols=c("col1RollStd = col1", "col2RollStd = col2"), rev_ticks=1, fwd_ticks=10), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_rolling_std_tick(cols = c("col1RollStd = col1", "col2RollStd = col2"), rev_ticks = 1, fwd_ticks = 10), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -2592,6 +2573,9 @@ uby_rolling_std_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param ts_col String denoting the column to use as the timestamp.
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
@@ -2605,12 +2589,12 @@ uby_rolling_std_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -2619,16 +2603,16 @@ uby_rolling_std_tick <- function(cols, rev_ticks, fwd_ticks = 0) {
 #'
 #' # compute rolling standard deviation of col1 and col2, using the previous 5 seconds
 #' th1 <- th$
-#'   update_by(uby_rolling_std_time(ts_col="timeCol", cols=c("col1RollStd = col1", "col2RollStd = col2"), rev_time="PT5s"))
+#'   update_by(uby_rolling_std_time(ts_col = "timeCol", cols = c("col1RollStd = col1", "col2RollStd = col2"), rev_time = "PT5s"))
 #'
 #' # compute rolling standard deviation of col1 and col2, grouped by boolCol, using previous 5 seconds, and following 5 seconds
 #' th2 <- th$
-#'   update_by(uby_rolling_std_time(ts_col="timeCol", cols=c("col1RollStd = col1", "col2RollStd = col2"), rev_time="PT5s", fwd_ticks="PT5s"), by="boolCol")
+#'   update_by(uby_rolling_std_time(ts_col = "timeCol", cols = c("col1RollStd = col1", "col2RollStd = col2"), rev_time = "PT5s", fwd_time = "PT5s"), by = "boolCol")
 #'
 #' # compute rolling standard deviation of col1 and col2, grouped by boolCol and parity of col3, using following 10 seconds
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_rolling_std_time(ts_col="timeCol", cols=c("col1RollStd = col1", "col2RollStd = col2"), rev_time="PT0s", fwd_time="PT10s"), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_rolling_std_time(ts_col = "timeCol", cols = c("col1RollStd = col1", "col2RollStd = col2"), rev_time = "PT0s", fwd_time = "PT10s"), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -2676,6 +2660,9 @@ uby_rolling_std_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param wcol String denoting the column to use for weights. This must be a numeric column.
 #' @param cols String or list of strings denoting the column(s) to operate on. Can be renaming expressions, i.e. “new_col = col”.
@@ -2689,12 +2676,12 @@ uby_rolling_std_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -2703,16 +2690,16 @@ uby_rolling_std_time <- function(ts_col, cols, rev_time, fwd_time = "PT0s") {
 #'
 #' # compute rolling weighted average of col1 and col2, weighted by col3, using the previous 5 rows and current row
 #' th1 <- th$
-#'   update_by(uby_rolling_wavg_tick(wcol="col3", cols=c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_ticks=6))
+#'   update_by(uby_rolling_wavg_tick(wcol = "col3", cols = c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_ticks = 6))
 #'
 #' # compute rolling weighted average of col1 and col2, weighted by col3, grouped by boolCol, using previous 5 rows, current row, and following 5 rows
 #' th2 <- th$
-#'   update_by(uby_rolling_wavg_tick(wcol="col3", cols=c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_ticks=6, fwd_ticks=5), by="boolCol")
+#'   update_by(uby_rolling_wavg_tick(wcol = "col3", cols = c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_ticks = 6, fwd_ticks = 5), by = "boolCol")
 #'
 #' # compute rolling weighted average of col1 and col2, weighted by col3, grouped by boolCol and parity of col3, using current row and following 10 rows
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_rolling_wavg_tick(wcol="col3", cols=c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_ticks=1, fwd_ticks=10), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_rolling_wavg_tick(wcol = "col3", cols = c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_ticks = 1, fwd_ticks = 10), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
@@ -2757,6 +2744,9 @@ uby_rolling_wavg_tick <- function(wcol, cols, rev_ticks, fwd_ticks = 0) {
 #' function called an [`UpdateByOp`][UpdateByOp] intended to be used in a call to `update_by()`. This detail is typically
 #' hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 #' a `uby` function can otherwise seem unexpected.
+#'
+#' For more information, see the vignette on `uby` functions by running
+#' `vignette("update_by")`.
 #'
 #' @param ts_col String denoting the column to use as the timestamp.
 #' @param wcol String denoting the column to use for weights. This must be a numeric column.
@@ -2771,12 +2761,12 @@ uby_rolling_wavg_tick <- function(wcol, cols, rev_ticks, fwd_ticks = 0) {
 #' library(rdeephaven)
 #'
 #' # connecting to Deephaven server
-#' client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+#' client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 #'
 #' # create data frame, push to server, retrieve TableHandle
 #' df <- data.frame(
 #'   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-#'   boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+#'   boolCol = sample(c(TRUE, FALSE), 500, TRUE),
 #'   col1 = sample(10000, size = 500, replace = TRUE),
 #'   col2 = sample(10000, size = 500, replace = TRUE),
 #'   col3 = 1:500
@@ -2785,16 +2775,16 @@ uby_rolling_wavg_tick <- function(wcol, cols, rev_ticks, fwd_ticks = 0) {
 #'
 #' # compute rolling weighted average of col1 and col2, weighted by col3, using the previous 5 seconds
 #' th1 <- th$
-#'   update_by(uby_rolling_wavg_time(ts_col="timeCol", wcol="col3", cols=c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_time="PT5s"))
+#'   update_by(uby_rolling_wavg_time(ts_col = "timeCol", wcol = "col3", cols = c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_time = "PT5s"))
 #'
 #' # compute rolling weighted average of col1 and col2, weighted by col3, grouped by boolCol, using previous 5 seconds, and following 5 seconds
 #' th2 <- th$
-#'   update_by(uby_rolling_wavg_time(ts_col="timeCol", wcol="col3", cols=c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_time="PT5s", fwd_ticks="PT5s"), by="boolCol")
+#'   update_by(uby_rolling_wavg_time(ts_col = "timeCol", wcol = "col3", cols = c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_time = "PT5s", fwd_time = "PT5s"), by = "boolCol")
 #'
 #' # compute rolling weighted average of col1 and col2, weighted by col3, grouped by boolCol and parity of col3, using following 10 seconds
 #' th3 <- th$
 #'   update("col3Parity = col3 % 2")$
-#'   update_by(uby_rolling_wavg_time(ts_col="timeCol", wcol="col3", cols=c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_time="PT0s", fwd_time="PT10s"), by=c("boolCol", "col3Parity"))
+#'   update_by(uby_rolling_wavg_time(ts_col = "timeCol", wcol = "col3", cols = c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_time = "PT0s", fwd_time = "PT10s"), by = c("boolCol", "col3Parity"))
 #'
 #' client$close()
 #' }
diff --git a/R/rdeephaven/R/utility_functions.R b/R/rdeephaven/R/utility_functions.R
index 579fd85b964..54226772ee3 100644
--- a/R/rdeephaven/R/utility_functions.R
+++ b/R/rdeephaven/R/utility_functions.R
@@ -3,29 +3,28 @@ first_class <- function(arg) {
 }
 
 vector_wrong_element_type_or_value <- function(arg_name, candidate, type_message, descriptor_message) {
-  stripped_type_message = sub(".*? ", "", type_message)
+  stripped_type_message <- sub(".*? ", "", type_message)
   return(paste0("'", arg_name, "' must be ", type_message, ", or a vector of ", stripped_type_message, "s", descriptor_message, ". Got a vector with at least one element that is not ", type_message, descriptor_message, "."))
 }
 vector_wrong_type <- function(arg_name, candidate, type_message, descriptor_message) {
-  stripped_type_message = sub(".*? ", "", type_message)
+  stripped_type_message <- sub(".*? ", "", type_message)
   return(paste0("'", arg_name, "' must be ", type_message, " or a vector of ", stripped_type_message, "s", descriptor_message, ". Got an object of class ", first_class(candidate), "."))
 }
 vector_needed_scalar <- function(arg_name, candidate, type_message, descriptor_message) {
-  stripped_type_message = sub(".*? ", "", type_message)
+  stripped_type_message <- sub(".*? ", "", type_message)
   return(paste0("'", arg_name, "' must be a single ", stripped_type_message, descriptor_message, ". Got a vector of length ", length(candidate), "."))
 }
 scalar_wrong_type <- function(arg_name, candidate, type_message, descriptor_message) {
-  stripped_type_message = sub(".*? ", "", type_message)
+  stripped_type_message <- sub(".*? ", "", type_message)
   return(paste0("'", arg_name, "' must be a single ", stripped_type_message, descriptor_message, ". Got an object of class ", first_class(candidate), "."))
 }
 scalar_wrong_value <- function(arg_name, candidate, type_message, descriptor_message) {
-  stripped_type_message = sub(".*? ", "", type_message)
+  stripped_type_message <- sub(".*? ", "", type_message)
   return(paste0("'", arg_name, "' must be a single ", stripped_type_message, descriptor_message, ". Got '", arg_name, "' = ", candidate, "."))
 }
 
 # if required_type is a list, this will not behave correctly because of R's type coercion rules
 verify_type <- function(arg_name, candidate, is_scalar, required_type, type_message, descriptor_message = "") {
-
   if (!is_scalar && (first_class(candidate) == "list")) {
     if (any(lapply(candidate, first_class) != required_type)) {
       stop(vector_wrong_element_type_or_value(arg_name, candidate, type_message, descriptor_message))
@@ -49,7 +48,6 @@ verify_type <- function(arg_name, candidate, is_scalar, required_type, type_mess
 
 # does not attempt to verify that candidate is numeric, intended to be used after `verify_type()`
 verify_int <- function(arg_name, candidate, is_scalar, type_message, descriptor_message = "") {
-
   if (is_scalar && (length(c(candidate)) != 1)) {
     stop(vector_needed_scalar(arg_name, candidate, type_message, descriptor_message))
   } else if (candidate != as.integer(candidate)) {
@@ -63,12 +61,10 @@ verify_int <- function(arg_name, candidate, is_scalar, type_message, descriptor_
 
 # does not attempt to verify that candidate is numeric, intended to be used after `verify_type()`
 verify_in_range <- function(arg_name, candidate, is_scalar, type_message, descriptor_message, lb, ub, lb_open, ub_open) {
-
   if (is_scalar && (length(c(candidate)) != 1)) {
-    stripped_type_message = sub(".*? ", "", type_message)
+    stripped_type_message <- sub(".*? ", "", type_message)
     stop(paste0("Every element of '", arg_name, "' must be ", stripped_type_message, range_message, ". Got at least one element that is not ", stripped_type_message, range_message, "."))
-  }
-  else if (((!is.null(lb)) && ((any(candidate <= lb) && (lb_open)) || (any(candidate < lb) && (!lb_open)))) ||
+  } else if (((!is.null(lb)) && ((any(candidate <= lb) && (lb_open)) || (any(candidate < lb) && (!lb_open)))) ||
     ((!is.null(ub)) && ((any(candidate >= ub) && (ub_open)) || (any(candidate > ub) && (!ub_open))))) {
     if (!is_scalar) {
       stop(vector_wrong_element_type_or_value(arg_name, candidate, type_message, descriptor_message))
diff --git a/R/rdeephaven/README.md b/R/rdeephaven/README.md
index b799f7e79b7..eec85cd5371 100644
--- a/R/rdeephaven/README.md
+++ b/R/rdeephaven/README.md
@@ -1,11 +1,12 @@
-# The Deephaven R Client
 
-The Deephaven R client is an R package that enables R users to interface with a Deephaven server and perform various
+# The Deephaven Core R Client
+
+The Deephaven Core R client is an R package that enables R users to interface with a Deephaven server and perform various
 server-side operations from the comfort of RStudio or any other R interface.
 
 ## What can the R client do?
 
-The Deephaven Client provides the following functionalities:
+The R Client provides the following functionalities:
 
 1. Connect to a Deephaven server
    -   with anonymous authentication (no username or password)
@@ -112,13 +113,13 @@ Currently, the R client is only supported on Ubuntu 20.04 or 22.04 and must be b
    ```r
    install.packages(c('Rcpp', 'arrow', 'R6', 'dplyr'))
    ```
-   then install the deephaven client itself:
+   Then, exit the R console with `quit()`. From the rdeephaven directory, build and install the R client:
    ```r
-   install.packages(".", repos=NULL, type="source")
+   cd .. && R CMD build rdeephaven && R CMD INSTALL --no-multiarch --with-keep.source rdeephaven_*.tar.gz && rm rdeephaven_*.tar.gz
    ```
-   This last command can also be executed from RStudio without the need for explicitly starting an R console.
+   This is needed over the typical `install.packages()` to ensure that the vignettes get built and installed.
+
 
-   ---
    **NOTE**
 
    If using RStudio for this step, the environment variables that were set in step 3 may not persist into the RStudio
@@ -138,15 +139,17 @@ Currently, the R client is only supported on Ubuntu 20.04 or 22.04 and must be b
    for the RStudio Server installation (the location of that file may depend on your particular RStudio server
    installation, but a common location is `/etc/rstudio/rserver.conf`).
    
-   ---
+
 
 6. Now, run
    ```r
    library(rdeephaven)
    ```
    in the R session, and start using the client!
+   
+   For an introduction to the package, run `vignette("rdeephaven")`.
+
 
----
 **NOTE**
 
 If an error like this occurs in step 4:
@@ -167,7 +170,7 @@ this means that the C++ compiler does not know where to find the relevant header
    export DHCPP=/path/to/dhcpp
    ```
 3. The Deephaven C++ client is installed and the `DHCPP` environment variable is set, but the current project is not configured to allow the compiler to access the Deephaven `dhcpp` and `src` directories. This is more difficult to give advice on, as it is an IDE-dependent problem. Consult your IDE's documentation on C/C++ compiler include paths for more information.
----
+
 
 ## Running the unit tests
 
diff --git a/R/rdeephaven/inst/tests/testthat/helper.R b/R/rdeephaven/inst/tests/testthat/helper.R
index fcb9ad5703c..862a5bd1595 100644
--- a/R/rdeephaven/inst/tests/testthat/helper.R
+++ b/R/rdeephaven/inst/tests/testthat/helper.R
@@ -1,11 +1,11 @@
 get_dh_target <- function() {
-  dh_host = Sys.getenv("DH_HOST")
-  if (dh_host == '') {
-    dh_host = "localhost"
+  dh_host <- Sys.getenv("DH_HOST")
+  if (dh_host == "") {
+    dh_host <- "localhost"
   }
-  dh_port = Sys.getenv("DH_PORT")
-  if (dh_port == '') {
-    dh_port = 10000
+  dh_port <- Sys.getenv("DH_PORT")
+  if (dh_port == "") {
+    dh_port <- 10000
   }
-  return(paste0(dh_host, ':', dh_port))
+  return(paste0(dh_host, ":", dh_port))
 }
diff --git a/R/rdeephaven/inst/tests/testthat/test_agg_by.R b/R/rdeephaven/inst/tests/testthat/test_agg_by.R
index 1b7a7f7d49c..8d6cc0e13ce 100644
--- a/R/rdeephaven/inst/tests/testthat/test_agg_by.R
+++ b/R/rdeephaven/inst/tests/testthat/test_agg_by.R
@@ -960,14 +960,20 @@ test_that("agg_count behaves as expected", {
 test_that("agg_by behaves nicely when given bad input", {
   data <- setup()
 
-  expect_error(data$th1$agg_by(agg_first()),
-    "Aggregations with no columns cannot be used in 'agg_by'. Got 'agg_first' at index 1 with an empty 'cols' argument.")
+  expect_error(
+    data$th1$agg_by(agg_first()),
+    "Aggregations with no columns cannot be used in 'agg_by'. Got 'agg_first' at index 1 with an empty 'cols' argument."
+  )
 
-  expect_error(data$th1$agg_by(c(agg_first("int_col"), agg_last())),
-    "Aggregations with no columns cannot be used in 'agg_by'. Got 'agg_last' at index 2 with an empty 'cols' argument.")
+  expect_error(
+    data$th1$agg_by(c(agg_first("int_col"), agg_last())),
+    "Aggregations with no columns cannot be used in 'agg_by'. Got 'agg_last' at index 2 with an empty 'cols' argument."
+  )
 
-  expect_error(data$th1$agg_by(c(agg_first("int_col"), agg_last("int_col"), agg_count("n"), agg_avg())),
-    "Aggregations with no columns cannot be used in 'agg_by'. Got 'agg_avg' at index 4 with an empty 'cols' argument.")
+  expect_error(
+    data$th1$agg_by(c(agg_first("int_col"), agg_last("int_col"), agg_count("n"), agg_avg())),
+    "Aggregations with no columns cannot be used in 'agg_by'. Got 'agg_avg' at index 4 with an empty 'cols' argument."
+  )
 
   data$client$close()
 })
diff --git a/R/rdeephaven/inst/tests/testthat/test_client_wrapper.R b/R/rdeephaven/inst/tests/testthat/test_client_wrapper.R
index 65d8902a419..686b532cc24 100644
--- a/R/rdeephaven/inst/tests/testthat/test_client_wrapper.R
+++ b/R/rdeephaven/inst/tests/testthat/test_client_wrapper.R
@@ -30,9 +30,7 @@ setup <- function() {
 ##### TESTING GOOD INPUTS #####
 
 test_that("client dhConnection works in the simple case of anonymous authentication", {
-
   expect_no_error(client <- Client$new(target = target))
-  
 })
 
 test_that("import_table does not fail with data frame inputs of simple column types", {
@@ -150,7 +148,6 @@ int_col("Name_Int_Col", [44, 55, 66])
 ##### TESTING BAD INPUTS #####
 
 test_that("client constructor fails nicely with bad inputs", {
-  
   expect_error(
     Client$new(target = target, auth_type = "basic"),
     "Basic authentication was requested, but 'auth_token' was not provided, and at most one of 'username' or 'password' was provided. Please provide either 'username' and 'password', or 'auth_token'."
@@ -239,7 +236,6 @@ test_that("client constructor fails nicely with bad inputs", {
     Client$new(target = target, extra_headers = list(a = 123)),
     "'value' must be a single string. Got an object of class numeric."
   )
-  
 })
 
 test_that("import_table fails nicely with bad inputs", {
@@ -298,8 +294,10 @@ test_that("run_script fails nicely with bad input types", {
 })
 
 test_that("Running Client$new with wrong argument types gives good errors", {
-  expect_error(Client$new(12345),
-    "Client initialize first argument must be either a string or an Rcpp::XPtr object.")
+  expect_error(
+    Client$new(12345),
+    "Client initialize first argument must be either a string or an Rcpp::XPtr object."
+  )
 })
 
 test_that("A Client created from an Rcpp::XPtr is functional.", {
@@ -309,7 +307,7 @@ test_that("A Client created from an Rcpp::XPtr is functional.", {
   client2 <- Client$new(client_xptr)
   t <- client2$open_table("t")
   df <- t$as_data_frame()
-  expect_true(df[1,1] == 42)
+  expect_true(df[1, 1] == 42)
   client$close()
 })
 
@@ -318,7 +316,7 @@ test_that("ticket_to_table works.", {
   client$empty_table(1)$update("A = 43")$bind_to_variable("t")
   t <- client$ticket_to_table("s/t")
   df <- t$as_data_frame()
-  expect_true(df[1,1] == 43)
+  expect_true(df[1, 1] == 43)
   client$close()
 })
 
diff --git a/R/rdeephaven/inst/tests/testthat/test_table_handle_wrapper.R b/R/rdeephaven/inst/tests/testthat/test_table_handle_wrapper.R
index 3c8d1627a33..a16ee21c3e3 100644
--- a/R/rdeephaven/inst/tests/testthat/test_table_handle_wrapper.R
+++ b/R/rdeephaven/inst/tests/testthat/test_table_handle_wrapper.R
@@ -68,23 +68,23 @@ test_that("nrow returns the correct number of rows", {
 
 test_that("ncol returns the correct number of columns", {
   data <- setup()
-  
+
   expect_equal(ncol(data$th1), ncol(data$df1))
   expect_equal(ncol(data$th2), ncol(data$df2))
   expect_equal(ncol(data$th3), ncol(data$df3))
   expect_equal(ncol(data$th4), ncol(data$df4))
-  
+
   data$client$close()
 })
 
 test_that("dim returns the correct dimension", {
   data <- setup()
-  
+
   expect_equal(dim(data$th1), dim(data$df1))
   expect_equal(dim(data$th2), dim(data$df2))
   expect_equal(dim(data$th3), dim(data$df3))
   expect_equal(dim(data$th4), dim(data$df4))
-  
+
   data$client$close()
 })
 
diff --git a/R/rdeephaven/inst/tests/testthat/test_table_ops.R b/R/rdeephaven/inst/tests/testthat/test_table_ops.R
index e47afa3847e..fe08d316d0a 100644
--- a/R/rdeephaven/inst/tests/testthat/test_table_ops.R
+++ b/R/rdeephaven/inst/tests/testthat/test_table_ops.R
@@ -59,32 +59,32 @@ setup <- function() {
 
 test_that("merge_tables behaves as expected", {
   data <- setup()
-  
+
   new_df1 <- rbind(data$df5)
   new_th1a <- data$th5$merge()
   new_th1b <- merge_tables(data$th5)
   expect_equal(as.data.frame(new_th1a), new_df1)
   expect_equal(as.data.frame(new_th1b), new_df1)
-  
+
   new_df2 <- rbind(data$df5, data$df6)
   new_th2a <- data$th5$merge(data$th6)
   new_th2b <- merge_tables(data$th5, data$th6)
   expect_equal(as.data.frame(new_th2a), new_df2)
   expect_equal(as.data.frame(new_th2b), new_df2)
-  
+
   new_df3 <- rbind(data$df5, data$df6, data$df6, data$df5)
   new_th3a <- data$th5$merge(data$th6, data$th6, data$th5)
   new_th3b <- merge_tables(data$th5, data$th6, data$th6, data$th5)
   expect_equal(as.data.frame(new_th3a), new_df3)
   expect_equal(as.data.frame(new_th3b), new_df3)
-  
+
   new_th4a <- data$th5$merge(c(data$th6))
   new_th4b <- merge_tables(data$th5, c(data$th6))
   new_th4c <- merge_tables(c(data$th5, data$th6))
   expect_equal(as.data.frame(new_th4a), new_df2)
   expect_equal(as.data.frame(new_th4b), new_df2)
   expect_equal(as.data.frame(new_th4c), new_df2)
-  
+
   new_th5a <- data$th5$merge(c(data$th6, NULL, data$th6, data$th5))
   new_th5b <- merge_tables(data$th5, c(data$th6, NULL, data$th6, data$th5))
   new_th5c <- merge_tables(c(data$th5, data$th6, NULL, data$th6, data$th5))
@@ -97,7 +97,7 @@ test_that("merge_tables behaves as expected", {
   expect_equal(as.data.frame(new_th5d), new_df3)
   expect_equal(as.data.frame(new_th5e), new_df3)
   expect_equal(as.data.frame(new_th5f), new_df3)
-  
+
   data$client$close()
 })
 
@@ -773,9 +773,9 @@ test_that("join behaves as expected", {
 
   new_th1 <- data$th5$
     join(data$th6,
-      on = character(),
-      joins = c("X_y = X", "Y_y = Y", "Number1_y = Number1", "Number2_y = Number2")
-    )
+    on = character(),
+    joins = c("X_y = X", "Y_y = Y", "Number1_y = Number1", "Number2_y = Number2")
+  )
   new_tb1 <- data$df5 %>%
     cross_join(data$df6) %>%
     rename(
@@ -795,9 +795,9 @@ test_that("natural_join behaves as expected", {
     avg_by("X")
   new_th1 <- data$th5$
     natural_join(new_th2,
-      on = "X",
-      joins = c("Number3 = Number1", "Number4 = Number2")
-    )
+    on = "X",
+    joins = c("Number3 = Number1", "Number4 = Number2")
+  )
 
   new_tb2 <- data$df6 %>%
     select(-Y) %>%
@@ -822,9 +822,9 @@ test_that("exact_join behaves as expected", {
     avg_by("X")
   new_th1 <- data$th5$
     exact_join(new_th2,
-      on = "X",
-      joins = c("Number3 = Number1", "Number4 = Number2")
-    )
+    on = "X",
+    joins = c("Number3 = Number1", "Number4 = Number2")
+  )
 
   new_tb2 <- data$df6 %>%
     select(-Y) %>%
diff --git a/R/rdeephaven/inst/tests/testthat/test_update_by.R b/R/rdeephaven/inst/tests/testthat/test_update_by.R
index 7b858e3e909..372e04c265a 100644
--- a/R/rdeephaven/inst/tests/testthat/test_update_by.R
+++ b/R/rdeephaven/inst/tests/testthat/test_update_by.R
@@ -6,7 +6,7 @@ library(zoo)
 
 # We suppress warnings because warnings are thrown when min() and max() are
 # applied to empty sets, which happens in the pure-R versions of rolling_*_time()
-options(warn=-1)
+options(warn = -1)
 
 setup <- function() {
   df1 <- data.frame(
@@ -26,7 +26,7 @@ setup <- function() {
     bool_col = sample(c(TRUE, FALSE), 250000, TRUE),
     int_col = sample(0:10000, 250000, TRUE)
   )
-  
+
   deterministic_df3 <- data.frame(
     time_col = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 30), by = "1 sec")[1:500],
     bool_col = rep(c(TRUE, TRUE, FALSE, FALSE, TRUE, FALSE, TRUE, FALSE, FALSE, TRUE), 50),
@@ -57,14 +57,24 @@ setup <- function() {
   deterministic_th3 <- client$import_table(deterministic_df3)
   th4 <- client$import_table(df4)
   th5 <- client$import_table(df5)
-  
+
   # create variants with missing data to test NULL stuff
-  null_df1 <- as.data.frame(lapply(df1, function(x) {replace(x, sample(length(x), .5*length(x)), NA)}))
-  null_df2 <- as.data.frame(lapply(df2, function(x) {replace(x, sample(length(x), .5*length(x)), NA)}))
-  null_df3 <- as.data.frame(lapply(df3, function(x) {replace(x, sample(length(x), .5*length(x)), NA)}))
-  null_df4 <- as.data.frame(lapply(df4, function(x) {replace(x, sample(length(x), .5*length(x)), NA)}))
-  null_df5 <- as.data.frame(lapply(df5, function(x) {replace(x, sample(length(x), .5*length(x)), NA)}))
-  
+  null_df1 <- as.data.frame(lapply(df1, function(x) {
+    replace(x, sample(length(x), .5 * length(x)), NA)
+  }))
+  null_df2 <- as.data.frame(lapply(df2, function(x) {
+    replace(x, sample(length(x), .5 * length(x)), NA)
+  }))
+  null_df3 <- as.data.frame(lapply(df3, function(x) {
+    replace(x, sample(length(x), .5 * length(x)), NA)
+  }))
+  null_df4 <- as.data.frame(lapply(df4, function(x) {
+    replace(x, sample(length(x), .5 * length(x)), NA)
+  }))
+  null_df5 <- as.data.frame(lapply(df5, function(x) {
+    replace(x, sample(length(x), .5 * length(x)), NA)
+  }))
+
   null_th1 <- client$import_table(null_df1)
   null_th2 <- client$import_table(null_df2)
   null_th3 <- client$import_table(null_df3)
@@ -89,300 +99,300 @@ custom_rolling_time_op <- function(col, group_col, ...) {
 
 test_that("uby_cum_sum behaves as expected", {
   data <- setup()
-  
+
   new_tb1 <- data$df1 %>%
     mutate(sum_int_col = cumsum(int_col))
   new_th1 <- data$th1$
     update_by(uby_cum_sum("sum_int_col = int_col"))
   expect_equal(as.data.frame(new_th1), as.data.frame(new_tb1))
-  
+
   new_tb2 <- data$df2 %>%
     mutate(sum_col1 = cumsum(col1), sum_col3 = cumsum(col3))
   new_th2 <- data$th2$
     update_by(uby_cum_sum(c("sum_col1 = col1", "sum_col3 = col3")))
   expect_equal(as.data.frame(new_th2), as.data.frame(new_tb2))
-  
+
   new_tb3 <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(sum_int_col = cumsum(int_col))
   new_th3 <- data$th3$
     update_by(uby_cum_sum("sum_int_col = int_col"), by = "bool_col")
   expect_equal(as.data.frame(new_th3), as.data.frame(new_tb3))
-  
+
   new_tb4 <- data$df4 %>%
     group_by(X) %>%
     mutate(sum_Number1 = cumsum(Number1), sum_Number2 = cumsum(Number2))
   new_th4 <- data$th4$
     update_by(uby_cum_sum(c("sum_Number1 = Number1", "sum_Number2 = Number2")), by = "X")
   expect_equal(as.data.frame(new_th4), as.data.frame(new_tb4))
-  
+
   new_tb5 <- data$df5 %>%
     group_by(Y) %>%
     mutate(sum_Number1 = cumsum(Number1), sum_Number2 = cumsum(Number2))
   new_th5 <- data$th5$
     update_by(uby_cum_sum(c("sum_Number1 = Number1", "sum_Number2 = Number2")), by = "Y")
   expect_equal(as.data.frame(new_th5), as.data.frame(new_tb5))
-  
+
   new_tb6 <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
     mutate(sum_Number1 = cumsum(Number1), sum_Number2 = cumsum(Number2))
   new_th6 <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_cum_sum(c("sum_Number1 = Number1", "sum_Number2 = Number2")), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6), as.data.frame(new_tb6))
-  
+
   data$client$close()
 })
 
 test_that("uby_cum_prod behaves as expected", {
   data <- setup()
-  
+
   new_tb1 <- data$df1 %>%
     mutate(prod_int_col = cumprod(int_col))
   new_th1 <- data$th1$
     update_by(uby_cum_prod("prod_int_col = int_col"))
   expect_equal(as.data.frame(new_th1), as.data.frame(new_tb1))
-  
+
   new_tb2 <- data$df2 %>%
     mutate(prod_col1 = cumprod(col1), prod_col3 = cumprod(col3))
   new_th2 <- data$th2$
     update_by(uby_cum_prod(c("prod_col1 = col1", "prod_col3 = col3")))
   expect_equal(as.data.frame(new_th2), as.data.frame(new_tb2))
-  
+
   # Using df3 yields gigantic products, which leads to overflow on the server
   # due to the column being an int. Clients cannot cast to java BigInt type,
   # so once a table has an int type, we cannot change it from here. Thus, using
   # cum_prod on int columns from R should be done with an abundance of caution,
   # and probably not at all. Make it a double before pushing to the server.
-  
+
   new_tb4 <- data$df4 %>%
     group_by(X) %>%
     mutate(prod_Number1 = cumprod(Number1), prod_Number2 = cumprod(Number2))
   new_th4 <- data$th4$
     update_by(uby_cum_prod(c("prod_Number1 = Number1", "prod_Number2 = Number2")), by = "X")
   expect_equal(as.data.frame(new_th4), as.data.frame(new_tb4))
-  
+
   new_tb5 <- data$df5 %>%
     group_by(Y) %>%
     mutate(prod_Number1 = cumprod(Number1), prod_Number2 = cumprod(Number2))
   new_th5 <- data$th5$
     update_by(uby_cum_prod(c("prod_Number1 = Number1", "prod_Number2 = Number2")), by = "Y")
   expect_equal(as.data.frame(new_th5), as.data.frame(new_tb5))
-  
+
   new_tb6 <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
     mutate(prod_Number1 = cumprod(Number1), prod_Number2 = cumprod(Number2))
   new_th6 <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_cum_prod(c("prod_Number1 = Number1", "prod_Number2 = Number2")), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6), as.data.frame(new_tb6))
-  
+
   data$client$close()
 })
 
 test_that("uby_cum_min behaves as expected", {
   data <- setup()
-  
+
   new_tb1 <- data$df1 %>%
     mutate(min_int_col = cummin(int_col))
   new_th1 <- data$th1$
     update_by(uby_cum_min("min_int_col = int_col"))
   expect_equal(as.data.frame(new_th1), as.data.frame(new_tb1))
-  
+
   new_tb2 <- data$df2 %>%
     mutate(min_col1 = cummin(col1), min_col3 = cummin(col3))
   new_th2 <- data$th2$
     update_by(uby_cum_min(c("min_col1 = col1", "min_col3 = col3")))
   expect_equal(as.data.frame(new_th2), as.data.frame(new_tb2))
-  
+
   new_tb3 <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(min_int_col = cummin(int_col))
   new_th3 <- data$th3$
     update_by(uby_cum_min("min_int_col = int_col"), by = "bool_col")
   expect_equal(as.data.frame(new_th3), as.data.frame(new_tb3))
-  
+
   new_tb4 <- data$df4 %>%
     group_by(X) %>%
     mutate(min_Number1 = cummin(Number1), min_Number2 = cummin(Number2))
   new_th4 <- data$th4$
     update_by(uby_cum_min(c("min_Number1 = Number1", "min_Number2 = Number2")), by = "X")
   expect_equal(as.data.frame(new_th4), as.data.frame(new_tb4))
-  
+
   new_tb5 <- data$df5 %>%
     group_by(Y) %>%
     mutate(min_Number1 = cummin(Number1), min_Number2 = cummin(Number2))
   new_th5 <- data$th5$
     update_by(uby_cum_min(c("min_Number1 = Number1", "min_Number2 = Number2")), by = "Y")
   expect_equal(as.data.frame(new_th5), as.data.frame(new_tb5))
-  
+
   new_tb6 <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
     mutate(min_Number1 = cummin(Number1), min_Number2 = cummin(Number2))
   new_th6 <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_cum_min(c("min_Number1 = Number1", "min_Number2 = Number2")), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6), as.data.frame(new_tb6))
-  
+
   data$client$close()
 })
 
 test_that("uby_cum_max behaves as expected", {
   data <- setup()
-  
+
   new_tb1 <- data$df1 %>%
     mutate(max_int_col = cummax(int_col))
   new_th1 <- data$th1$
     update_by(uby_cum_max("max_int_col = int_col"))
   expect_equal(as.data.frame(new_th1), as.data.frame(new_tb1))
-  
+
   new_tb2 <- data$df2 %>%
     mutate(max_col1 = cummax(col1), max_col3 = cummax(col3))
   new_th2 <- data$th2$
     update_by(uby_cum_max(c("max_col1 = col1", "max_col3 = col3")))
   expect_equal(as.data.frame(new_th2), as.data.frame(new_tb2))
-  
+
   new_tb3 <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(max_int_col = cummax(int_col))
   new_th3 <- data$th3$
     update_by(uby_cum_max("max_int_col = int_col"), by = "bool_col")
   expect_equal(as.data.frame(new_th3), as.data.frame(new_tb3))
-  
+
   new_tb4 <- data$df4 %>%
     group_by(X) %>%
     mutate(max_Number1 = cummax(Number1), max_Number2 = cummax(Number2))
   new_th4 <- data$th4$
     update_by(uby_cum_max(c("max_Number1 = Number1", "max_Number2 = Number2")), by = "X")
   expect_equal(as.data.frame(new_th4), as.data.frame(new_tb4))
-  
+
   new_tb5 <- data$df5 %>%
     group_by(Y) %>%
     mutate(max_Number1 = cummax(Number1), max_Number2 = cummax(Number2))
   new_th5 <- data$th5$
     update_by(uby_cum_max(c("max_Number1 = Number1", "max_Number2 = Number2")), by = "Y")
   expect_equal(as.data.frame(new_th5), as.data.frame(new_tb5))
-  
+
   new_tb6 <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
     mutate(max_Number1 = cummax(Number1), max_Number2 = cummax(Number2))
   new_th6 <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_cum_max(c("max_Number1 = Number1", "max_Number2 = Number2")), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6), as.data.frame(new_tb6))
-  
+
   data$client$close()
 })
 
 test_that("uby_forward_fill behaves as expected", {
   data <- setup()
-  
+
   new_th1 <- data$null_th1$
     update_by(uby_forward_fill())
   expect_equal(as.data.frame(new_th1), na.locf(data$null_df1, na.rm = FALSE))
-  
+
   new_th2 <- data$null_th2$
     update_by(uby_forward_fill())
   expect_equal(as.data.frame(new_th2), na.locf(data$null_df2, na.rm = FALSE))
-  
+
   new_th3 <- data$null_th3$
     update_by(uby_forward_fill())
   expect_equal(as.data.frame(new_th3), na.locf(data$null_df3, na.rm = FALSE))
-  
+
   new_th4 <- data$null_th4$
     update_by(uby_forward_fill())
   expect_equal(as.data.frame(new_th4), na.locf(data$null_df4, na.rm = FALSE))
-  
+
   new_th5 <- data$null_th5$
     update_by(uby_forward_fill())
   expect_equal(as.data.frame(new_th5), na.locf(data$null_df5, na.rm = FALSE))
-  
+
   data$client$close()
 })
 
 test_that("uby_delta behaves as expected", {
   data <- setup()
-  
+
   new_tb1 <- data$df1 %>%
     mutate(delta_int_col = c(NaN, diff(int_col)))
   new_th1 <- data$th1$
     update_by(uby_delta("delta_int_col = int_col"))
   expect_equal(as.data.frame(new_th1), as.data.frame(new_tb1))
-  
+
   new_tb2 <- data$df2 %>%
     mutate(delta_col1 = c(NaN, diff(col1)), delta_col3 = c(NaN, diff(col3)))
   new_th2 <- data$th2$
     update_by(uby_delta(c("delta_col1 = col1", "delta_col3 = col3")))
   expect_equal(as.data.frame(new_th2), as.data.frame(new_tb2))
-  
+
   new_tb3 <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(delta_int_col = c(NaN, diff(int_col)))
   new_th3 <- data$th3$
     update_by(uby_delta("delta_int_col = int_col"), by = "bool_col")
   expect_equal(as.data.frame(new_th3), as.data.frame(new_tb3))
-  
+
   new_tb4 <- data$df4 %>%
     group_by(X) %>%
     mutate(delta_Number1 = c(NaN, diff(Number1)), delta_Number2 = c(NaN, diff(Number2)))
   new_th4 <- data$th4$
     update_by(uby_delta(c("delta_Number1 = Number1", "delta_Number2 = Number2")), by = "X")
   expect_equal(as.data.frame(new_th4), as.data.frame(new_tb4))
-  
+
   new_tb5 <- data$df5 %>%
     group_by(Y) %>%
     mutate(delta_Number1 = c(NaN, diff(Number1)), delta_Number2 = c(NaN, diff(Number2)))
   new_th5 <- data$th5$
     update_by(uby_delta(c("delta_Number1 = Number1", "delta_Number2 = Number2")), by = "Y")
   expect_equal(as.data.frame(new_th5), as.data.frame(new_tb5))
-  
+
   new_tb6 <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
     mutate(delta_Number1 = c(NaN, diff(Number1)), delta_Number2 = c(NaN, diff(Number2)))
   new_th6 <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_delta(c("delta_Number1 = Number1", "delta_Number2 = Number2")), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6), as.data.frame(new_tb6))
-  
+
   data$client$close()
 })
 
 test_that("uby_ema_tick behaves as expected", {
   data <- setup()
-  
+
   custom_ema <- function(decay_ticks, x) {
     if (length(x) == 1) {
       return(x)
     }
-    a = exp(-1/decay_ticks)
-    ema = c(x[1])
-    for(i in seq(2,length(x))) {
-      ema[i] = a*ema[i-1] + (1-a)*x[i]
+    a <- exp(-1 / decay_ticks)
+    ema <- c(x[1])
+    for (i in seq(2, length(x))) {
+      ema[i] <- a * ema[i - 1] + (1 - a) * x[i]
     }
     return(ema)
   }
-  
+
   new_tb1 <- data$df1 %>%
     mutate(dbl_col = custom_ema(2, dbl_col))
   new_th1 <- data$th1$
     update_by(uby_ema_tick(2, "dbl_col"))
   expect_equal(as.data.frame(new_th1), as.data.frame(new_tb1))
-  
+
   new_tb2 <- data$df2 %>%
     mutate(col1 = custom_ema(5, col1), col3 = custom_ema(5, col3))
   new_th2 <- data$th2$
     update_by(uby_ema_tick(5, c("col1", "col3")))
   expect_equal(as.data.frame(new_th2), as.data.frame(new_tb2))
-  
+
   new_tb3 <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(ema_int_col = custom_ema(9, int_col))
   new_th3 <- data$th3$
     update_by(uby_ema_tick(9, "ema_int_col = int_col"), by = "bool_col")
   expect_equal(as.data.frame(new_th3), as.data.frame(new_tb3))
-  
+
   new_tb4 <- data$df4 %>%
     group_by(X) %>%
     mutate(ema_Number1 = custom_ema(3, Number1), ema_Number2 = custom_ema(3, Number2))
   new_th4 <- data$th4$
     update_by(uby_ema_tick(3, c("ema_Number1 = Number1", "ema_Number2 = Number2")), by = "X")
   expect_equal(as.data.frame(new_th4), as.data.frame(new_tb4))
-  
+
   new_tb5 <- data$df5 %>%
     group_by(Y) %>%
     mutate(ema_Number1 = custom_ema(3, Number1), ema_Number2 = custom_ema(3, Number2))
@@ -396,327 +406,327 @@ test_that("uby_ema_tick behaves as expected", {
   new_th6 <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_ema_tick(3, c("ema_Number1 = Number1", "ema_Number2 = Number2")), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6), as.data.frame(new_tb6))
-  
+
   data$client$close()
 })
 
 test_that("uby_ema_time behaves as expected", {
   data <- setup()
-  
+
   custom_ema_time <- function(ts, decay_time, x) {
     if (length(x) == 1) {
       return(x)
     }
-    time_diffs = as.numeric(ts[2:length(ts)] - ts[1:length(ts)-1])
-    a = exp(-time_diffs/as.numeric(duration(decay_time)))
-    ema = c(x[1])
-    for(i in seq(2,length(x))) {
-      ema[i] = a[i-1]*ema[i-1] + (1-a[i-1])*x[i]
+    time_diffs <- as.numeric(ts[2:length(ts)] - ts[1:length(ts) - 1])
+    a <- exp(-time_diffs / as.numeric(duration(decay_time)))
+    ema <- c(x[1])
+    for (i in seq(2, length(x))) {
+      ema[i] <- a[i - 1] * ema[i - 1] + (1 - a[i - 1]) * x[i]
     }
     return(ema)
   }
-  
+
   new_tb1 <- data$df3 %>%
     mutate(ema_int_col = custom_ema_time(time_col, "PT3s", int_col))
   new_th1 <- data$th3$
     update_by(uby_ema_time("time_col", "PT3s", "ema_int_col = int_col"))
   expect_equal(as.data.frame(new_th1), as.data.frame(new_tb1))
-  
+
   new_tb2 <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(ema_int_col = custom_ema_time(time_col, "PT3s", int_col))
   new_th2 <- data$th3$
     update_by(uby_ema_time("time_col", "PT3s", "ema_int_col = int_col"), by = "bool_col")
   expect_equal(as.data.frame(new_th2), as.data.frame(new_tb2))
-      
+
   data$client$close()
 })
 
 test_that("uby_ems_tick behaves as expected", {
   data <- setup()
-  
+
   custom_ems <- function(decay_ticks, x) {
     if (length(x) == 1) {
       return(x)
     }
-    a = exp(-1/decay_ticks)
-    ems = c(x[1])
-    for(i in seq(2,length(x))) {
-      ems[i] = a*ems[i-1] + x[i]
+    a <- exp(-1 / decay_ticks)
+    ems <- c(x[1])
+    for (i in seq(2, length(x))) {
+      ems[i] <- a * ems[i - 1] + x[i]
     }
     return(ems)
   }
-  
+
   new_tb1 <- data$df1 %>%
     mutate(dbl_col = custom_ems(2, dbl_col))
   new_th1 <- data$th1$
     update_by(uby_ems_tick(2, "dbl_col"))
   expect_equal(as.data.frame(new_th1), as.data.frame(new_tb1))
-  
+
   new_tb2 <- data$df2 %>%
     mutate(col1 = custom_ems(5, col1), col3 = custom_ems(5, col3))
   new_th2 <- data$th2$
     update_by(uby_ems_tick(5, c("col1", "col3")))
   expect_equal(as.data.frame(new_th2), as.data.frame(new_tb2))
-  
+
   new_tb3 <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(ems_int_col = custom_ems(9, int_col))
   new_th3 <- data$th3$
     update_by(uby_ems_tick(9, "ems_int_col = int_col"), by = "bool_col")
   expect_equal(as.data.frame(new_th3), as.data.frame(new_tb3))
-  
+
   new_tb4 <- data$df4 %>%
     group_by(X) %>%
     mutate(ems_Number1 = custom_ems(3, Number1), ems_Number2 = custom_ems(3, Number2))
   new_th4 <- data$th4$
     update_by(uby_ems_tick(3, c("ems_Number1 = Number1", "ems_Number2 = Number2")), by = "X")
   expect_equal(as.data.frame(new_th4), as.data.frame(new_tb4))
-  
+
   new_tb5 <- data$df5 %>%
     group_by(Y) %>%
     mutate(ems_Number1 = custom_ems(3, Number1), ems_Number2 = custom_ems(3, Number2))
   new_th5 <- data$th5$
     update_by(uby_ems_tick(3, c("ems_Number1 = Number1", "ems_Number2 = Number2")), by = "Y")
   expect_equal(as.data.frame(new_th5), as.data.frame(new_tb5))
-  
+
   new_tb6 <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
     mutate(ems_Number1 = custom_ems(3, Number1), ems_Number2 = custom_ems(3, Number2))
   new_th6 <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_ems_tick(3, c("ems_Number1 = Number1", "ems_Number2 = Number2")), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6), as.data.frame(new_tb6))
-  
+
   data$client$close()
 })
 
 test_that("uby_ems_time behaves as expected", {
   data <- setup()
-  
+
   custom_ems_time <- function(ts, decay_time, x) {
     if (length(x) == 1) {
       return(x)
     }
-    time_diffs = as.numeric(ts[2:length(ts)] - ts[1:length(ts)-1])
-    a = exp(-time_diffs/as.numeric(duration(decay_time)))
-    ems = c(x[1])
-    for(i in seq(2,length(x))) {
-      ems[i] = a[i-1]*ems[i-1] + x[i]
+    time_diffs <- as.numeric(ts[2:length(ts)] - ts[1:length(ts) - 1])
+    a <- exp(-time_diffs / as.numeric(duration(decay_time)))
+    ems <- c(x[1])
+    for (i in seq(2, length(x))) {
+      ems[i] <- a[i - 1] * ems[i - 1] + x[i]
     }
     return(ems)
   }
-  
+
   new_tb1 <- data$df3 %>%
     mutate(ems_int_col = custom_ems_time(time_col, "PT3s", int_col))
   new_th1 <- data$th3$
     update_by(uby_ems_time("time_col", "PT3s", "ems_int_col = int_col"))
   expect_equal(as.data.frame(new_th1), as.data.frame(new_tb1))
-  
+
   new_tb2 <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(ems_int_col = custom_ems_time(time_col, "PT3s", int_col))
   new_th2 <- data$th3$
     update_by(uby_ems_time("time_col", "PT3s", "ems_int_col = int_col"), by = "bool_col")
   expect_equal(as.data.frame(new_th2), as.data.frame(new_tb2))
-  
+
   data$client$close()
 })
 
 test_that("uby_emmin_tick behaves as expected", {
   data <- setup()
-  
+
   custom_emmin <- function(decay_ticks, x) {
     if (length(x) == 1) {
       return(x)
     }
-    a = exp(-1/decay_ticks)
-    emmin = c(x[1])
-    for(i in seq(2,length(x))) {
-      emmin[i] = min(a*emmin[i-1], x[i])
+    a <- exp(-1 / decay_ticks)
+    emmin <- c(x[1])
+    for (i in seq(2, length(x))) {
+      emmin[i] <- min(a * emmin[i - 1], x[i])
     }
     return(emmin)
   }
-  
+
   new_tb1 <- data$df1 %>%
     mutate(dbl_col = custom_emmin(2, dbl_col))
   new_th1 <- data$th1$
     update_by(uby_emmin_tick(2, "dbl_col"))
   expect_equal(as.data.frame(new_th1), as.data.frame(new_tb1))
-  
+
   new_tb2 <- data$df2 %>%
     mutate(col1 = custom_emmin(5, col1), col3 = custom_emmin(5, col3))
   new_th2 <- data$th2$
     update_by(uby_emmin_tick(5, c("col1", "col3")))
   expect_equal(as.data.frame(new_th2), as.data.frame(new_tb2))
-  
+
   new_tb3 <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(emmin_int_col = custom_emmin(9, int_col))
   new_th3 <- data$th3$
     update_by(uby_emmin_tick(9, "emmin_int_col = int_col"), by = "bool_col")
   expect_equal(as.data.frame(new_th3), as.data.frame(new_tb3))
-  
+
   new_tb4 <- data$df4 %>%
     group_by(X) %>%
     mutate(emmin_Number1 = custom_emmin(3, Number1), emmin_Number2 = custom_emmin(3, Number2))
   new_th4 <- data$th4$
     update_by(uby_emmin_tick(3, c("emmin_Number1 = Number1", "emmin_Number2 = Number2")), by = "X")
   expect_equal(as.data.frame(new_th4), as.data.frame(new_tb4))
-  
+
   new_tb5 <- data$df5 %>%
     group_by(Y) %>%
     mutate(emmin_Number1 = custom_emmin(3, Number1), emmin_Number2 = custom_emmin(3, Number2))
   new_th5 <- data$th5$
     update_by(uby_emmin_tick(3, c("emmin_Number1 = Number1", "emmin_Number2 = Number2")), by = "Y")
   expect_equal(as.data.frame(new_th5), as.data.frame(new_tb5))
-  
+
   new_tb6 <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
     mutate(emmin_Number1 = custom_emmin(3, Number1), emmin_Number2 = custom_emmin(3, Number2))
   new_th6 <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_emmin_tick(3, c("emmin_Number1 = Number1", "emmin_Number2 = Number2")), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6), as.data.frame(new_tb6))
-  
+
   data$client$close()
 })
 
 test_that("uby_emmin_time behaves as expected", {
   data <- setup()
-  
+
   custom_emmin_time <- function(ts, decay_time, x) {
     if (length(x) == 1) {
       return(x)
     }
-    time_diffs = as.numeric(ts[2:length(ts)] - ts[1:length(ts)-1])
-    a = exp(-time_diffs/as.numeric(duration(decay_time)))
-    emmin = c(x[1])
-    for(i in seq(2,length(x))) {
-      emmin[i] = min(a[i-1]*emmin[i-1], x[i])
+    time_diffs <- as.numeric(ts[2:length(ts)] - ts[1:length(ts) - 1])
+    a <- exp(-time_diffs / as.numeric(duration(decay_time)))
+    emmin <- c(x[1])
+    for (i in seq(2, length(x))) {
+      emmin[i] <- min(a[i - 1] * emmin[i - 1], x[i])
     }
     return(emmin)
   }
-  
+
   new_tb1 <- data$df3 %>%
     mutate(emmin_int_col = custom_emmin_time(time_col, "PT3s", int_col))
   new_th1 <- data$th3$
     update_by(uby_emmin_time("time_col", "PT3s", "emmin_int_col = int_col"))
   expect_equal(as.data.frame(new_th1), as.data.frame(new_tb1))
-  
+
   new_tb2 <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(emmin_int_col = custom_emmin_time(time_col, "PT3s", int_col))
   new_th2 <- data$th3$
     update_by(uby_emmin_time("time_col", "PT3s", "emmin_int_col = int_col"), by = "bool_col")
   expect_equal(as.data.frame(new_th2), as.data.frame(new_tb2))
-  
+
   data$client$close()
 })
 
 test_that("uby_emmax_tick behaves as expected", {
   data <- setup()
-  
+
   custom_emmax <- function(decay_ticks, x) {
     if (length(x) == 1) {
       return(x)
     }
-    a = exp(-1/decay_ticks)
-    emmax = c(x[1])
-    for(i in seq(2,length(x))) {
-      emmax[i] = max(a*emmax[i-1], x[i])
+    a <- exp(-1 / decay_ticks)
+    emmax <- c(x[1])
+    for (i in seq(2, length(x))) {
+      emmax[i] <- max(a * emmax[i - 1], x[i])
     }
     return(emmax)
   }
-  
+
   new_tb1 <- data$df1 %>%
     mutate(dbl_col = custom_emmax(2, dbl_col))
   new_th1 <- data$th1$
     update_by(uby_emmax_tick(2, "dbl_col"))
   expect_equal(as.data.frame(new_th1), as.data.frame(new_tb1))
-  
+
   new_tb2 <- data$df2 %>%
     mutate(col1 = custom_emmax(5, col1), col3 = custom_emmax(5, col3))
   new_th2 <- data$th2$
     update_by(uby_emmax_tick(5, c("col1", "col3")))
   expect_equal(as.data.frame(new_th2), as.data.frame(new_tb2))
-  
+
   new_tb3 <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(emmax_int_col = custom_emmax(9, int_col))
   new_th3 <- data$th3$
     update_by(uby_emmax_tick(9, "emmax_int_col = int_col"), by = "bool_col")
   expect_equal(as.data.frame(new_th3), as.data.frame(new_tb3))
-  
+
   new_tb4 <- data$df4 %>%
     group_by(X) %>%
     mutate(emmax_Number1 = custom_emmax(3, Number1), emmax_Number2 = custom_emmax(3, Number2))
   new_th4 <- data$th4$
     update_by(uby_emmax_tick(3, c("emmax_Number1 = Number1", "emmax_Number2 = Number2")), by = "X")
   expect_equal(as.data.frame(new_th4), as.data.frame(new_tb4))
-  
+
   new_tb5 <- data$df5 %>%
     group_by(Y) %>%
     mutate(emmax_Number1 = custom_emmax(3, Number1), emmax_Number2 = custom_emmax(3, Number2))
   new_th5 <- data$th5$
     update_by(uby_emmax_tick(3, c("emmax_Number1 = Number1", "emmax_Number2 = Number2")), by = "Y")
   expect_equal(as.data.frame(new_th5), as.data.frame(new_tb5))
-  
+
   new_tb6 <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
     mutate(emmax_Number1 = custom_emmax(3, Number1), emmax_Number2 = custom_emmax(3, Number2))
   new_th6 <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_emmax_tick(3, c("emmax_Number1 = Number1", "emmax_Number2 = Number2")), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6), as.data.frame(new_tb6))
-  
+
   data$client$close()
 })
 
 test_that("uby_emmax_time behaves as expected", {
   data <- setup()
-  
+
   custom_emmax_time <- function(ts, decay_time, x) {
     if (length(x) == 1) {
       return(x)
     }
-    time_diffs = as.numeric(ts[2:length(ts)] - ts[1:length(ts)-1])
-    a = exp(-time_diffs/as.numeric(duration(decay_time)))
-    emmax = c(x[1])
-    for(i in seq(2,length(x))) {
-      emmax[i] = max(a[i-1]*emmax[i-1], x[i])
+    time_diffs <- as.numeric(ts[2:length(ts)] - ts[1:length(ts) - 1])
+    a <- exp(-time_diffs / as.numeric(duration(decay_time)))
+    emmax <- c(x[1])
+    for (i in seq(2, length(x))) {
+      emmax[i] <- max(a[i - 1] * emmax[i - 1], x[i])
     }
     return(emmax)
   }
-  
+
   new_tb1 <- data$df3 %>%
     mutate(emmax_int_col = custom_emmax_time(time_col, "PT3s", int_col))
   new_th1 <- data$th3$
     update_by(uby_emmax_time("time_col", "PT3s", "emmax_int_col = int_col"))
   expect_equal(as.data.frame(new_th1), as.data.frame(new_tb1))
-  
+
   new_tb2 <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(emmax_int_col = custom_emmax_time(time_col, "PT3s", int_col))
   new_th2 <- data$th3$
     update_by(uby_emmax_time("time_col", "PT3s", "emmax_int_col = int_col"), by = "bool_col")
   expect_equal(as.data.frame(new_th2), as.data.frame(new_tb2))
-  
+
   data$client$close()
 })
 
 test_that("uby_emstd_tick behaves as expected", {
   data <- setup()
-  
+
   custom_emstd <- function(decay_ticks, x) {
     if (length(x) == 1) {
       return(NA)
     }
-    a = exp(-1/decay_ticks)
-    current_ema = x[1]
-    emvar = c(0)
-    for(i in seq(2,length(x))) {
-      emvar[i] = a*(emvar[i-1] + (1-a)*((x[i] - current_ema)^2))
-      current_ema = a*current_ema + (1-a)*x[i]
+    a <- exp(-1 / decay_ticks)
+    current_ema <- x[1]
+    emvar <- c(0)
+    for (i in seq(2, length(x))) {
+      emvar[i] <- a * (emvar[i - 1] + (1 - a) * ((x[i] - current_ema)^2))
+      current_ema <- a * current_ema + (1 - a) * x[i]
     }
-    emvar[1] = NA
+    emvar[1] <- NA
     return(sqrt(emvar))
   }
 
@@ -765,632 +775,710 @@ test_that("uby_emstd_tick behaves as expected", {
 
 test_that("uby_emstd_time behaves as expected", {
   data <- setup()
-  
+
   custom_emstd_time <- function(ts, decay_time, x) {
     if (length(x) == 1) {
       return(NA)
     }
-    time_diffs = as.numeric(ts[2:length(ts)] - ts[1:length(ts)-1])
-    a = exp(-time_diffs/as.numeric(duration(decay_time)))
-    current_ema = x[1]
-    emvar = c(0)
-    for(i in seq(2,length(x))) {
-      emvar[i] = a[i-1]*(emvar[i-1] + (1-a[i-1])*((x[i] - current_ema)^2))
-      current_ema = a[i-1]*current_ema + (1-a[i-1])*x[i]
+    time_diffs <- as.numeric(ts[2:length(ts)] - ts[1:(length(ts) - 1)])
+    a <- exp(-time_diffs / as.numeric(duration(decay_time)))
+    current_ema <- x[1]
+    emvar <- c(0)
+    for (i in seq(2, length(x))) {
+      emvar[i] <- a[i - 1] * (emvar[i - 1] + (1 - a[i - 1]) * ((x[i] - current_ema)^2))
+      current_ema <- a[i - 1] * current_ema + (1 - a[i - 1]) * x[i]
     }
-    emvar[1] = NA
+    emvar[1] <- NA
     return(sqrt(emvar))
   }
-  
+
   new_tb1 <- data$df3 %>%
     mutate(emstd_int_col = custom_emstd_time(time_col, "PT3s", int_col))
   new_th1 <- data$th3$
     update_by(uby_emstd_time("time_col", "PT3s", "emstd_int_col = int_col"))
   expect_equal(as.data.frame(new_th1), as.data.frame(new_tb1))
-  
+
   new_tb2 <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(emstd_int_col = custom_emstd_time(time_col, "PT3s", int_col))
   new_th2 <- data$th3$
     update_by(uby_emstd_time("time_col", "PT3s", "emstd_int_col = int_col"), by = "bool_col")
   expect_equal(as.data.frame(new_th2), as.data.frame(new_tb2))
-  
+
   data$client$close()
 })
 
 test_that("uby_rolling_sum_tick behaves as expected", {
   data <- setup()
-  
+
   new_tb1a <- data$df1 %>%
     mutate(dbl_col = rollapply(dbl_col, 3, sum, partial = TRUE, align = "right"))
   new_th1a <- data$th1$
     update_by(uby_rolling_sum_tick("dbl_col", rev_ticks = 3))
   expect_equal(as.data.frame(new_th1a), as.data.frame(new_tb1a))
-  
+
   new_tb1b <- data$df1 %>%
     mutate(dbl_col = rollapply(dbl_col, 3, sum, partial = TRUE, align = "left"))
   new_th1b <- data$th1$
     update_by(uby_rolling_sum_tick("dbl_col", rev_ticks = 1, fwd_ticks = 2))
   expect_equal(as.data.frame(new_th1b), as.data.frame(new_tb1b))
-  
+
   new_tb1c <- data$df1 %>%
     mutate(dbl_col = rollapply(dbl_col, 3, sum, partial = TRUE, align = "center"))
   new_th1c <- data$th1$
     update_by(uby_rolling_sum_tick("dbl_col", rev_ticks = 2, fwd_ticks = 1))
   expect_equal(as.data.frame(new_th1c), as.data.frame(new_tb1c))
-  
+
   new_tb2a <- data$df2 %>%
-    mutate(col1 = rollapply(col1, 5, sum, partial = TRUE, align = "right"),
-           col3 = rollapply(col3, 5, sum, partial = TRUE, align = "right"))
+    mutate(
+      col1 = rollapply(col1, 5, sum, partial = TRUE, align = "right"),
+      col3 = rollapply(col3, 5, sum, partial = TRUE, align = "right")
+    )
   new_th2a <- data$th2$
     update_by(uby_rolling_sum_tick(c("col1", "col3"), rev_ticks = 5))
   expect_equal(as.data.frame(new_th2a), as.data.frame(new_tb2a))
-  
+
   new_tb2b <- data$df2 %>%
-    mutate(col1 = rollapply(col1, 5, sum, partial = TRUE, align = "left"),
-           col3 = rollapply(col3, 5, sum, partial = TRUE, align = "left"))
+    mutate(
+      col1 = rollapply(col1, 5, sum, partial = TRUE, align = "left"),
+      col3 = rollapply(col3, 5, sum, partial = TRUE, align = "left")
+    )
   new_th2b <- data$th2$
     update_by(uby_rolling_sum_tick(c("col1", "col3"), rev_ticks = 1, fwd_ticks = 4))
   expect_equal(as.data.frame(new_th2b), as.data.frame(new_tb2b))
-  
+
   new_tb2c <- data$df2 %>%
-    mutate(col1 = rollapply(col1, 5, sum, partial = TRUE, align = "center"),
-           col3 = rollapply(col3, 5, sum, partial = TRUE, align = "center"))
+    mutate(
+      col1 = rollapply(col1, 5, sum, partial = TRUE, align = "center"),
+      col3 = rollapply(col3, 5, sum, partial = TRUE, align = "center")
+    )
   new_th2c <- data$th2$
     update_by(uby_rolling_sum_tick(c("col1", "col3"), rev_ticks = 3, fwd_ticks = 2))
   expect_equal(as.data.frame(new_th2c), as.data.frame(new_tb2c))
-  
+
   new_tb3a <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(int_col = rollapply(int_col, 9, sum, partial = TRUE, align = "right"))
   new_th3a <- data$th3$
     update_by(uby_rolling_sum_tick("int_col", rev_ticks = 9), by = "bool_col")
   expect_equal(as.data.frame(new_th3a), as.data.frame(new_tb3a))
-  
+
   new_tb3b <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(int_col = rollapply(int_col, 9, sum, partial = TRUE, align = "left"))
   new_th3b <- data$th3$
     update_by(uby_rolling_sum_tick("int_col", rev_ticks = 1, fwd_ticks = 8), by = "bool_col")
   expect_equal(as.data.frame(new_th3b), as.data.frame(new_tb3b))
-  
+
   new_tb3c <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(int_col = rollapply(int_col, 9, sum, partial = TRUE, align = "center"))
   new_th3c <- data$th3$
     update_by(uby_rolling_sum_tick("int_col", rev_ticks = 5, fwd_ticks = 4), by = "bool_col")
   expect_equal(as.data.frame(new_th3c), as.data.frame(new_tb3c))
-  
+
   new_tb4a <- data$df4 %>%
     group_by(X) %>%
-    mutate(Number1 = rollapply(Number1, 3, sum, partial = TRUE, align = "right"),
-           Number2 = rollapply(Number2, 3, sum, partial = TRUE, align = "right"))
+    mutate(
+      Number1 = rollapply(Number1, 3, sum, partial = TRUE, align = "right"),
+      Number2 = rollapply(Number2, 3, sum, partial = TRUE, align = "right")
+    )
   new_th4a <- data$th4$
     update_by(uby_rolling_sum_tick(c("Number1", "Number2"), rev_ticks = 3), by = "X")
   expect_equal(as.data.frame(new_th4a), as.data.frame(new_tb4a))
-  
+
   new_tb4b <- data$df4 %>%
     group_by(X) %>%
-    mutate(Number1 = rollapply(Number1, 3, sum, partial = TRUE, align = "left"),
-           Number2 = rollapply(Number2, 3, sum, partial = TRUE, align = "left"))
+    mutate(
+      Number1 = rollapply(Number1, 3, sum, partial = TRUE, align = "left"),
+      Number2 = rollapply(Number2, 3, sum, partial = TRUE, align = "left")
+    )
   new_th4b <- data$th4$
     update_by(uby_rolling_sum_tick(c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = "X")
   expect_equal(as.data.frame(new_th4b), as.data.frame(new_tb4b))
-  
+
   new_tb4c <- data$df4 %>%
     group_by(X) %>%
-    mutate(Number1 = rollapply(Number1, 3, sum, partial = TRUE, align = "center"),
-           Number2 = rollapply(Number2, 3, sum, partial = TRUE, align = "center"))
+    mutate(
+      Number1 = rollapply(Number1, 3, sum, partial = TRUE, align = "center"),
+      Number2 = rollapply(Number2, 3, sum, partial = TRUE, align = "center")
+    )
   new_th4c <- data$th4$
     update_by(uby_rolling_sum_tick(c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = "X")
   expect_equal(as.data.frame(new_th4c), as.data.frame(new_tb4c))
-  
+
   new_tb5a <- data$df5 %>%
     group_by(Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, sum, partial = TRUE, align = "right"),
-           Number2 = rollapply(Number2, 3, sum, partial = TRUE, align = "right"))
+    mutate(
+      Number1 = rollapply(Number1, 3, sum, partial = TRUE, align = "right"),
+      Number2 = rollapply(Number2, 3, sum, partial = TRUE, align = "right")
+    )
   new_th5a <- data$th5$
     update_by(uby_rolling_sum_tick(c("Number1", "Number2"), rev_ticks = 3), by = "Y")
   expect_equal(as.data.frame(new_th5a), as.data.frame(new_tb5a))
-  
+
   new_tb5b <- data$df5 %>%
     group_by(Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, sum, partial = TRUE, align = "left"),
-           Number2 = rollapply(Number2, 3, sum, partial = TRUE, align = "left"))
+    mutate(
+      Number1 = rollapply(Number1, 3, sum, partial = TRUE, align = "left"),
+      Number2 = rollapply(Number2, 3, sum, partial = TRUE, align = "left")
+    )
   new_th5b <- data$th5$
     update_by(uby_rolling_sum_tick(c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = "Y")
   expect_equal(as.data.frame(new_th5b), as.data.frame(new_tb5b))
-  
+
   new_tb5c <- data$df5 %>%
     group_by(Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, sum, partial = TRUE, align = "center"),
-           Number2 = rollapply(Number2, 3, sum, partial = TRUE, align = "center"))
+    mutate(
+      Number1 = rollapply(Number1, 3, sum, partial = TRUE, align = "center"),
+      Number2 = rollapply(Number2, 3, sum, partial = TRUE, align = "center")
+    )
   new_th5c <- data$th5$
     update_by(uby_rolling_sum_tick(c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = "Y")
   expect_equal(as.data.frame(new_th5c), as.data.frame(new_tb5c))
-  
+
   new_tb6a <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, sum, partial = TRUE, align = "right"),
-           Number2 = rollapply(Number2, 3, sum, partial = TRUE, align = "right"))
+    mutate(
+      Number1 = rollapply(Number1, 3, sum, partial = TRUE, align = "right"),
+      Number2 = rollapply(Number2, 3, sum, partial = TRUE, align = "right")
+    )
   new_th6a <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_sum_tick(c("Number1", "Number2"), rev_ticks = 3), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6a), as.data.frame(new_tb6a))
-  
+
   new_tb6b <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, sum, partial = TRUE, align = "left"),
-           Number2 = rollapply(Number2, 3, sum, partial = TRUE, align = "left"))
+    mutate(
+      Number1 = rollapply(Number1, 3, sum, partial = TRUE, align = "left"),
+      Number2 = rollapply(Number2, 3, sum, partial = TRUE, align = "left")
+    )
   new_th6b <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_sum_tick(c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6b), as.data.frame(new_tb6b))
-  
+
   new_tb6c <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, sum, partial = TRUE, align = "center"),
-           Number2 = rollapply(Number2, 3, sum, partial = TRUE, align = "center"))
+    mutate(
+      Number1 = rollapply(Number1, 3, sum, partial = TRUE, align = "center"),
+      Number2 = rollapply(Number2, 3, sum, partial = TRUE, align = "center")
+    )
   new_th6c <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_sum_tick(c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6c), as.data.frame(new_tb6c))
-  
+
   data$client$close()
 })
 
 test_that("uby_rolling_sum_time behaves as expected", {
   data <- setup()
-  
+
   new_tb1a <- head(data$df3, 500) %>%
-    mutate(int_col = rollapply(int_col, 9, sum, partial=TRUE, align="right"))
+    mutate(int_col = rollapply(int_col, 9, sum, partial = TRUE, align = "right"))
   new_th1a <- head(data$th3, 500)$
     update_by(uby_rolling_sum_time("time_col", "int_col", "PT8s"))
   expect_equal(as.data.frame(new_th1a), as.data.frame(new_tb1a))
-  
+
   new_tb1b <- head(data$df3, 500) %>%
-    mutate(int_col = rollapply(int_col, 9, sum, partial=TRUE, align="left"))
+    mutate(int_col = rollapply(int_col, 9, sum, partial = TRUE, align = "left"))
   new_th1b <- head(data$th3, 500)$
     update_by(uby_rolling_sum_time("time_col", "int_col", "PT0s", "PT8s"))
   expect_equal(as.data.frame(new_th1b), as.data.frame(new_tb1b))
-  
+
   new_tb1c <- head(data$df3, 500) %>%
-    mutate(int_col = rollapply(int_col, 9, sum, partial=TRUE, align="center"))
+    mutate(int_col = rollapply(int_col, 9, sum, partial = TRUE, align = "center"))
   new_th1c <- head(data$th3, 500)$
     update_by(uby_rolling_sum_time("time_col", "int_col", "PT4s", "PT4s"))
   expect_equal(as.data.frame(new_th1c), as.data.frame(new_tb1c))
-  
+
   new_tb2a <- head(data$df3, 500) %>%
-    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width=9, FUN=sum, partial=TRUE, align="right", na.rm=TRUE))
+    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width = 9, FUN = sum, partial = TRUE, align = "right", na.rm = TRUE))
   new_th2a <- head(data$th3, 500)$
     update_by(uby_rolling_sum_time("time_col", "int_col", "PT8s"), by = "bool_col")
   expect_equal(as.data.frame(new_th2a), as.data.frame(new_tb2a))
-  
+
   new_tb2b <- head(data$df3, 500) %>%
-    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width=9, FUN=sum, partial=TRUE, align="left", na.rm=TRUE))
+    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width = 9, FUN = sum, partial = TRUE, align = "left", na.rm = TRUE))
   new_th2b <- head(data$th3, 500)$
     update_by(uby_rolling_sum_time("time_col", "int_col", "PT0s", "PT8s"), by = "bool_col")
   expect_equal(as.data.frame(new_th2b), as.data.frame(new_tb2b))
-  
+
   new_tb2c <- head(data$df3, 500) %>%
-    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width=9, FUN=sum, partial=TRUE, align="center", na.rm=TRUE))
+    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width = 9, FUN = sum, partial = TRUE, align = "center", na.rm = TRUE))
   new_th2c <- head(data$th3, 500)$
     update_by(uby_rolling_sum_time("time_col", "int_col", "PT4s", "PT4s"), by = "bool_col")
   expect_equal(as.data.frame(new_th2b), as.data.frame(new_tb2b))
-  
+
   data$client$close()
 })
 
 test_that("uby_rolling_group_tick behaves as expected", {
   data <- setup()
-  
-  right_group <- list(1.65,
-                    c(1.6500, 3.1234),
-                    c(1.6500, 3.1234, 100000.5000),
-                    c(3.1234, 100000.5000, 543.234567),
-                    c(100000.5000, 543.234567, 0.0000))
+
+  right_group <- list(
+    1.65,
+    c(1.6500, 3.1234),
+    c(1.6500, 3.1234, 100000.5000),
+    c(3.1234, 100000.5000, 543.234567),
+    c(100000.5000, 543.234567, 0.0000)
+  )
   new_th1a <- data$th1$
     update_by(uby_rolling_group_tick("dbl_col_group = dbl_col", rev_ticks = 3))
   expect_equal(as.list(as.data.frame(new_th1a)$dbl_col_group), right_group)
-  
-  
-  left_group <- list(c(1.6500, 3.1234, 100000.5000),
-                     c(3.1234, 100000.5000, 543.234567),
-                     c(100000.5000, 543.234567, 0.0000),
-                     c(543.234567, 0.0000),
-                     0)
+
+
+  left_group <- list(
+    c(1.6500, 3.1234, 100000.5000),
+    c(3.1234, 100000.5000, 543.234567),
+    c(100000.5000, 543.234567, 0.0000),
+    c(543.234567, 0.0000),
+    0
+  )
   new_th1b <- data$th1$
     update_by(uby_rolling_group_tick("dbl_col_group = dbl_col", rev_ticks = 1, fwd_ticks = 2))
   expect_equal(as.list(as.data.frame(new_th1b)$dbl_col_group), left_group)
-  
-  
-  center_group <- list(c(1.6500, 3.1234),
-                       c(1.6500, 3.1234, 100000.5000),
-                       c(3.1234, 100000.5000, 543.234567),
-                       c(100000.5000, 543.234567, 0.0000),
-                       c(543.234567, 0.0000))
+
+
+  center_group <- list(
+    c(1.6500, 3.1234),
+    c(1.6500, 3.1234, 100000.5000),
+    c(3.1234, 100000.5000, 543.234567),
+    c(100000.5000, 543.234567, 0.0000),
+    c(543.234567, 0.0000)
+  )
   new_th1c <- data$th1$
     update_by(uby_rolling_group_tick("dbl_col_group = dbl_col", rev_ticks = 2, fwd_ticks = 1))
   expect_equal(as.list(as.data.frame(new_th1c)$dbl_col_group), center_group)
-  
+
   data$client$close()
 })
 
 test_that("uby_rolling_group_time behaves as expected", {
   data <- setup()
-  
-  right_group <- c(lapply(1:9, function(x) 1:x), lapply(2:492, function(x) c(x:(x+8))))
+
+  right_group <- c(lapply(1:9, function(x) 1:x), lapply(2:492, function(x) c(x:(x + 8))))
   new_th1a <- data$deterministic_th3$
     update_by(uby_rolling_group_time("time_col", "int_col_group = int_col", "PT8s"))
   expect_equal(as.list(as.data.frame(new_th1a)$int_col_group), right_group)
-  
-  left_group <- c(lapply(1:491, function(x) c(x:(x+8))), lapply(492:500, function(x) x:500)) 
+
+  left_group <- c(lapply(1:491, function(x) c(x:(x + 8))), lapply(492:500, function(x) x:500))
   new_th1b <- data$deterministic_th3$
     update_by(uby_rolling_group_time("time_col", "int_col_group = int_col", "PT0s", "PT8s"))
   expect_equal(as.list(as.data.frame(new_th1b)$int_col_group), left_group)
-  
-  center_group <- c(lapply(5:9, function(x) 1:x), lapply(2:491, function(x) c(x:(x+8))), lapply(492:496, function(x) x:500))
+
+  center_group <- c(lapply(5:9, function(x) 1:x), lapply(2:491, function(x) c(x:(x + 8))), lapply(492:496, function(x) x:500))
   new_th1c <- data$deterministic_th3$
     update_by(uby_rolling_group_time("time_col", "int_col_group = int_col", "PT4s", "PT4s"))
   expect_equal(as.list(as.data.frame(new_th1c)$int_col_group), center_group)
-  
+
   data$client$close()
 })
 
 test_that("uby_rolling_avg_tick behaves as expected", {
   data <- setup()
-  
+
   new_tb1a <- data$df1 %>%
     mutate(dbl_col = rollapply(dbl_col, 3, mean, partial = TRUE, align = "right"))
   new_th1a <- data$th1$
     update_by(uby_rolling_avg_tick("dbl_col", rev_ticks = 3))
   expect_equal(as.data.frame(new_th1a), as.data.frame(new_tb1a))
-  
+
   new_tb1b <- data$df1 %>%
     mutate(dbl_col = rollapply(dbl_col, 3, mean, partial = TRUE, align = "left"))
   new_th1b <- data$th1$
     update_by(uby_rolling_avg_tick("dbl_col", rev_ticks = 1, fwd_ticks = 2))
   expect_equal(as.data.frame(new_th1b), as.data.frame(new_tb1b))
-  
+
   new_tb1c <- data$df1 %>%
     mutate(dbl_col = rollapply(dbl_col, 3, mean, partial = TRUE, align = "center"))
   new_th1c <- data$th1$
     update_by(uby_rolling_avg_tick("dbl_col", rev_ticks = 2, fwd_ticks = 1))
   expect_equal(as.data.frame(new_th1c), as.data.frame(new_tb1c))
-  
+
   new_tb2a <- data$df2 %>%
-    mutate(col1 = rollapply(col1, 5, mean, partial = TRUE, align = "right"),
-           col3 = rollapply(col3, 5, mean, partial = TRUE, align = "right"))
+    mutate(
+      col1 = rollapply(col1, 5, mean, partial = TRUE, align = "right"),
+      col3 = rollapply(col3, 5, mean, partial = TRUE, align = "right")
+    )
   new_th2a <- data$th2$
     update_by(uby_rolling_avg_tick(c("col1", "col3"), rev_ticks = 5))
   expect_equal(as.data.frame(new_th2a), as.data.frame(new_tb2a))
-  
+
   new_tb2b <- data$df2 %>%
-    mutate(col1 = rollapply(col1, 5, mean, partial = TRUE, align = "left"),
-           col3 = rollapply(col3, 5, mean, partial = TRUE, align = "left"))
+    mutate(
+      col1 = rollapply(col1, 5, mean, partial = TRUE, align = "left"),
+      col3 = rollapply(col3, 5, mean, partial = TRUE, align = "left")
+    )
   new_th2b <- data$th2$
     update_by(uby_rolling_avg_tick(c("col1", "col3"), rev_ticks = 1, fwd_ticks = 4))
   expect_equal(as.data.frame(new_th2b), as.data.frame(new_tb2b))
-  
+
   new_tb2c <- data$df2 %>%
-    mutate(col1 = rollapply(col1, 5, mean, partial = TRUE, align = "center"),
-           col3 = rollapply(col3, 5, mean, partial = TRUE, align = "center"))
+    mutate(
+      col1 = rollapply(col1, 5, mean, partial = TRUE, align = "center"),
+      col3 = rollapply(col3, 5, mean, partial = TRUE, align = "center")
+    )
   new_th2c <- data$th2$
     update_by(uby_rolling_avg_tick(c("col1", "col3"), rev_ticks = 3, fwd_ticks = 2))
   expect_equal(as.data.frame(new_th2c), as.data.frame(new_tb2c))
-  
+
   new_tb3a <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(int_col = rollapply(int_col, 9, mean, partial = TRUE, align = "right"))
   new_th3a <- data$th3$
     update_by(uby_rolling_avg_tick("int_col", rev_ticks = 9), by = "bool_col")
   expect_equal(as.data.frame(new_th3a), as.data.frame(new_tb3a))
-  
+
   new_tb3b <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(int_col = rollapply(int_col, 9, mean, partial = TRUE, align = "left"))
   new_th3b <- data$th3$
     update_by(uby_rolling_avg_tick("int_col", rev_ticks = 1, fwd_ticks = 8), by = "bool_col")
   expect_equal(as.data.frame(new_th3b), as.data.frame(new_tb3b))
-  
+
   new_tb3c <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(int_col = rollapply(int_col, 9, mean, partial = TRUE, align = "center"))
   new_th3c <- data$th3$
     update_by(uby_rolling_avg_tick("int_col", rev_ticks = 5, fwd_ticks = 4), by = "bool_col")
   expect_equal(as.data.frame(new_th3c), as.data.frame(new_tb3c))
-  
+
   new_tb4a <- data$df4 %>%
     group_by(X) %>%
-    mutate(Number1 = rollapply(Number1, 3, mean, partial = TRUE, align = "right"),
-           Number2 = rollapply(Number2, 3, mean, partial = TRUE, align = "right"))
+    mutate(
+      Number1 = rollapply(Number1, 3, mean, partial = TRUE, align = "right"),
+      Number2 = rollapply(Number2, 3, mean, partial = TRUE, align = "right")
+    )
   new_th4a <- data$th4$
     update_by(uby_rolling_avg_tick(c("Number1", "Number2"), rev_ticks = 3), by = "X")
   expect_equal(as.data.frame(new_th4a), as.data.frame(new_tb4a))
-  
+
   new_tb4b <- data$df4 %>%
     group_by(X) %>%
-    mutate(Number1 = rollapply(Number1, 3, mean, partial = TRUE, align = "left"),
-           Number2 = rollapply(Number2, 3, mean, partial = TRUE, align = "left"))
+    mutate(
+      Number1 = rollapply(Number1, 3, mean, partial = TRUE, align = "left"),
+      Number2 = rollapply(Number2, 3, mean, partial = TRUE, align = "left")
+    )
   new_th4b <- data$th4$
     update_by(uby_rolling_avg_tick(c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = "X")
   expect_equal(as.data.frame(new_th4b), as.data.frame(new_tb4b))
-  
+
   new_tb4c <- data$df4 %>%
     group_by(X) %>%
-    mutate(Number1 = rollapply(Number1, 3, mean, partial = TRUE, align = "center"),
-           Number2 = rollapply(Number2, 3, mean, partial = TRUE, align = "center"))
+    mutate(
+      Number1 = rollapply(Number1, 3, mean, partial = TRUE, align = "center"),
+      Number2 = rollapply(Number2, 3, mean, partial = TRUE, align = "center")
+    )
   new_th4c <- data$th4$
     update_by(uby_rolling_avg_tick(c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = "X")
   expect_equal(as.data.frame(new_th4c), as.data.frame(new_tb4c))
-  
+
   new_tb5a <- data$df5 %>%
     group_by(Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, mean, partial = TRUE, align = "right"),
-           Number2 = rollapply(Number2, 3, mean, partial = TRUE, align = "right"))
+    mutate(
+      Number1 = rollapply(Number1, 3, mean, partial = TRUE, align = "right"),
+      Number2 = rollapply(Number2, 3, mean, partial = TRUE, align = "right")
+    )
   new_th5a <- data$th5$
     update_by(uby_rolling_avg_tick(c("Number1", "Number2"), rev_ticks = 3), by = "Y")
   expect_equal(as.data.frame(new_th5a), as.data.frame(new_tb5a))
-  
+
   new_tb5b <- data$df5 %>%
     group_by(Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, mean, partial = TRUE, align = "left"),
-           Number2 = rollapply(Number2, 3, mean, partial = TRUE, align = "left"))
+    mutate(
+      Number1 = rollapply(Number1, 3, mean, partial = TRUE, align = "left"),
+      Number2 = rollapply(Number2, 3, mean, partial = TRUE, align = "left")
+    )
   new_th5b <- data$th5$
     update_by(uby_rolling_avg_tick(c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = "Y")
   expect_equal(as.data.frame(new_th5b), as.data.frame(new_tb5b))
-  
+
   new_tb5c <- data$df5 %>%
     group_by(Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, mean, partial = TRUE, align = "center"),
-           Number2 = rollapply(Number2, 3, mean, partial = TRUE, align = "center"))
+    mutate(
+      Number1 = rollapply(Number1, 3, mean, partial = TRUE, align = "center"),
+      Number2 = rollapply(Number2, 3, mean, partial = TRUE, align = "center")
+    )
   new_th5c <- data$th5$
     update_by(uby_rolling_avg_tick(c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = "Y")
   expect_equal(as.data.frame(new_th5c), as.data.frame(new_tb5c))
-  
+
   new_tb6a <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, mean, partial = TRUE, align = "right"),
-           Number2 = rollapply(Number2, 3, mean, partial = TRUE, align = "right"))
+    mutate(
+      Number1 = rollapply(Number1, 3, mean, partial = TRUE, align = "right"),
+      Number2 = rollapply(Number2, 3, mean, partial = TRUE, align = "right")
+    )
   new_th6a <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_avg_tick(c("Number1", "Number2"), rev_ticks = 3), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6a), as.data.frame(new_tb6a))
-  
+
   new_tb6b <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, mean, partial = TRUE, align = "left"),
-           Number2 = rollapply(Number2, 3, mean, partial = TRUE, align = "left"))
+    mutate(
+      Number1 = rollapply(Number1, 3, mean, partial = TRUE, align = "left"),
+      Number2 = rollapply(Number2, 3, mean, partial = TRUE, align = "left")
+    )
   new_th6b <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_avg_tick(c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6b), as.data.frame(new_tb6b))
-  
+
   new_tb6c <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, mean, partial = TRUE, align = "center"),
-           Number2 = rollapply(Number2, 3, mean, partial = TRUE, align = "center"))
+    mutate(
+      Number1 = rollapply(Number1, 3, mean, partial = TRUE, align = "center"),
+      Number2 = rollapply(Number2, 3, mean, partial = TRUE, align = "center")
+    )
   new_th6c <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_avg_tick(c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6c), as.data.frame(new_tb6c))
-  
+
   data$client$close()
 })
 
 test_that("uby_rolling_avg_time behaves as expected", {
   data <- setup()
-  
+
   new_tb1a <- head(data$df3, 500) %>%
-    mutate(int_col = rollapply(int_col, 9, mean, partial=TRUE, align="right"))
+    mutate(int_col = rollapply(int_col, 9, mean, partial = TRUE, align = "right"))
   new_th1a <- head(data$th3, 500)$
     update_by(uby_rolling_avg_time("time_col", "int_col", "PT8s"))
   expect_equal(as.data.frame(new_th1a), as.data.frame(new_tb1a))
-  
+
   new_tb1b <- head(data$df3, 500) %>%
-    mutate(int_col = rollapply(int_col, 9, mean, partial=TRUE, align="left"))
+    mutate(int_col = rollapply(int_col, 9, mean, partial = TRUE, align = "left"))
   new_th1b <- head(data$th3, 500)$
     update_by(uby_rolling_avg_time("time_col", "int_col", "PT0s", "PT8s"))
   expect_equal(as.data.frame(new_th1b), as.data.frame(new_tb1b))
-  
+
   new_tb1c <- head(data$df3, 500) %>%
-    mutate(int_col = rollapply(int_col, 9, mean, partial=TRUE, align="center"))
+    mutate(int_col = rollapply(int_col, 9, mean, partial = TRUE, align = "center"))
   new_th1c <- head(data$th3, 500)$
     update_by(uby_rolling_avg_time("time_col", "int_col", "PT4s", "PT4s"))
   expect_equal(as.data.frame(new_th1c), as.data.frame(new_tb1c))
-  
+
   new_tb2a <- head(data$df3, 500) %>%
-    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width=9, FUN=mean, partial=TRUE, align="right", na.rm=TRUE))
+    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width = 9, FUN = mean, partial = TRUE, align = "right", na.rm = TRUE))
   new_th2a <- head(data$th3, 500)$
     update_by(uby_rolling_avg_time("time_col", "int_col", "PT8s"), by = "bool_col")
   expect_equal(as.data.frame(new_th2a), as.data.frame(new_tb2a))
-  
+
   new_tb2b <- head(data$df3, 500) %>%
-    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width=9, FUN=mean, partial=TRUE, align="left", na.rm=TRUE))
+    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width = 9, FUN = mean, partial = TRUE, align = "left", na.rm = TRUE))
   new_th2b <- head(data$th3, 500)$
     update_by(uby_rolling_avg_time("time_col", "int_col", "PT0s", "PT8s"), by = "bool_col")
   expect_equal(as.data.frame(new_th2b), as.data.frame(new_tb2b))
-  
+
   new_tb2c <- head(data$df3, 500) %>%
-    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width=9, FUN=mean, partial=TRUE, align="center", na.rm=TRUE))
+    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width = 9, FUN = mean, partial = TRUE, align = "center", na.rm = TRUE))
   new_th2c <- head(data$th3, 500)$
     update_by(uby_rolling_avg_time("time_col", "int_col", "PT4s", "PT4s"), by = "bool_col")
   expect_equal(as.data.frame(new_th2b), as.data.frame(new_tb2b))
-  
+
   data$client$close()
 })
 
 test_that("uby_rolling_min_tick behaves as expected", {
   data <- setup()
-  
+
   new_tb1a <- data$df1 %>%
     mutate(dbl_col = rollapply(dbl_col, 3, min, partial = TRUE, align = "right"))
   new_th1a <- data$th1$
     update_by(uby_rolling_min_tick("dbl_col", rev_ticks = 3))
   expect_equal(as.data.frame(new_th1a), as.data.frame(new_tb1a))
-  
+
   new_tb1b <- data$df1 %>%
     mutate(dbl_col = rollapply(dbl_col, 3, min, partial = TRUE, align = "left"))
   new_th1b <- data$th1$
     update_by(uby_rolling_min_tick("dbl_col", rev_ticks = 1, fwd_ticks = 2))
   expect_equal(as.data.frame(new_th1b), as.data.frame(new_tb1b))
-  
+
   new_tb1c <- data$df1 %>%
     mutate(dbl_col = rollapply(dbl_col, 3, min, partial = TRUE, align = "center"))
   new_th1c <- data$th1$
     update_by(uby_rolling_min_tick("dbl_col", rev_ticks = 2, fwd_ticks = 1))
   expect_equal(as.data.frame(new_th1c), as.data.frame(new_tb1c))
-  
+
   new_tb2a <- data$df2 %>%
-    mutate(col1 = rollapply(col1, 5, min, partial = TRUE, align = "right"),
-           col3 = rollapply(col3, 5, min, partial = TRUE, align = "right"))
+    mutate(
+      col1 = rollapply(col1, 5, min, partial = TRUE, align = "right"),
+      col3 = rollapply(col3, 5, min, partial = TRUE, align = "right")
+    )
   new_th2a <- data$th2$
     update_by(uby_rolling_min_tick(c("col1", "col3"), rev_ticks = 5))
   expect_equal(as.data.frame(new_th2a), as.data.frame(new_tb2a))
-  
+
   new_tb2b <- data$df2 %>%
-    mutate(col1 = rollapply(col1, 5, min, partial = TRUE, align = "left"),
-           col3 = rollapply(col3, 5, min, partial = TRUE, align = "left"))
+    mutate(
+      col1 = rollapply(col1, 5, min, partial = TRUE, align = "left"),
+      col3 = rollapply(col3, 5, min, partial = TRUE, align = "left")
+    )
   new_th2b <- data$th2$
     update_by(uby_rolling_min_tick(c("col1", "col3"), rev_ticks = 1, fwd_ticks = 4))
   expect_equal(as.data.frame(new_th2b), as.data.frame(new_tb2b))
-  
+
   new_tb2c <- data$df2 %>%
-    mutate(col1 = rollapply(col1, 5, min, partial = TRUE, align = "center"),
-           col3 = rollapply(col3, 5, min, partial = TRUE, align = "center"))
+    mutate(
+      col1 = rollapply(col1, 5, min, partial = TRUE, align = "center"),
+      col3 = rollapply(col3, 5, min, partial = TRUE, align = "center")
+    )
   new_th2c <- data$th2$
     update_by(uby_rolling_min_tick(c("col1", "col3"), rev_ticks = 3, fwd_ticks = 2))
   expect_equal(as.data.frame(new_th2c), as.data.frame(new_tb2c))
-  
+
   new_tb3a <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(int_col = rollapply(int_col, 9, min, partial = TRUE, align = "right"))
   new_th3a <- data$th3$
     update_by(uby_rolling_min_tick("int_col", rev_ticks = 9), by = "bool_col")
   expect_equal(as.data.frame(new_th3a), as.data.frame(new_tb3a))
-  
+
   new_tb3b <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(int_col = rollapply(int_col, 9, min, partial = TRUE, align = "left"))
   new_th3b <- data$th3$
     update_by(uby_rolling_min_tick("int_col", rev_ticks = 1, fwd_ticks = 8), by = "bool_col")
   expect_equal(as.data.frame(new_th3b), as.data.frame(new_tb3b))
-  
+
   new_tb3c <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(int_col = rollapply(int_col, 9, min, partial = TRUE, align = "center"))
   new_th3c <- data$th3$
     update_by(uby_rolling_min_tick("int_col", rev_ticks = 5, fwd_ticks = 4), by = "bool_col")
   expect_equal(as.data.frame(new_th3c), as.data.frame(new_tb3c))
-  
+
   new_tb4a <- data$df4 %>%
     group_by(X) %>%
-    mutate(Number1 = rollapply(Number1, 3, min, partial = TRUE, align = "right"),
-           Number2 = rollapply(Number2, 3, min, partial = TRUE, align = "right"))
+    mutate(
+      Number1 = rollapply(Number1, 3, min, partial = TRUE, align = "right"),
+      Number2 = rollapply(Number2, 3, min, partial = TRUE, align = "right")
+    )
   new_th4a <- data$th4$
     update_by(uby_rolling_min_tick(c("Number1", "Number2"), rev_ticks = 3), by = "X")
   expect_equal(as.data.frame(new_th4a), as.data.frame(new_tb4a))
-  
+
   new_tb4b <- data$df4 %>%
     group_by(X) %>%
-    mutate(Number1 = rollapply(Number1, 3, min, partial = TRUE, align = "left"),
-           Number2 = rollapply(Number2, 3, min, partial = TRUE, align = "left"))
+    mutate(
+      Number1 = rollapply(Number1, 3, min, partial = TRUE, align = "left"),
+      Number2 = rollapply(Number2, 3, min, partial = TRUE, align = "left")
+    )
   new_th4b <- data$th4$
     update_by(uby_rolling_min_tick(c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = "X")
   expect_equal(as.data.frame(new_th4b), as.data.frame(new_tb4b))
-  
+
   new_tb4c <- data$df4 %>%
     group_by(X) %>%
-    mutate(Number1 = rollapply(Number1, 3, min, partial = TRUE, align = "center"),
-           Number2 = rollapply(Number2, 3, min, partial = TRUE, align = "center"))
+    mutate(
+      Number1 = rollapply(Number1, 3, min, partial = TRUE, align = "center"),
+      Number2 = rollapply(Number2, 3, min, partial = TRUE, align = "center")
+    )
   new_th4c <- data$th4$
     update_by(uby_rolling_min_tick(c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = "X")
   expect_equal(as.data.frame(new_th4c), as.data.frame(new_tb4c))
-  
+
   new_tb5a <- data$df5 %>%
     group_by(Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, min, partial = TRUE, align = "right"),
-           Number2 = rollapply(Number2, 3, min, partial = TRUE, align = "right"))
+    mutate(
+      Number1 = rollapply(Number1, 3, min, partial = TRUE, align = "right"),
+      Number2 = rollapply(Number2, 3, min, partial = TRUE, align = "right")
+    )
   new_th5a <- data$th5$
     update_by(uby_rolling_min_tick(c("Number1", "Number2"), rev_ticks = 3), by = "Y")
   expect_equal(as.data.frame(new_th5a), as.data.frame(new_tb5a))
-  
+
   new_tb5b <- data$df5 %>%
     group_by(Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, min, partial = TRUE, align = "left"),
-           Number2 = rollapply(Number2, 3, min, partial = TRUE, align = "left"))
+    mutate(
+      Number1 = rollapply(Number1, 3, min, partial = TRUE, align = "left"),
+      Number2 = rollapply(Number2, 3, min, partial = TRUE, align = "left")
+    )
   new_th5b <- data$th5$
     update_by(uby_rolling_min_tick(c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = "Y")
   expect_equal(as.data.frame(new_th5b), as.data.frame(new_tb5b))
-  
+
   new_tb5c <- data$df5 %>%
     group_by(Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, min, partial = TRUE, align = "center"),
-           Number2 = rollapply(Number2, 3, min, partial = TRUE, align = "center"))
+    mutate(
+      Number1 = rollapply(Number1, 3, min, partial = TRUE, align = "center"),
+      Number2 = rollapply(Number2, 3, min, partial = TRUE, align = "center")
+    )
   new_th5c <- data$th5$
     update_by(uby_rolling_min_tick(c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = "Y")
   expect_equal(as.data.frame(new_th5c), as.data.frame(new_tb5c))
-  
+
   new_tb6a <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, min, partial = TRUE, align = "right"),
-           Number2 = rollapply(Number2, 3, min, partial = TRUE, align = "right"))
+    mutate(
+      Number1 = rollapply(Number1, 3, min, partial = TRUE, align = "right"),
+      Number2 = rollapply(Number2, 3, min, partial = TRUE, align = "right")
+    )
   new_th6a <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_min_tick(c("Number1", "Number2"), rev_ticks = 3), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6a), as.data.frame(new_tb6a))
-  
+
   new_tb6b <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, min, partial = TRUE, align = "left"),
-           Number2 = rollapply(Number2, 3, min, partial = TRUE, align = "left"))
+    mutate(
+      Number1 = rollapply(Number1, 3, min, partial = TRUE, align = "left"),
+      Number2 = rollapply(Number2, 3, min, partial = TRUE, align = "left")
+    )
   new_th6b <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_min_tick(c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6b), as.data.frame(new_tb6b))
-  
+
   new_tb6c <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, min, partial = TRUE, align = "center"),
-           Number2 = rollapply(Number2, 3, min, partial = TRUE, align = "center"))
+    mutate(
+      Number1 = rollapply(Number1, 3, min, partial = TRUE, align = "center"),
+      Number2 = rollapply(Number2, 3, min, partial = TRUE, align = "center")
+    )
   new_th6c <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_min_tick(c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6c), as.data.frame(new_tb6c))
-  
+
   data$client$close()
 })
 
 test_that("uby_rolling_min_time behaves as expected", {
   data <- setup()
-  
+
   new_tb1a <- head(data$df3, 500) %>%
-    mutate(int_col = rollapply(int_col, 9, min, partial=TRUE, align="right"))
+    mutate(int_col = rollapply(int_col, 9, min, partial = TRUE, align = "right"))
   new_th1a <- head(data$th3, 500)$
     update_by(uby_rolling_min_time("time_col", "int_col", "PT8s"))
   expect_equal(as.data.frame(new_th1a), as.data.frame(new_tb1a))
-  
+
   new_tb1b <- head(data$df3, 500) %>%
-    mutate(int_col = rollapply(int_col, 9, min, partial=TRUE, align="left"))
+    mutate(int_col = rollapply(int_col, 9, min, partial = TRUE, align = "left"))
   new_th1b <- head(data$th3, 500)$
     update_by(uby_rolling_min_time("time_col", "int_col", "PT0s", "PT8s"))
   expect_equal(as.data.frame(new_th1b), as.data.frame(new_tb1b))
-  
+
   new_tb1c <- head(data$df3, 500) %>%
-    mutate(int_col = rollapply(int_col, 9, min, partial=TRUE, align="center"))
+    mutate(int_col = rollapply(int_col, 9, min, partial = TRUE, align = "center"))
   new_th1c <- head(data$th3, 500)$
     update_by(uby_rolling_min_time("time_col", "int_col", "PT4s", "PT4s"))
   expect_equal(as.data.frame(new_th1c), as.data.frame(new_tb1c))
-  
+
   new_tb2a <- head(data$df3, 500) %>%
-    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width=9, FUN=min, partial=TRUE, align="right", na.rm=TRUE))
+    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width = 9, FUN = min, partial = TRUE, align = "right", na.rm = TRUE))
   new_th2a <- head(data$th3, 500)$
     update_by(uby_rolling_min_time("time_col", "int_col", "PT8s"), by = "bool_col")
   expect_equal(as.data.frame(new_th2a), as.data.frame(new_tb2a))
-  
+
   new_tb2b <- head(data$df3, 500) %>%
-    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width=9, FUN=min, partial=TRUE, align="left", na.rm=TRUE))
+    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width = 9, FUN = min, partial = TRUE, align = "left", na.rm = TRUE))
   new_th2b <- head(data$th3, 500)$
     update_by(uby_rolling_min_time("time_col", "int_col", "PT0s", "PT8s"), by = "bool_col")
   expect_equal(as.data.frame(new_th2b), as.data.frame(new_tb2b))
-  
+
   new_tb2c <- head(data$df3, 500) %>%
-    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width=9, FUN=min, partial=TRUE, align="center", na.rm=TRUE))
+    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width = 9, FUN = min, partial = TRUE, align = "center", na.rm = TRUE))
   new_th2c <- head(data$th3, 500)$
     update_by(uby_rolling_min_time("time_col", "int_col", "PT4s", "PT4s"), by = "bool_col")
-  expect_equal(as.data.frame(new_th2b), as.data.frame(new_tb2b))
+  expect_equal(as.data.frame(new_th2c), as.data.frame(new_tb2c))
-  
+
   data$client$close()
 })
 
@@ -1416,22 +1504,28 @@ test_that("uby_rolling_max_tick behaves as expected", {
   expect_equal(as.data.frame(new_th1c), as.data.frame(new_tb1c))
 
   new_tb2a <- data$df2 %>%
-    mutate(col1 = rollapply(col1, 5, max, partial = TRUE, align = "right"),
-           col3 = rollapply(col3, 5, max, partial = TRUE, align = "right"))
+    mutate(
+      col1 = rollapply(col1, 5, max, partial = TRUE, align = "right"),
+      col3 = rollapply(col3, 5, max, partial = TRUE, align = "right")
+    )
   new_th2a <- data$th2$
     update_by(uby_rolling_max_tick(c("col1", "col3"), rev_ticks = 5))
   expect_equal(as.data.frame(new_th2a), as.data.frame(new_tb2a))
 
   new_tb2b <- data$df2 %>%
-    mutate(col1 = rollapply(col1, 5, max, partial = TRUE, align = "left"),
-           col3 = rollapply(col3, 5, max, partial = TRUE, align = "left"))
+    mutate(
+      col1 = rollapply(col1, 5, max, partial = TRUE, align = "left"),
+      col3 = rollapply(col3, 5, max, partial = TRUE, align = "left")
+    )
   new_th2b <- data$th2$
     update_by(uby_rolling_max_tick(c("col1", "col3"), rev_ticks = 1, fwd_ticks = 4))
   expect_equal(as.data.frame(new_th2b), as.data.frame(new_tb2b))
 
   new_tb2c <- data$df2 %>%
-    mutate(col1 = rollapply(col1, 5, max, partial = TRUE, align = "center"),
-           col3 = rollapply(col3, 5, max, partial = TRUE, align = "center"))
+    mutate(
+      col1 = rollapply(col1, 5, max, partial = TRUE, align = "center"),
+      col3 = rollapply(col3, 5, max, partial = TRUE, align = "center")
+    )
   new_th2c <- data$th2$
     update_by(uby_rolling_max_tick(c("col1", "col3"), rev_ticks = 3, fwd_ticks = 2))
   expect_equal(as.data.frame(new_th2c), as.data.frame(new_tb2c))
@@ -1459,72 +1553,90 @@ test_that("uby_rolling_max_tick behaves as expected", {
 
   new_tb4a <- data$df4 %>%
     group_by(X) %>%
-    mutate(Number1 = rollapply(Number1, 3, max, partial = TRUE, align = "right"),
-           Number2 = rollapply(Number2, 3, max, partial = TRUE, align = "right"))
+    mutate(
+      Number1 = rollapply(Number1, 3, max, partial = TRUE, align = "right"),
+      Number2 = rollapply(Number2, 3, max, partial = TRUE, align = "right")
+    )
   new_th4a <- data$th4$
     update_by(uby_rolling_max_tick(c("Number1", "Number2"), rev_ticks = 3), by = "X")
   expect_equal(as.data.frame(new_th4a), as.data.frame(new_tb4a))
 
   new_tb4b <- data$df4 %>%
     group_by(X) %>%
-    mutate(Number1 = rollapply(Number1, 3, max, partial = TRUE, align = "left"),
-           Number2 = rollapply(Number2, 3, max, partial = TRUE, align = "left"))
+    mutate(
+      Number1 = rollapply(Number1, 3, max, partial = TRUE, align = "left"),
+      Number2 = rollapply(Number2, 3, max, partial = TRUE, align = "left")
+    )
   new_th4b <- data$th4$
     update_by(uby_rolling_max_tick(c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = "X")
   expect_equal(as.data.frame(new_th4b), as.data.frame(new_tb4b))
 
   new_tb4c <- data$df4 %>%
     group_by(X) %>%
-    mutate(Number1 = rollapply(Number1, 3, max, partial = TRUE, align = "center"),
-           Number2 = rollapply(Number2, 3, max, partial = TRUE, align = "center"))
+    mutate(
+      Number1 = rollapply(Number1, 3, max, partial = TRUE, align = "center"),
+      Number2 = rollapply(Number2, 3, max, partial = TRUE, align = "center")
+    )
   new_th4c <- data$th4$
     update_by(uby_rolling_max_tick(c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = "X")
   expect_equal(as.data.frame(new_th4c), as.data.frame(new_tb4c))
 
   new_tb5a <- data$df5 %>%
     group_by(Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, max, partial = TRUE, align = "right"),
-           Number2 = rollapply(Number2, 3, max, partial = TRUE, align = "right"))
+    mutate(
+      Number1 = rollapply(Number1, 3, max, partial = TRUE, align = "right"),
+      Number2 = rollapply(Number2, 3, max, partial = TRUE, align = "right")
+    )
   new_th5a <- data$th5$
     update_by(uby_rolling_max_tick(c("Number1", "Number2"), rev_ticks = 3), by = "Y")
   expect_equal(as.data.frame(new_th5a), as.data.frame(new_tb5a))
 
   new_tb5b <- data$df5 %>%
     group_by(Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, max, partial = TRUE, align = "left"),
-           Number2 = rollapply(Number2, 3, max, partial = TRUE, align = "left"))
+    mutate(
+      Number1 = rollapply(Number1, 3, max, partial = TRUE, align = "left"),
+      Number2 = rollapply(Number2, 3, max, partial = TRUE, align = "left")
+    )
   new_th5b <- data$th5$
     update_by(uby_rolling_max_tick(c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = "Y")
   expect_equal(as.data.frame(new_th5b), as.data.frame(new_tb5b))
 
   new_tb5c <- data$df5 %>%
     group_by(Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, max, partial = TRUE, align = "center"),
-           Number2 = rollapply(Number2, 3, max, partial = TRUE, align = "center"))
+    mutate(
+      Number1 = rollapply(Number1, 3, max, partial = TRUE, align = "center"),
+      Number2 = rollapply(Number2, 3, max, partial = TRUE, align = "center")
+    )
   new_th5c <- data$th5$
     update_by(uby_rolling_max_tick(c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = "Y")
   expect_equal(as.data.frame(new_th5c), as.data.frame(new_tb5c))
 
   new_tb6a <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, max, partial = TRUE, align = "right"),
-           Number2 = rollapply(Number2, 3, max, partial = TRUE, align = "right"))
+    mutate(
+      Number1 = rollapply(Number1, 3, max, partial = TRUE, align = "right"),
+      Number2 = rollapply(Number2, 3, max, partial = TRUE, align = "right")
+    )
   new_th6a <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_max_tick(c("Number1", "Number2"), rev_ticks = 3), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6a), as.data.frame(new_tb6a))
 
   new_tb6b <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, max, partial = TRUE, align = "left"),
-           Number2 = rollapply(Number2, 3, max, partial = TRUE, align = "left"))
+    mutate(
+      Number1 = rollapply(Number1, 3, max, partial = TRUE, align = "left"),
+      Number2 = rollapply(Number2, 3, max, partial = TRUE, align = "left")
+    )
   new_th6b <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_max_tick(c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6b), as.data.frame(new_tb6b))
 
   new_tb6c <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, max, partial = TRUE, align = "center"),
-           Number2 = rollapply(Number2, 3, max, partial = TRUE, align = "center"))
+    mutate(
+      Number1 = rollapply(Number1, 3, max, partial = TRUE, align = "center"),
+      Number2 = rollapply(Number2, 3, max, partial = TRUE, align = "center")
+    )
   new_th6c <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_max_tick(c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6c), as.data.frame(new_tb6c))
@@ -1534,405 +1646,455 @@ test_that("uby_rolling_max_tick behaves as expected", {
 
 test_that("uby_rolling_max_time behaves as expected", {
   data <- setup()
-  
+
   new_tb1a <- head(data$df3, 500) %>%
-    mutate(int_col = rollapply(int_col, 9, max, partial=TRUE, align="right"))
+    mutate(int_col = rollapply(int_col, 9, max, partial = TRUE, align = "right"))
   new_th1a <- head(data$th3, 500)$
     update_by(uby_rolling_max_time("time_col", "int_col", "PT8s"))
   expect_equal(as.data.frame(new_th1a), as.data.frame(new_tb1a))
-  
+
   new_tb1b <- head(data$df3, 500) %>%
-    mutate(int_col = rollapply(int_col, 9, max, partial=TRUE, align="left"))
+    mutate(int_col = rollapply(int_col, 9, max, partial = TRUE, align = "left"))
   new_th1b <- head(data$th3, 500)$
     update_by(uby_rolling_max_time("time_col", "int_col", "PT0s", "PT8s"))
   expect_equal(as.data.frame(new_th1b), as.data.frame(new_tb1b))
-  
+
   new_tb1c <- head(data$df3, 500) %>%
-    mutate(int_col = rollapply(int_col, 9, max, partial=TRUE, align="center"))
+    mutate(int_col = rollapply(int_col, 9, max, partial = TRUE, align = "center"))
   new_th1c <- head(data$th3, 500)$
     update_by(uby_rolling_max_time("time_col", "int_col", "PT4s", "PT4s"))
   expect_equal(as.data.frame(new_th1c), as.data.frame(new_tb1c))
-  
+
   new_tb2a <- head(data$df3, 500) %>%
-    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width=9, FUN=max, partial=TRUE, align="right", na.rm=TRUE))
+    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width = 9, FUN = max, partial = TRUE, align = "right", na.rm = TRUE))
   new_th2a <- head(data$th3, 500)$
     update_by(uby_rolling_max_time("time_col", "int_col", "PT8s"), by = "bool_col")
   expect_equal(as.data.frame(new_th2a), as.data.frame(new_tb2a))
-  
+
   new_tb2b <- head(data$df3, 500) %>%
-    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width=9, FUN=max, partial=TRUE, align="left", na.rm=TRUE))
+    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width = 9, FUN = max, partial = TRUE, align = "left", na.rm = TRUE))
   new_th2b <- head(data$th3, 500)$
     update_by(uby_rolling_max_time("time_col", "int_col", "PT0s", "PT8s"), by = "bool_col")
   expect_equal(as.data.frame(new_th2b), as.data.frame(new_tb2b))
-  
+
   new_tb2c <- head(data$df3, 500) %>%
-    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width=9, FUN=max, partial=TRUE, align="center", na.rm=TRUE))
+    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width = 9, FUN = max, partial = TRUE, align = "center", na.rm = TRUE))
   new_th2c <- head(data$th3, 500)$
     update_by(uby_rolling_max_time("time_col", "int_col", "PT4s", "PT4s"), by = "bool_col")
-  expect_equal(as.data.frame(new_th2b), as.data.frame(new_tb2b))
+  expect_equal(as.data.frame(new_th2c), as.data.frame(new_tb2c))
-  
+
   data$client$close()
 })
 
 test_that("uby_rolling_prod_tick behaves as expected", {
   data <- setup()
-  
+
   new_tb1a <- data$df1 %>%
     mutate(dbl_col = rollapply(dbl_col, 3, prod, partial = TRUE, align = "right"))
   new_th1a <- data$th1$
     update_by(uby_rolling_prod_tick("dbl_col", rev_ticks = 3))
   expect_equal(as.data.frame(new_th1a), as.data.frame(new_tb1a))
-  
+
   new_tb1b <- data$df1 %>%
     mutate(dbl_col = rollapply(dbl_col, 3, prod, partial = TRUE, align = "left"))
   new_th1b <- data$th1$
     update_by(uby_rolling_prod_tick("dbl_col", rev_ticks = 1, fwd_ticks = 2))
   expect_equal(as.data.frame(new_th1b), as.data.frame(new_tb1b))
-  
+
   new_tb1c <- data$df1 %>%
     mutate(dbl_col = rollapply(dbl_col, 3, prod, partial = TRUE, align = "center"))
   new_th1c <- data$th1$
     update_by(uby_rolling_prod_tick("dbl_col", rev_ticks = 2, fwd_ticks = 1))
   expect_equal(as.data.frame(new_th1c), as.data.frame(new_tb1c))
-  
+
   new_tb2a <- data$df2 %>%
-    mutate(col1 = rollapply(col1, 5, prod, partial = TRUE, align = "right"),
-           col3 = rollapply(col3, 5, prod, partial = TRUE, align = "right"))
+    mutate(
+      col1 = rollapply(col1, 5, prod, partial = TRUE, align = "right"),
+      col3 = rollapply(col3, 5, prod, partial = TRUE, align = "right")
+    )
   new_th2a <- data$th2$
     update_by(uby_rolling_prod_tick(c("col1", "col3"), rev_ticks = 5))
   expect_equal(as.data.frame(new_th2a), as.data.frame(new_tb2a))
-  
+
   new_tb2b <- data$df2 %>%
-    mutate(col1 = rollapply(col1, 5, prod, partial = TRUE, align = "left"),
-           col3 = rollapply(col3, 5, prod, partial = TRUE, align = "left"))
+    mutate(
+      col1 = rollapply(col1, 5, prod, partial = TRUE, align = "left"),
+      col3 = rollapply(col3, 5, prod, partial = TRUE, align = "left")
+    )
   new_th2b <- data$th2$
     update_by(uby_rolling_prod_tick(c("col1", "col3"), rev_ticks = 1, fwd_ticks = 4))
   expect_equal(as.data.frame(new_th2b), as.data.frame(new_tb2b))
-  
+
   new_tb2c <- data$df2 %>%
-    mutate(col1 = rollapply(col1, 5, prod, partial = TRUE, align = "center"),
-           col3 = rollapply(col3, 5, prod, partial = TRUE, align = "center"))
+    mutate(
+      col1 = rollapply(col1, 5, prod, partial = TRUE, align = "center"),
+      col3 = rollapply(col3, 5, prod, partial = TRUE, align = "center")
+    )
   new_th2c <- data$th2$
     update_by(uby_rolling_prod_tick(c("col1", "col3"), rev_ticks = 3, fwd_ticks = 2))
   expect_equal(as.data.frame(new_th2c), as.data.frame(new_tb2c))
-  
+
   new_tb3a <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(int_col = rollapply(int_col, 9, prod, partial = TRUE, align = "right"))
   new_th3a <- data$th3$
     update_by(uby_rolling_prod_tick("int_col", rev_ticks = 9), by = "bool_col")
   expect_equal(as.data.frame(new_th3a), as.data.frame(new_tb3a))
-  
+
   new_tb3b <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(int_col = rollapply(int_col, 9, prod, partial = TRUE, align = "left"))
   new_th3b <- data$th3$
     update_by(uby_rolling_prod_tick("int_col", rev_ticks = 1, fwd_ticks = 8), by = "bool_col")
   expect_equal(as.data.frame(new_th3b), as.data.frame(new_tb3b))
-  
+
   new_tb3c <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(int_col = rollapply(int_col, 9, prod, partial = TRUE, align = "center"))
   new_th3c <- data$th3$
     update_by(uby_rolling_prod_tick("int_col", rev_ticks = 5, fwd_ticks = 4), by = "bool_col")
   expect_equal(as.data.frame(new_th3c), as.data.frame(new_tb3c))
-  
+
   new_tb4a <- data$df4 %>%
     group_by(X) %>%
-    mutate(Number1 = rollapply(Number1, 3, prod, partial = TRUE, align = "right"),
-           Number2 = rollapply(Number2, 3, prod, partial = TRUE, align = "right"))
+    mutate(
+      Number1 = rollapply(Number1, 3, prod, partial = TRUE, align = "right"),
+      Number2 = rollapply(Number2, 3, prod, partial = TRUE, align = "right")
+    )
   new_th4a <- data$th4$
     update_by(uby_rolling_prod_tick(c("Number1", "Number2"), rev_ticks = 3), by = "X")
   expect_equal(as.data.frame(new_th4a), as.data.frame(new_tb4a))
-  
+
   new_tb4b <- data$df4 %>%
     group_by(X) %>%
-    mutate(Number1 = rollapply(Number1, 3, prod, partial = TRUE, align = "left"),
-           Number2 = rollapply(Number2, 3, prod, partial = TRUE, align = "left"))
+    mutate(
+      Number1 = rollapply(Number1, 3, prod, partial = TRUE, align = "left"),
+      Number2 = rollapply(Number2, 3, prod, partial = TRUE, align = "left")
+    )
   new_th4b <- data$th4$
     update_by(uby_rolling_prod_tick(c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = "X")
   expect_equal(as.data.frame(new_th4b), as.data.frame(new_tb4b))
-  
+
   new_tb4c <- data$df4 %>%
     group_by(X) %>%
-    mutate(Number1 = rollapply(Number1, 3, prod, partial = TRUE, align = "center"),
-           Number2 = rollapply(Number2, 3, prod, partial = TRUE, align = "center"))
+    mutate(
+      Number1 = rollapply(Number1, 3, prod, partial = TRUE, align = "center"),
+      Number2 = rollapply(Number2, 3, prod, partial = TRUE, align = "center")
+    )
   new_th4c <- data$th4$
     update_by(uby_rolling_prod_tick(c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = "X")
   expect_equal(as.data.frame(new_th4c), as.data.frame(new_tb4c))
-  
+
   new_tb5a <- data$df5 %>%
     group_by(Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, prod, partial = TRUE, align = "right"),
-           Number2 = rollapply(Number2, 3, prod, partial = TRUE, align = "right"))
+    mutate(
+      Number1 = rollapply(Number1, 3, prod, partial = TRUE, align = "right"),
+      Number2 = rollapply(Number2, 3, prod, partial = TRUE, align = "right")
+    )
   new_th5a <- data$th5$
     update_by(uby_rolling_prod_tick(c("Number1", "Number2"), rev_ticks = 3), by = "Y")
   expect_equal(as.data.frame(new_th5a), as.data.frame(new_tb5a))
-  
+
   new_tb5b <- data$df5 %>%
     group_by(Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, prod, partial = TRUE, align = "left"),
-           Number2 = rollapply(Number2, 3, prod, partial = TRUE, align = "left"))
+    mutate(
+      Number1 = rollapply(Number1, 3, prod, partial = TRUE, align = "left"),
+      Number2 = rollapply(Number2, 3, prod, partial = TRUE, align = "left")
+    )
   new_th5b <- data$th5$
     update_by(uby_rolling_prod_tick(c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = "Y")
   expect_equal(as.data.frame(new_th5b), as.data.frame(new_tb5b))
-  
+
   new_tb5c <- data$df5 %>%
     group_by(Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, prod, partial = TRUE, align = "center"),
-           Number2 = rollapply(Number2, 3, prod, partial = TRUE, align = "center"))
+    mutate(
+      Number1 = rollapply(Number1, 3, prod, partial = TRUE, align = "center"),
+      Number2 = rollapply(Number2, 3, prod, partial = TRUE, align = "center")
+    )
   new_th5c <- data$th5$
     update_by(uby_rolling_prod_tick(c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = "Y")
   expect_equal(as.data.frame(new_th5c), as.data.frame(new_tb5c))
-  
+
   new_tb6a <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, prod, partial = TRUE, align = "right"),
-           Number2 = rollapply(Number2, 3, prod, partial = TRUE, align = "right"))
+    mutate(
+      Number1 = rollapply(Number1, 3, prod, partial = TRUE, align = "right"),
+      Number2 = rollapply(Number2, 3, prod, partial = TRUE, align = "right")
+    )
   new_th6a <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_prod_tick(c("Number1", "Number2"), rev_ticks = 3), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6a), as.data.frame(new_tb6a))
-  
+
   new_tb6b <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, prod, partial = TRUE, align = "left"),
-           Number2 = rollapply(Number2, 3, prod, partial = TRUE, align = "left"))
+    mutate(
+      Number1 = rollapply(Number1, 3, prod, partial = TRUE, align = "left"),
+      Number2 = rollapply(Number2, 3, prod, partial = TRUE, align = "left")
+    )
   new_th6b <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_prod_tick(c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6b), as.data.frame(new_tb6b))
-  
+
   new_tb6c <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, prod, partial = TRUE, align = "center"),
-           Number2 = rollapply(Number2, 3, prod, partial = TRUE, align = "center"))
+    mutate(
+      Number1 = rollapply(Number1, 3, prod, partial = TRUE, align = "center"),
+      Number2 = rollapply(Number2, 3, prod, partial = TRUE, align = "center")
+    )
   new_th6c <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_prod_tick(c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6c), as.data.frame(new_tb6c))
-  
+
   data$client$close()
 })
 
 test_that("uby_rolling_prod_time behaves as expected", {
   data <- setup()
-  
+
   new_tb1a <- head(data$df3, 500) %>%
-    mutate(int_col = rollapply(int_col, 9, prod, partial=TRUE, align="right"))
+    mutate(int_col = rollapply(int_col, 9, prod, partial = TRUE, align = "right"))
   new_th1a <- head(data$th3, 500)$
     update_by(uby_rolling_prod_time("time_col", "int_col", "PT8s"))
   expect_equal(as.data.frame(new_th1a), as.data.frame(new_tb1a))
-  
+
   new_tb1b <- head(data$df3, 500) %>%
-    mutate(int_col = rollapply(int_col, 9, prod, partial=TRUE, align="left"))
+    mutate(int_col = rollapply(int_col, 9, prod, partial = TRUE, align = "left"))
   new_th1b <- head(data$th3, 500)$
     update_by(uby_rolling_prod_time("time_col", "int_col", "PT0s", "PT8s"))
   expect_equal(as.data.frame(new_th1b), as.data.frame(new_tb1b))
-  
+
   new_tb1c <- head(data$df3, 500) %>%
-    mutate(int_col = rollapply(int_col, 9, prod, partial=TRUE, align="center"))
+    mutate(int_col = rollapply(int_col, 9, prod, partial = TRUE, align = "center"))
   new_th1c <- head(data$th3, 500)$
     update_by(uby_rolling_prod_time("time_col", "int_col", "PT4s", "PT4s"))
   expect_equal(as.data.frame(new_th1c), as.data.frame(new_tb1c))
-  
+
   new_tb2a <- head(data$df3, 500) %>%
-    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width=9, FUN=prod, partial=TRUE, align="right", na.rm=TRUE))
+    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width = 9, FUN = prod, partial = TRUE, align = "right", na.rm = TRUE))
   new_th2a <- head(data$th3, 500)$
     update_by(uby_rolling_prod_time("time_col", "int_col", "PT8s"), by = "bool_col")
   expect_equal(as.data.frame(new_th2a), as.data.frame(new_tb2a))
-  
+
   new_tb2b <- head(data$df3, 500) %>%
-    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width=9, FUN=prod, partial=TRUE, align="left", na.rm=TRUE))
+    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width = 9, FUN = prod, partial = TRUE, align = "left", na.rm = TRUE))
   new_th2b <- head(data$th3, 500)$
     update_by(uby_rolling_prod_time("time_col", "int_col", "PT0s", "PT8s"), by = "bool_col")
   expect_equal(as.data.frame(new_th2b), as.data.frame(new_tb2b))
-  
+
   new_tb2c <- head(data$df3, 500) %>%
-    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width=9, FUN=prod, partial=TRUE, align="center", na.rm=TRUE))
+    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width = 9, FUN = prod, partial = TRUE, align = "center", na.rm = TRUE))
   new_th2c <- head(data$th3, 500)$
     update_by(uby_rolling_prod_time("time_col", "int_col", "PT4s", "PT4s"), by = "bool_col")
-  expect_equal(as.data.frame(new_th2b), as.data.frame(new_tb2b))
+  expect_equal(as.data.frame(new_th2c), as.data.frame(new_tb2c))
-  
+
   data$client$close()
 })
 
 test_that("uby_rolling_count_tick behaves as expected", {
   data <- setup()
-  
+
   new_tb1a <- data$df1 %>%
     mutate(dbl_col = rollapply(dbl_col, 3, length, partial = TRUE, align = "right"))
   new_th1a <- data$th1$
     update_by(uby_rolling_count_tick("dbl_col", rev_ticks = 3))
   expect_equal(as.data.frame(new_th1a), as.data.frame(new_tb1a))
-  
+
   new_tb1b <- data$df1 %>%
     mutate(dbl_col = rollapply(dbl_col, 3, length, partial = TRUE, align = "left"))
   new_th1b <- data$th1$
     update_by(uby_rolling_count_tick("dbl_col", rev_ticks = 1, fwd_ticks = 2))
   expect_equal(as.data.frame(new_th1b), as.data.frame(new_tb1b))
-  
+
   new_tb1c <- data$df1 %>%
     mutate(dbl_col = rollapply(dbl_col, 3, length, partial = TRUE, align = "center"))
   new_th1c <- data$th1$
     update_by(uby_rolling_count_tick("dbl_col", rev_ticks = 2, fwd_ticks = 1))
   expect_equal(as.data.frame(new_th1c), as.data.frame(new_tb1c))
-  
+
   new_tb2a <- data$df2 %>%
-    mutate(col1 = rollapply(col1, 5, length, partial = TRUE, align = "right"),
-           col3 = rollapply(col3, 5, length, partial = TRUE, align = "right"))
+    mutate(
+      col1 = rollapply(col1, 5, length, partial = TRUE, align = "right"),
+      col3 = rollapply(col3, 5, length, partial = TRUE, align = "right")
+    )
   new_th2a <- data$th2$
     update_by(uby_rolling_count_tick(c("col1", "col3"), rev_ticks = 5))
   expect_equal(as.data.frame(new_th2a), as.data.frame(new_tb2a))
-  
+
   new_tb2b <- data$df2 %>%
-    mutate(col1 = rollapply(col1, 5, length, partial = TRUE, align = "left"),
-           col3 = rollapply(col3, 5, length, partial = TRUE, align = "left"))
+    mutate(
+      col1 = rollapply(col1, 5, length, partial = TRUE, align = "left"),
+      col3 = rollapply(col3, 5, length, partial = TRUE, align = "left")
+    )
   new_th2b <- data$th2$
     update_by(uby_rolling_count_tick(c("col1", "col3"), rev_ticks = 1, fwd_ticks = 4))
   expect_equal(as.data.frame(new_th2b), as.data.frame(new_tb2b))
-  
+
   new_tb2c <- data$df2 %>%
-    mutate(col1 = rollapply(col1, 5, length, partial = TRUE, align = "center"),
-           col3 = rollapply(col3, 5, length, partial = TRUE, align = "center"))
+    mutate(
+      col1 = rollapply(col1, 5, length, partial = TRUE, align = "center"),
+      col3 = rollapply(col3, 5, length, partial = TRUE, align = "center")
+    )
   new_th2c <- data$th2$
     update_by(uby_rolling_count_tick(c("col1", "col3"), rev_ticks = 3, fwd_ticks = 2))
   expect_equal(as.data.frame(new_th2c), as.data.frame(new_tb2c))
-  
+
   new_tb3a <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(int_col = rollapply(int_col, 9, length, partial = TRUE, align = "right"))
   new_th3a <- data$th3$
     update_by(uby_rolling_count_tick("int_col", rev_ticks = 9), by = "bool_col")
   expect_equal(as.data.frame(new_th3a), as.data.frame(new_tb3a))
-  
+
   new_tb3b <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(int_col = rollapply(int_col, 9, length, partial = TRUE, align = "left"))
   new_th3b <- data$th3$
     update_by(uby_rolling_count_tick("int_col", rev_ticks = 1, fwd_ticks = 8), by = "bool_col")
   expect_equal(as.data.frame(new_th3b), as.data.frame(new_tb3b))
-  
+
   new_tb3c <- data$df3 %>%
     group_by(bool_col) %>%
     mutate(int_col = rollapply(int_col, 9, length, partial = TRUE, align = "center"))
   new_th3c <- data$th3$
     update_by(uby_rolling_count_tick("int_col", rev_ticks = 5, fwd_ticks = 4), by = "bool_col")
   expect_equal(as.data.frame(new_th3c), as.data.frame(new_tb3c))
-  
+
   new_tb4a <- data$df4 %>%
     group_by(X) %>%
-    mutate(Number1 = rollapply(Number1, 3, length, partial = TRUE, align = "right"),
-           Number2 = rollapply(Number2, 3, length, partial = TRUE, align = "right"))
+    mutate(
+      Number1 = rollapply(Number1, 3, length, partial = TRUE, align = "right"),
+      Number2 = rollapply(Number2, 3, length, partial = TRUE, align = "right")
+    )
   new_th4a <- data$th4$
     update_by(uby_rolling_count_tick(c("Number1", "Number2"), rev_ticks = 3), by = "X")
   expect_equal(as.data.frame(new_th4a), as.data.frame(new_tb4a))
-  
+
   new_tb4b <- data$df4 %>%
     group_by(X) %>%
-    mutate(Number1 = rollapply(Number1, 3, length, partial = TRUE, align = "left"),
-           Number2 = rollapply(Number2, 3, length, partial = TRUE, align = "left"))
+    mutate(
+      Number1 = rollapply(Number1, 3, length, partial = TRUE, align = "left"),
+      Number2 = rollapply(Number2, 3, length, partial = TRUE, align = "left")
+    )
   new_th4b <- data$th4$
     update_by(uby_rolling_count_tick(c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = "X")
   expect_equal(as.data.frame(new_th4b), as.data.frame(new_tb4b))
-  
+
   new_tb4c <- data$df4 %>%
     group_by(X) %>%
-    mutate(Number1 = rollapply(Number1, 3, length, partial = TRUE, align = "center"),
-           Number2 = rollapply(Number2, 3, length, partial = TRUE, align = "center"))
+    mutate(
+      Number1 = rollapply(Number1, 3, length, partial = TRUE, align = "center"),
+      Number2 = rollapply(Number2, 3, length, partial = TRUE, align = "center")
+    )
   new_th4c <- data$th4$
     update_by(uby_rolling_count_tick(c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = "X")
   expect_equal(as.data.frame(new_th4c), as.data.frame(new_tb4c))
-  
+
   new_tb5a <- data$df5 %>%
     group_by(Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, length, partial = TRUE, align = "right"),
-           Number2 = rollapply(Number2, 3, length, partial = TRUE, align = "right"))
+    mutate(
+      Number1 = rollapply(Number1, 3, length, partial = TRUE, align = "right"),
+      Number2 = rollapply(Number2, 3, length, partial = TRUE, align = "right")
+    )
   new_th5a <- data$th5$
     update_by(uby_rolling_count_tick(c("Number1", "Number2"), rev_ticks = 3), by = "Y")
   expect_equal(as.data.frame(new_th5a), as.data.frame(new_tb5a))
-  
+
   new_tb5b <- data$df5 %>%
     group_by(Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, length, partial = TRUE, align = "left"),
-           Number2 = rollapply(Number2, 3, length, partial = TRUE, align = "left"))
+    mutate(
+      Number1 = rollapply(Number1, 3, length, partial = TRUE, align = "left"),
+      Number2 = rollapply(Number2, 3, length, partial = TRUE, align = "left")
+    )
   new_th5b <- data$th5$
     update_by(uby_rolling_count_tick(c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = "Y")
   expect_equal(as.data.frame(new_th5b), as.data.frame(new_tb5b))
-  
+
   new_tb5c <- data$df5 %>%
     group_by(Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, length, partial = TRUE, align = "center"),
-           Number2 = rollapply(Number2, 3, length, partial = TRUE, align = "center"))
+    mutate(
+      Number1 = rollapply(Number1, 3, length, partial = TRUE, align = "center"),
+      Number2 = rollapply(Number2, 3, length, partial = TRUE, align = "center")
+    )
   new_th5c <- data$th5$
     update_by(uby_rolling_count_tick(c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = "Y")
   expect_equal(as.data.frame(new_th5c), as.data.frame(new_tb5c))
-  
+
   new_tb6a <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, length, partial = TRUE, align = "right"),
-           Number2 = rollapply(Number2, 3, length, partial = TRUE, align = "right"))
+    mutate(
+      Number1 = rollapply(Number1, 3, length, partial = TRUE, align = "right"),
+      Number2 = rollapply(Number2, 3, length, partial = TRUE, align = "right")
+    )
   new_th6a <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_count_tick(c("Number1", "Number2"), rev_ticks = 3), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6a), as.data.frame(new_tb6a))
-  
+
   new_tb6b <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, length, partial = TRUE, align = "left"),
-           Number2 = rollapply(Number2, 3, length, partial = TRUE, align = "left"))
+    mutate(
+      Number1 = rollapply(Number1, 3, length, partial = TRUE, align = "left"),
+      Number2 = rollapply(Number2, 3, length, partial = TRUE, align = "left")
+    )
   new_th6b <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_count_tick(c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6b), as.data.frame(new_tb6b))
-  
+
   new_tb6c <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, length, partial = TRUE, align = "center"),
-           Number2 = rollapply(Number2, 3, length, partial = TRUE, align = "center"))
+    mutate(
+      Number1 = rollapply(Number1, 3, length, partial = TRUE, align = "center"),
+      Number2 = rollapply(Number2, 3, length, partial = TRUE, align = "center")
+    )
   new_th6c <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_count_tick(c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6c), as.data.frame(new_tb6c))
-  
+
   data$client$close()
 })
 
 test_that("uby_rolling_count_time behaves as expected", {
   data <- setup()
-  
-  custom_count <- function(x) {return(sum(!is.na(x)))}
-  
+
+  custom_count <- function(x) {
+    return(sum(!is.na(x)))
+  }
+
   new_tb1a <- head(data$df3, 500) %>%
-    mutate(int_col = rollapply(int_col, 9, length, partial=TRUE, align="right"))
+    mutate(int_col = rollapply(int_col, 9, length, partial = TRUE, align = "right"))
   new_th1a <- head(data$th3, 500)$
     update_by(uby_rolling_count_time("time_col", "int_col", "PT8s"))
   expect_equal(as.data.frame(new_th1a), as.data.frame(new_tb1a))
-  
+
   new_tb1b <- head(data$df3, 500) %>%
-    mutate(int_col = rollapply(int_col, 9, length, partial=TRUE, align="left"))
+    mutate(int_col = rollapply(int_col, 9, length, partial = TRUE, align = "left"))
   new_th1b <- head(data$th3, 500)$
     update_by(uby_rolling_count_time("time_col", "int_col", "PT0s", "PT8s"))
   expect_equal(as.data.frame(new_th1b), as.data.frame(new_tb1b))
-  
+
   new_tb1c <- head(data$df3, 500) %>%
-    mutate(int_col = rollapply(int_col, 9, length, partial=TRUE, align="center"))
+    mutate(int_col = rollapply(int_col, 9, length, partial = TRUE, align = "center"))
   new_th1c <- head(data$th3, 500)$
     update_by(uby_rolling_count_time("time_col", "int_col", "PT4s", "PT4s"))
   expect_equal(as.data.frame(new_th1c), as.data.frame(new_tb1c))
-  
+
   new_tb2a <- head(data$df3, 500) %>%
-    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width=9, FUN=custom_count, partial=TRUE, align="right"))
+    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width = 9, FUN = custom_count, partial = TRUE, align = "right"))
   new_th2a <- head(data$th3, 500)$
     update_by(uby_rolling_count_time("time_col", "int_col", "PT8s"), by = "bool_col")
   expect_equal(as.data.frame(new_th2a), as.data.frame(new_tb2a))
-  
+
   new_tb2b <- head(data$df3, 500) %>%
-    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width=9, FUN=custom_count, partial=TRUE, align="left"))
+    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width = 9, FUN = custom_count, partial = TRUE, align = "left"))
   new_th2b <- head(data$th3, 500)$
     update_by(uby_rolling_count_time("time_col", "int_col", "PT0s", "PT8s"), by = "bool_col")
   expect_equal(as.data.frame(new_th2b), as.data.frame(new_tb2b))
-  
+
   new_tb2c <- head(data$df3, 500) %>%
-    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width=9, FUN=custom_count, partial=TRUE, align="center"))
+    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width = 9, FUN = custom_count, partial = TRUE, align = "center"))
   new_th2c <- head(data$th3, 500)$
     update_by(uby_rolling_count_time("time_col", "int_col", "PT4s", "PT4s"), by = "bool_col")
   expect_equal(as.data.frame(new_th2b), as.data.frame(new_tb2b))
-  
+
   data$client$close()
 })
 
@@ -1958,22 +2120,28 @@ test_that("uby_rolling_std_tick behaves as expected", {
   expect_equal(as.data.frame(new_th1c), as.data.frame(new_tb1c))
 
   new_tb2a <- data$df2 %>%
-    mutate(col1 = rollapply(col1, 5, sd, partial = TRUE, align = "right"),
-           col3 = rollapply(col3, 5, sd, partial = TRUE, align = "right"))
+    mutate(
+      col1 = rollapply(col1, 5, sd, partial = TRUE, align = "right"),
+      col3 = rollapply(col3, 5, sd, partial = TRUE, align = "right")
+    )
   new_th2a <- data$th2$
     update_by(uby_rolling_std_tick(c("col1", "col3"), rev_ticks = 5))
   expect_equal(as.data.frame(new_th2a), as.data.frame(new_tb2a))
 
   new_tb2b <- data$df2 %>%
-    mutate(col1 = rollapply(col1, 5, sd, partial = TRUE, align = "left"),
-           col3 = rollapply(col3, 5, sd, partial = TRUE, align = "left"))
+    mutate(
+      col1 = rollapply(col1, 5, sd, partial = TRUE, align = "left"),
+      col3 = rollapply(col3, 5, sd, partial = TRUE, align = "left")
+    )
   new_th2b <- data$th2$
     update_by(uby_rolling_std_tick(c("col1", "col3"), rev_ticks = 1, fwd_ticks = 4))
   expect_equal(as.data.frame(new_th2b), as.data.frame(new_tb2b))
 
   new_tb2c <- data$df2 %>%
-    mutate(col1 = rollapply(col1, 5, sd, partial = TRUE, align = "center"),
-           col3 = rollapply(col3, 5, sd, partial = TRUE, align = "center"))
+    mutate(
+      col1 = rollapply(col1, 5, sd, partial = TRUE, align = "center"),
+      col3 = rollapply(col3, 5, sd, partial = TRUE, align = "center")
+    )
   new_th2c <- data$th2$
     update_by(uby_rolling_std_tick(c("col1", "col3"), rev_ticks = 3, fwd_ticks = 2))
   expect_equal(as.data.frame(new_th2c), as.data.frame(new_tb2c))
@@ -2001,72 +2169,90 @@ test_that("uby_rolling_std_tick behaves as expected", {
 
   new_tb4a <- data$df4 %>%
     group_by(X) %>%
-    mutate(Number1 = rollapply(Number1, 3, sd, partial = TRUE, align = "right"),
-           Number2 = rollapply(Number2, 3, sd, partial = TRUE, align = "right"))
+    mutate(
+      Number1 = rollapply(Number1, 3, sd, partial = TRUE, align = "right"),
+      Number2 = rollapply(Number2, 3, sd, partial = TRUE, align = "right")
+    )
   new_th4a <- data$th4$
     update_by(uby_rolling_std_tick(c("Number1", "Number2"), rev_ticks = 3), by = "X")
   expect_equal(as.data.frame(new_th4a), as.data.frame(new_tb4a))
 
   new_tb4b <- data$df4 %>%
     group_by(X) %>%
-    mutate(Number1 = rollapply(Number1, 3, sd, partial = TRUE, align = "left"),
-           Number2 = rollapply(Number2, 3, sd, partial = TRUE, align = "left"))
+    mutate(
+      Number1 = rollapply(Number1, 3, sd, partial = TRUE, align = "left"),
+      Number2 = rollapply(Number2, 3, sd, partial = TRUE, align = "left")
+    )
   new_th4b <- data$th4$
     update_by(uby_rolling_std_tick(c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = "X")
   expect_equal(as.data.frame(new_th4b), as.data.frame(new_tb4b))
 
   new_tb4c <- data$df4 %>%
     group_by(X) %>%
-    mutate(Number1 = rollapply(Number1, 3, sd, partial = TRUE, align = "center"),
-           Number2 = rollapply(Number2, 3, sd, partial = TRUE, align = "center"))
+    mutate(
+      Number1 = rollapply(Number1, 3, sd, partial = TRUE, align = "center"),
+      Number2 = rollapply(Number2, 3, sd, partial = TRUE, align = "center")
+    )
   new_th4c <- data$th4$
     update_by(uby_rolling_std_tick(c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = "X")
   expect_equal(as.data.frame(new_th4c), as.data.frame(new_tb4c))
 
   new_tb5a <- data$df5 %>%
     group_by(Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, sd, partial = TRUE, align = "right"),
-           Number2 = rollapply(Number2, 3, sd, partial = TRUE, align = "right"))
+    mutate(
+      Number1 = rollapply(Number1, 3, sd, partial = TRUE, align = "right"),
+      Number2 = rollapply(Number2, 3, sd, partial = TRUE, align = "right")
+    )
   new_th5a <- data$th5$
     update_by(uby_rolling_std_tick(c("Number1", "Number2"), rev_ticks = 3), by = "Y")
   expect_equal(as.data.frame(new_th5a), as.data.frame(new_tb5a))
 
   new_tb5b <- data$df5 %>%
     group_by(Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, sd, partial = TRUE, align = "left"),
-           Number2 = rollapply(Number2, 3, sd, partial = TRUE, align = "left"))
+    mutate(
+      Number1 = rollapply(Number1, 3, sd, partial = TRUE, align = "left"),
+      Number2 = rollapply(Number2, 3, sd, partial = TRUE, align = "left")
+    )
   new_th5b <- data$th5$
     update_by(uby_rolling_std_tick(c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = "Y")
   expect_equal(as.data.frame(new_th5b), as.data.frame(new_tb5b))
 
   new_tb5c <- data$df5 %>%
     group_by(Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, sd, partial = TRUE, align = "center"),
-           Number2 = rollapply(Number2, 3, sd, partial = TRUE, align = "center"))
+    mutate(
+      Number1 = rollapply(Number1, 3, sd, partial = TRUE, align = "center"),
+      Number2 = rollapply(Number2, 3, sd, partial = TRUE, align = "center")
+    )
   new_th5c <- data$th5$
     update_by(uby_rolling_std_tick(c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = "Y")
   expect_equal(as.data.frame(new_th5c), as.data.frame(new_tb5c))
 
   new_tb6a <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, sd, partial = TRUE, align = "right"),
-           Number2 = rollapply(Number2, 3, sd, partial = TRUE, align = "right"))
+    mutate(
+      Number1 = rollapply(Number1, 3, sd, partial = TRUE, align = "right"),
+      Number2 = rollapply(Number2, 3, sd, partial = TRUE, align = "right")
+    )
   new_th6a <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_std_tick(c("Number1", "Number2"), rev_ticks = 3), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6a), as.data.frame(new_tb6a))
 
   new_tb6b <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, sd, partial = TRUE, align = "left"),
-           Number2 = rollapply(Number2, 3, sd, partial = TRUE, align = "left"))
+    mutate(
+      Number1 = rollapply(Number1, 3, sd, partial = TRUE, align = "left"),
+      Number2 = rollapply(Number2, 3, sd, partial = TRUE, align = "left")
+    )
   new_th6b <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_std_tick(c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6b), as.data.frame(new_tb6b))
 
   new_tb6c <- rbind(data$df4, data$df5, data$df4, data$df5) %>%
     group_by(X, Y) %>%
-    mutate(Number1 = rollapply(Number1, 3, sd, partial = TRUE, align = "center"),
-           Number2 = rollapply(Number2, 3, sd, partial = TRUE, align = "center"))
+    mutate(
+      Number1 = rollapply(Number1, 3, sd, partial = TRUE, align = "center"),
+      Number2 = rollapply(Number2, 3, sd, partial = TRUE, align = "center")
+    )
   new_th6c <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_std_tick(c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = c("X", "Y"))
   expect_equal(as.data.frame(new_th6c), as.data.frame(new_tb6c))
@@ -2078,37 +2264,37 @@ test_that("uby_rolling_std_time behaves as expected", {
   data <- setup()
 
   new_tb1a <- head(data$df3, 500) %>%
-    mutate(int_col = rollapply(int_col, 9, sd, partial=TRUE, align="right"))
+    mutate(int_col = rollapply(int_col, 9, sd, partial = TRUE, align = "right"))
   new_th1a <- head(data$th3, 500)$
     update_by(uby_rolling_std_time("time_col", "int_col", "PT8s"))
   expect_equal(as.data.frame(new_th1a), as.data.frame(new_tb1a))
-  
+
   new_tb1b <- head(data$df3, 500) %>%
-    mutate(int_col = rollapply(int_col, 9, sd, partial=TRUE, align="left"))
+    mutate(int_col = rollapply(int_col, 9, sd, partial = TRUE, align = "left"))
   new_th1b <- head(data$th3, 500)$
     update_by(uby_rolling_std_time("time_col", "int_col", "PT0s", "PT8s"))
   expect_equal(as.data.frame(new_th1b), as.data.frame(new_tb1b))
-  
+
   new_tb1c <- head(data$df3, 500) %>%
-    mutate(int_col = rollapply(int_col, 9, sd, partial=TRUE, align="center"))
+    mutate(int_col = rollapply(int_col, 9, sd, partial = TRUE, align = "center"))
   new_th1c <- head(data$th3, 500)$
     update_by(uby_rolling_std_time("time_col", "int_col", "PT4s", "PT4s"))
   expect_equal(as.data.frame(new_th1c), as.data.frame(new_tb1c))
-  
+
   new_tb2a <- head(data$df3, 500) %>%
-    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width=9, FUN=sd, partial=TRUE, align="right", na.rm=TRUE))
+    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width = 9, FUN = sd, partial = TRUE, align = "right", na.rm = TRUE))
   new_th2a <- head(data$th3, 500)$
     update_by(uby_rolling_std_time("time_col", "int_col", "PT8s"), by = "bool_col")
   expect_equal(as.data.frame(new_th2a), as.data.frame(new_tb2a))
-  
+
   new_tb2b <- head(data$df3, 500) %>%
-    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width=9, FUN=sd, partial=TRUE, align="left", na.rm=TRUE))
+    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width = 9, FUN = sd, partial = TRUE, align = "left", na.rm = TRUE))
   new_th2b <- head(data$th3, 500)$
     update_by(uby_rolling_std_time("time_col", "int_col", "PT0s", "PT8s"), by = "bool_col")
   expect_equal(as.data.frame(new_th2b), as.data.frame(new_tb2b))
-  
+
   new_tb2c <- head(data$df3, 500) %>%
-    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width=9, FUN=sd, partial=TRUE, align="center", na.rm=TRUE))
+    mutate(int_col = custom_rolling_time_op(int_col, bool_col, width = 9, FUN = sd, partial = TRUE, align = "center", na.rm = TRUE))
   new_th2c <- head(data$th3, 500)$
     update_by(uby_rolling_std_time("time_col", "int_col", "PT4s", "PT4s"), by = "bool_col")
   expect_equal(as.data.frame(new_th2b), as.data.frame(new_tb2b))
@@ -2118,10 +2304,10 @@ test_that("uby_rolling_std_time behaves as expected", {
 
 test_that("uby_rolling_wavg_tick behaves as expected", {
   data <- setup()
-  
+
   # There is not a clean analog to our grouped weighted average in R, so we create
   # these tables directly
-  
+
   new_df1a <- data.frame(
     string_col = c("I", "am", "a", "string", "column"),
     int_col = c(0, 1, 2, 3, 4),
@@ -2130,7 +2316,7 @@ test_that("uby_rolling_wavg_tick behaves as expected", {
   new_th1a <- data$th1$
     update_by(uby_rolling_wavg_tick("int_col", "dbl_col", rev_ticks = 3))
   expect_true(all.equal(as.data.frame(new_th1a), new_df1a, tolerance = 1e-4))
-  
+
   new_df1b <- data.frame(
     string_col = c("I", "am", "a", "string", "column"),
     int_col = c(0, 1, 2, 3, 4),
@@ -2139,7 +2325,7 @@ test_that("uby_rolling_wavg_tick behaves as expected", {
   new_th1b <- data$th1$
     update_by(uby_rolling_wavg_tick("int_col", "dbl_col", rev_ticks = 1, fwd_ticks = 2))
   expect_true(all.equal(as.data.frame(new_th1b), new_df1b, tolerance = 1e-4))
-  
+
   new_df1c <- data.frame(
     string_col = c("I", "am", "a", "string", "column"),
     int_col = c(0, 1, 2, 3, 4),
@@ -2148,7 +2334,7 @@ test_that("uby_rolling_wavg_tick behaves as expected", {
   new_th1c <- data$th1$
     update_by(uby_rolling_wavg_tick("int_col", "dbl_col", rev_ticks = 2, fwd_ticks = 1))
   expect_true(all.equal(as.data.frame(new_th1c), new_df1c, tolerance = 1e-4))
-  
+
   new_df4a <- data.frame(
     X = c("A", "B", "A", "C", "B", "A", "B", "B", "C"),
     Y = c("M", "N", "O", "N", "P", "M", "O", "P", "M"),
@@ -2158,7 +2344,7 @@ test_that("uby_rolling_wavg_tick behaves as expected", {
   new_th4a <- data$th4$
     update_by(uby_rolling_wavg_tick("Number1", c("Number1", "Number2"), rev_ticks = 3), by = "X")
   expect_true(all.equal(as.data.frame(new_th4a), new_df4a, tolerance = 1e-4))
-  
+
   new_df4b <- data.frame(
     X = c("A", "B", "A", "C", "B", "A", "B", "B", "C"),
     Y = c("M", "N", "O", "N", "P", "M", "O", "P", "M"),
@@ -2168,7 +2354,7 @@ test_that("uby_rolling_wavg_tick behaves as expected", {
   new_th4b <- data$th4$
     update_by(uby_rolling_wavg_tick("Number1", c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = "X")
   expect_true(all.equal(as.data.frame(new_th4b), new_df4b, tolerance = 1e-4))
-  
+
   new_df4c <- data.frame(
     X = c("A", "B", "A", "C", "B", "A", "B", "B", "C"),
     Y = c("M", "N", "O", "N", "P", "M", "O", "P", "M"),
@@ -2178,7 +2364,7 @@ test_that("uby_rolling_wavg_tick behaves as expected", {
   new_th4c <- data$th4$
     update_by(uby_rolling_wavg_tick("Number1", c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = "X")
   expect_true(all.equal(as.data.frame(new_th4c), new_df4c, tolerance = 1e-4))
-  
+
   new_df5a <- data.frame(
     X = c("B", "C", "B", "A", "A", "C", "B", "C", "B", "A"),
     Y = c("N", "N", "M", "P", "O", "P", "O", "N", "O", "O"),
@@ -2188,7 +2374,7 @@ test_that("uby_rolling_wavg_tick behaves as expected", {
   new_th5a <- data$th5$
     update_by(uby_rolling_wavg_tick("Number2", c("Number1", "Number2"), rev_ticks = 3), by = "Y")
   expect_true(all.equal(as.data.frame(new_th5a), new_df5a, tolerance = 1e-4))
-  
+
   new_df5b <- data.frame(
     X = c("B", "C", "B", "A", "A", "C", "B", "C", "B", "A"),
     Y = c("N", "N", "M", "P", "O", "P", "O", "N", "O", "O"),
@@ -2198,7 +2384,7 @@ test_that("uby_rolling_wavg_tick behaves as expected", {
   new_th5b <- data$th5$
     update_by(uby_rolling_wavg_tick("Number2", c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = "Y")
   expect_true(all.equal(as.data.frame(new_th5b), new_df5b, tolerance = 1e-4))
-  
+
   new_df5c <- data.frame(
     X = c("B", "C", "B", "A", "A", "C", "B", "C", "B", "A"),
     Y = c("N", "N", "M", "P", "O", "P", "O", "N", "O", "O"),
@@ -2208,205 +2394,243 @@ test_that("uby_rolling_wavg_tick behaves as expected", {
   new_th5c <- data$th5$
     update_by(uby_rolling_wavg_tick("Number2", c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = "Y")
   expect_true(all.equal(as.data.frame(new_th5c), new_df5c, tolerance = 1e-4))
-  
+
   new_df6a <- data.frame(
-    X = c("A", "B", "A", "C", "B", "A", "B", "B", "C", "B",
-          "C", "B", "A", "A", "C", "B", "C", "B", "A", "A",
-          "B", "A", "C", "B", "A", "B", "B", "C", "B", "C",
-          "B", "A", "A", "C", "B", "C", "B", "A"),
-    Y = c("M", "N", "O", "N", "P", "M", "O", "P", "M", "N",
-          "N", "M", "P", "O", "P", "O", "N", "O", "O", "M",
-          "N", "O", "N", "P", "M", "O", "P", "M", "N", "N",
-          "M", "P", "O", "P", "O", "N", "O", "O"),
-    Number1 = c(100.00000, -44.00000, 49.00000, 11.00000, -66.00000,
-                83.33333, 29.00000, -97.50000, -70.00000, 451.00000,
-                63.91566, 86.00000, -45.00000, 48.04000, NA, 320.49733,
-                529.44444, 274.13742, 53.93333, 90.00000,
-                -209.00000, 53.93333, 529.44444, -79.26316,
-                75.00000, 274.13742, -166.80000, -70.00000, 121.00000,
-                529.44444, 86.00000, -45.00000, 53.93333, NA, 274.13742,
-                529.44444, 274.13742, 53.93333),
-    Number2 = c(-55.00000, 76.00000, 20.00000, 130.00000, 230.00000,
-                -53.33333, 73.00000, 264.87500, 214.00000, 76.00000,
-                20.69880, -6.00000, 34.00000, 19.84000, NA, 47.17112,
-                113.50000, 44.41438, 21.37778, -54.00000,
-                76.00000, 21.37778, 113.50000, 244.68421,
-                -52.50000, 44.41438, 341.60000, 214.00000, 76.00000,
-                113.50000, -6.00000, 34.00000, 21.37778, NA, 44.41438,
-                113.50000, 44.41438, 21.37778)
+    X = c(
+      "A", "B", "A", "C", "B", "A", "B", "B", "C", "B",
+      "C", "B", "A", "A", "C", "B", "C", "B", "A", "A",
+      "B", "A", "C", "B", "A", "B", "B", "C", "B", "C",
+      "B", "A", "A", "C", "B", "C", "B", "A"
+    ),
+    Y = c(
+      "M", "N", "O", "N", "P", "M", "O", "P", "M", "N",
+      "N", "M", "P", "O", "P", "O", "N", "O", "O", "M",
+      "N", "O", "N", "P", "M", "O", "P", "M", "N", "N",
+      "M", "P", "O", "P", "O", "N", "O", "O"
+    ),
+    Number1 = c(
+      100.00000, -44.00000, 49.00000, 11.00000, -66.00000,
+      83.33333, 29.00000, -97.50000, -70.00000, 451.00000,
+      63.91566, 86.00000, -45.00000, 48.04000, NA, 320.49733,
+      529.44444, 274.13742, 53.93333, 90.00000,
+      -209.00000, 53.93333, 529.44444, -79.26316,
+      75.00000, 274.13742, -166.80000, -70.00000, 121.00000,
+      529.44444, 86.00000, -45.00000, 53.93333, NA, 274.13742,
+      529.44444, 274.13742, 53.93333
+    ),
+    Number2 = c(
+      -55.00000, 76.00000, 20.00000, 130.00000, 230.00000,
+      -53.33333, 73.00000, 264.87500, 214.00000, 76.00000,
+      20.69880, -6.00000, 34.00000, 19.84000, NA, 47.17112,
+      113.50000, 44.41438, 21.37778, -54.00000,
+      76.00000, 21.37778, 113.50000, 244.68421,
+      -52.50000, 44.41438, 341.60000, 214.00000, 76.00000,
+      113.50000, -6.00000, 34.00000, 21.37778, NA, 44.41438,
+      113.50000, 44.41438, 21.37778
+    )
   )
   new_th6a <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_wavg_tick("Number1", c("Number1", "Number2"), rev_ticks = 3), by = c("X", "Y"))
   expect_true(all.equal(as.data.frame(new_th6a), new_df6a, tolerance = 1e-4))
-  
+
   new_df6b <- data.frame(
-    X = c("A", "B", "A", "C", "B", "A", "B", "B", "C", "B",
-          "C", "B", "A", "A", "C", "B", "C", "B", "A", "A",
-          "B", "A", "C", "B", "A", "B", "B", "C", "B", "C",
-          "B", "A", "A", "C", "B", "C", "B", "A"),
-    Y = c("M", "N", "O", "N", "P", "M", "O", "P", "M", "N",
-          "N", "M", "P", "O", "P", "O", "N", "O", "O", "M",
-          "N", "O", "N", "P", "M", "O", "P", "M", "N", "N",
-          "M", "P", "O", "P", "O", "N", "O", "O"),
-    Number1 = c(90.00000, -209.00000, 53.93333, 529.44444, -79.26316,
-                75.00000, 274.13742, -166.80000, -70.00000, 121.00000,
-                529.44444, 86.00000, -45.00000, 53.93333, NA, 274.13742,
-                529.44444, 274.13742, 53.93333, 83.33333,
-                451.00000, 53.93333, 529.44444, -97.50000,
-                50.00000, 274.13742, 18.00000, -70.00000, 55.00000,
-                1344.14286, 86.00000, -45.00000, -6.50000, NA, 290.14865,
-                -65.00000, 99.00000, -5.00000),
-    Number2 = c(-54.00000, 76.00000, 21.37778, 113.50000, 244.68421,
-                -52.50000, 44.41438, 341.60000, 214.00000, 76.00000,
-                113.50000, -6.00000, 34.00000, 21.37778, NA, 44.41438,
-                113.50000, 44.41438, 21.37778, -53.33333,
-                76.00000, 21.37778, 113.50000, 264.87500,
-                -50.00000, 44.41438, 137.00000, 214.00000, 76.00000,
-                87.57143, -6.00000, 34.00000, 4.50000, NA, 42.54730,
-                -5.00000, 34.00000, 6.00000)
+    X = c(
+      "A", "B", "A", "C", "B", "A", "B", "B", "C", "B",
+      "C", "B", "A", "A", "C", "B", "C", "B", "A", "A",
+      "B", "A", "C", "B", "A", "B", "B", "C", "B", "C",
+      "B", "A", "A", "C", "B", "C", "B", "A"
+    ),
+    Y = c(
+      "M", "N", "O", "N", "P", "M", "O", "P", "M", "N",
+      "N", "M", "P", "O", "P", "O", "N", "O", "O", "M",
+      "N", "O", "N", "P", "M", "O", "P", "M", "N", "N",
+      "M", "P", "O", "P", "O", "N", "O", "O"
+    ),
+    Number1 = c(
+      90.00000, -209.00000, 53.93333, 529.44444, -79.26316,
+      75.00000, 274.13742, -166.80000, -70.00000, 121.00000,
+      529.44444, 86.00000, -45.00000, 53.93333, NA, 274.13742,
+      529.44444, 274.13742, 53.93333, 83.33333,
+      451.00000, 53.93333, 529.44444, -97.50000,
+      50.00000, 274.13742, 18.00000, -70.00000, 55.00000,
+      1344.14286, 86.00000, -45.00000, -6.50000, NA, 290.14865,
+      -65.00000, 99.00000, -5.00000
+    ),
+    Number2 = c(
+      -54.00000, 76.00000, 21.37778, 113.50000, 244.68421,
+      -52.50000, 44.41438, 341.60000, 214.00000, 76.00000,
+      113.50000, -6.00000, 34.00000, 21.37778, NA, 44.41438,
+      113.50000, 44.41438, 21.37778, -53.33333,
+      76.00000, 21.37778, 113.50000, 264.87500,
+      -50.00000, 44.41438, 137.00000, 214.00000, 76.00000,
+      87.57143, -6.00000, 34.00000, 4.50000, NA, 42.54730,
+      -5.00000, 34.00000, 6.00000
+    )
   )
   new_th6b <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_wavg_tick("Number1", c("Number1", "Number2"), rev_ticks = 1, fwd_ticks = 2), by = c("X", "Y"))
   expect_true(all.equal(as.data.frame(new_th6b), new_df6b, tolerance = 1e-4))
-  
+
   new_df6c <- data.frame(
-    X = c("A", "B", "A", "C", "B", "A", "B", "B", "C", "B",
-          "C", "B", "A", "A", "C", "B", "C", "B", "A", "A",
-          "B", "A", "C", "B", "A", "B", "B", "C", "B", "C",
-          "B", "A", "A", "C", "B", "C", "B", "A"),
-    Y = c("M", "N", "O", "N", "P", "M", "O", "P", "M", "N",
-          "N", "M", "P", "O", "P", "O", "N", "O", "O", "M",
-          "N", "O", "N", "P", "M", "O", "P", "M", "N", "N",
-          "M", "P", "O", "P", "O", "N", "O", "O"),
-    Number1 = c(83.33333, 451.00000, 48.04000, 63.91566, -97.50000,
-                90.00000, 320.49733, -79.26316, -70.00000, -209.00000,
-                529.44444, 86.00000, -45.00000, 53.93333, NA, 274.13742,
-                529.44444, 274.13742, 53.93333, 75.00000,
-                121.00000, 53.93333, 529.44444, -166.80000,
-                83.33333, 274.13742, -97.50000, -70.00000, 451.00000,
-                529.44444, 86.00000, -45.00000, 53.93333, NA, 274.13742,
-                1344.14286, 290.14865, -6.50000),
-    Number2 = c(-53.33333, 76.00000, 19.84000, 20.69880, 264.87500,
-                -54.00000, 47.17112, 244.68421, 214.00000, 76.00000,
-                113.50000, -6.00000, 34.00000, 21.37778, NA, 44.41438,
-                113.50000, 44.41438, 21.37778, -52.50000,
-                76.00000, 21.37778, 113.50000, 341.60000,
-                -53.33333, 44.41438, 264.87500, 214.00000, 76.00000,
-                113.50000, -6.00000, 34.00000, 21.37778, NA, 44.41438,
-                87.57143, 42.54730, 4.50000)
+    X = c(
+      "A", "B", "A", "C", "B", "A", "B", "B", "C", "B",
+      "C", "B", "A", "A", "C", "B", "C", "B", "A", "A",
+      "B", "A", "C", "B", "A", "B", "B", "C", "B", "C",
+      "B", "A", "A", "C", "B", "C", "B", "A"
+    ),
+    Y = c(
+      "M", "N", "O", "N", "P", "M", "O", "P", "M", "N",
+      "N", "M", "P", "O", "P", "O", "N", "O", "O", "M",
+      "N", "O", "N", "P", "M", "O", "P", "M", "N", "N",
+      "M", "P", "O", "P", "O", "N", "O", "O"
+    ),
+    Number1 = c(
+      83.33333, 451.00000, 48.04000, 63.91566, -97.50000,
+      90.00000, 320.49733, -79.26316, -70.00000, -209.00000,
+      529.44444, 86.00000, -45.00000, 53.93333, NA, 274.13742,
+      529.44444, 274.13742, 53.93333, 75.00000,
+      121.00000, 53.93333, 529.44444, -166.80000,
+      83.33333, 274.13742, -97.50000, -70.00000, 451.00000,
+      529.44444, 86.00000, -45.00000, 53.93333, NA, 274.13742,
+      1344.14286, 290.14865, -6.50000
+    ),
+    Number2 = c(
+      -53.33333, 76.00000, 19.84000, 20.69880, 264.87500,
+      -54.00000, 47.17112, 244.68421, 214.00000, 76.00000,
+      113.50000, -6.00000, 34.00000, 21.37778, NA, 44.41438,
+      113.50000, 44.41438, 21.37778, -52.50000,
+      76.00000, 21.37778, 113.50000, 341.60000,
+      -53.33333, 44.41438, 264.87500, 214.00000, 76.00000,
+      113.50000, -6.00000, 34.00000, 21.37778, NA, 44.41438,
+      87.57143, 42.54730, 4.50000
+    )
   )
   new_th6c <- merge_tables(data$th4, data$th5, data$th4, data$th5)$
     update_by(uby_rolling_wavg_tick("Number1", c("Number1", "Number2"), rev_ticks = 2, fwd_ticks = 1), by = c("X", "Y"))
   expect_true(all.equal(as.data.frame(new_th6c), new_df6c, tolerance = 1e-4))
-  
+
   data$client$close()
 })
 
 test_that("uby_rolling_wavg_time behaves as expected", {
   data <- setup()
-  
+
   # Need to append a weight column to the df and th
   data$deterministic_df3 <- data$deterministic_df3 %>%
     mutate(weight_col = sqrt(int_col))
   data$deterministic_th3 <- data$deterministic_th3$
     update("weight_col = sqrt(int_col)")
-  
+
   base_df <- data.frame(
     time_col = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.001), by = "1 sec")[1:50],
-    bool_col = c(TRUE, TRUE, FALSE, FALSE, TRUE, FALSE, TRUE, FALSE, FALSE, TRUE,
-                 TRUE, TRUE, FALSE, FALSE, TRUE, FALSE, TRUE, FALSE, FALSE, TRUE,
-                 TRUE, TRUE, FALSE, FALSE, TRUE, FALSE, TRUE, FALSE, FALSE, TRUE,
-                 TRUE, TRUE, FALSE, FALSE, TRUE, FALSE, TRUE, FALSE, FALSE, TRUE,
-                 TRUE, TRUE, FALSE, FALSE, TRUE, FALSE, TRUE, FALSE, FALSE, TRUE)
+    bool_col = c(
+      TRUE, TRUE, FALSE, FALSE, TRUE, FALSE, TRUE, FALSE, FALSE, TRUE,
+      TRUE, TRUE, FALSE, FALSE, TRUE, FALSE, TRUE, FALSE, FALSE, TRUE,
+      TRUE, TRUE, FALSE, FALSE, TRUE, FALSE, TRUE, FALSE, FALSE, TRUE,
+      TRUE, TRUE, FALSE, FALSE, TRUE, FALSE, TRUE, FALSE, FALSE, TRUE,
+      TRUE, TRUE, FALSE, FALSE, TRUE, FALSE, TRUE, FALSE, FALSE, TRUE
+    )
   )
-  
-  new_df1a <- cbind(base_df, "int_col" = c(1.000000, 1.585786, 2.176557, 2.769907, 3.364806, 3.960724,
-                                           4.557357, 5.154516, 5.752074, 6.599146, 7.501993, 8.433335,
-                                           9.381805, 10.341536, 11.309121, 12.282429, 13.260044, 14.240990,
-                                           15.224566, 16.210258, 17.197678, 18.186530, 19.176580, 20.167643,
-                                           21.159573, 22.152247, 23.145567, 24.139451, 25.133831, 26.128647,
-                                           27.123851, 28.119401, 29.115260, 30.111398, 31.107787, 32.104403,
-                                           33.101225, 34.098236, 35.095418, 36.092758, 37.090243, 38.087860,
-                                           39.085600, 40.083454, 41.081413, 42.079469, 43.077616, 44.075848,
-                                           45.074159, 46.072543))
+
+  new_df1a <- cbind(base_df, "int_col" = c(
+    1.000000, 1.585786, 2.176557, 2.769907, 3.364806, 3.960724,
+    4.557357, 5.154516, 5.752074, 6.599146, 7.501993, 8.433335,
+    9.381805, 10.341536, 11.309121, 12.282429, 13.260044, 14.240990,
+    15.224566, 16.210258, 17.197678, 18.186530, 19.176580, 20.167643,
+    21.159573, 22.152247, 23.145567, 24.139451, 25.133831, 26.128647,
+    27.123851, 28.119401, 29.115260, 30.111398, 31.107787, 32.104403,
+    33.101225, 34.098236, 35.095418, 36.092758, 37.090243, 38.087860,
+    39.085600, 40.083454, 41.081413, 42.079469, 43.077616, 44.075848,
+    45.074159, 46.072543
+  ))
   new_th1a <- head(data$deterministic_th3, 50)$
     update_by(uby_rolling_wavg_time("time_col", "weight_col", "int_col", "PT8s"))$
     drop_columns("weight_col")
   expect_equal(as.data.frame(new_th1a), new_df1a)
-  
-  new_df1b <- cbind(base_df, "int_col" = c(5.752074, 6.599146, 7.501993, 8.433335, 9.381805, 10.341536,
-                                           11.309121, 12.282429, 13.260044, 14.240990, 15.224566,
-                                           16.210258, 17.197678, 18.186530, 19.176580, 20.167643,
-                                           21.159573, 22.152247, 23.145567, 24.139451, 25.133831,
-                                           26.128647, 27.123851, 28.119401, 29.115260, 30.111398,
-                                           31.107787, 32.104403, 33.101225, 34.098236, 35.095418,
-                                           36.092758, 37.090243, 38.087860, 39.085600, 40.083454,
-                                           41.081413, 42.079469, 43.077616, 44.075848, 45.074159,
-                                           46.072543, 46.556499, 47.042580, 47.530715, 48.020839,
-                                           48.512889, 49.006803, 49.502525, 50.000000))
+
+  new_df1b <- cbind(base_df, "int_col" = c(
+    5.752074, 6.599146, 7.501993, 8.433335, 9.381805, 10.341536,
+    11.309121, 12.282429, 13.260044, 14.240990, 15.224566,
+    16.210258, 17.197678, 18.186530, 19.176580, 20.167643,
+    21.159573, 22.152247, 23.145567, 24.139451, 25.133831,
+    26.128647, 27.123851, 28.119401, 29.115260, 30.111398,
+    31.107787, 32.104403, 33.101225, 34.098236, 35.095418,
+    36.092758, 37.090243, 38.087860, 39.085600, 40.083454,
+    41.081413, 42.079469, 43.077616, 44.075848, 45.074159,
+    46.072543, 46.556499, 47.042580, 47.530715, 48.020839,
+    48.512889, 49.006803, 49.502525, 50.000000
+  ))
   new_th1b <- head(data$deterministic_th3, 50)$
     update_by(uby_rolling_wavg_time("time_col", "weight_col", "int_col", "PT0s", "PT8s"))$
     drop_columns("weight_col")
   expect_equal(as.data.frame(new_th1b), new_df1b)
-  
-  new_df1c <- cbind(base_df, "int_col" = c(3.364806, 3.960724, 4.557357, 5.154516, 5.752074, 6.599146,
-                                           7.501993, 8.433335, 9.381805, 10.341536, 11.309121,
-                                           12.282429, 13.260044, 14.240990, 15.224566, 16.210258,
-                                           17.197678, 18.186530, 19.176580, 20.167643, 21.159573,
-                                           22.152247, 23.145567, 24.139451, 25.133831, 26.128647,
-                                           27.123851, 28.119401, 29.115260, 30.111398, 31.107787,
-                                           32.104403, 33.101225, 34.098236, 35.095418, 36.092758,
-                                           37.090243, 38.087860, 39.085600, 40.083454, 41.081413,
-                                           42.079469, 43.077616, 44.075848, 45.074159, 46.072543,
-                                           46.556499, 47.042580, 47.530715, 48.020839))
+
+  new_df1c <- cbind(base_df, "int_col" = c(
+    3.364806, 3.960724, 4.557357, 5.154516, 5.752074, 6.599146,
+    7.501993, 8.433335, 9.381805, 10.341536, 11.309121,
+    12.282429, 13.260044, 14.240990, 15.224566, 16.210258,
+    17.197678, 18.186530, 19.176580, 20.167643, 21.159573,
+    22.152247, 23.145567, 24.139451, 25.133831, 26.128647,
+    27.123851, 28.119401, 29.115260, 30.111398, 31.107787,
+    32.104403, 33.101225, 34.098236, 35.095418, 36.092758,
+    37.090243, 38.087860, 39.085600, 40.083454, 41.081413,
+    42.079469, 43.077616, 44.075848, 45.074159, 46.072543,
+    46.556499, 47.042580, 47.530715, 48.020839
+  ))
   new_th1c <- head(data$deterministic_th3, 50)$
     update_by(uby_rolling_wavg_time("time_col", "weight_col", "int_col", "PT4s", "PT4s"))$
     drop_columns("weight_col")
   expect_equal(as.data.frame(new_th1c), new_df1c)
-  
-  new_df2a <- cbind(base_df, "int_col" = c(1.000000, 1.585786, 3.000000, 3.535898, 3.227496, 4.512320,
-                                           4.595515, 5.607180, 6.454681, 6.782586, 8.609158, 9.401493,
-                                           9.357245, 10.469018, 11.316537, 12.394463, 13.259445,
-                                           15.370444, 16.163520, 16.268778, 18.407602, 19.182620,
-                                           19.169030, 20.230497, 21.162927, 22.211264, 23.146719,
-                                           25.322767, 26.100235, 26.164183, 28.351280, 29.118634,
-                                           29.111044, 30.153311, 31.109992, 32.144589, 33.102407,
-                                           35.302156, 36.072311, 36.118326, 38.324651, 39.087902,
-                                           39.082723, 40.114920, 41.083063, 42.109955, 43.078675,
-                                           45.290649, 46.056564, 46.092521))
+
+  new_df2a <- cbind(base_df, "int_col" = c(
+    1.000000, 1.585786, 3.000000, 3.535898, 3.227496, 4.512320,
+    4.595515, 5.607180, 6.454681, 6.782586, 8.609158, 9.401493,
+    9.357245, 10.469018, 11.316537, 12.394463, 13.259445,
+    15.370444, 16.163520, 16.268778, 18.407602, 19.182620,
+    19.169030, 20.230497, 21.162927, 22.211264, 23.146719,
+    25.322767, 26.100235, 26.164183, 28.351280, 29.118634,
+    29.111044, 30.153311, 31.109992, 32.144589, 33.102407,
+    35.302156, 36.072311, 36.118326, 38.324651, 39.087902,
+    39.082723, 40.114920, 41.083063, 42.109955, 43.078675,
+    45.290649, 46.056564, 46.092521
+  ))
   new_th2a <- head(data$deterministic_th3, 50)$
     update_by(uby_rolling_wavg_time("time_col", "weight_col", "int_col", "PT8s"), by = "bool_col")$
     drop_columns("weight_col")
   expect_equal(as.data.frame(new_th2a), new_df2a)
-  
-  new_df2b <- cbind(base_df, "int_col" = c(4.595515, 6.782586, 6.454681, 7.036869, 9.401493, 10.469018,
-                                           11.316537, 12.394463, 13.260793, 13.259445, 13.956975,
-                                           16.268778, 16.163520, 16.861439, 19.182620, 20.230497,
-                                           21.162927, 22.211264, 23.144128, 23.146719, 23.869532,
-                                           26.164183, 26.100235, 26.819386, 29.118634, 30.153311,
-                                           31.109992, 32.144589, 33.099748, 33.102407, 33.834106,
-                                           36.118326, 36.072311, 36.800397, 39.087902, 40.114920,
-                                           41.083063, 42.109955, 43.076293, 43.078675, 43.814892,
-                                           46.092521, 46.056564, 46.789573, 47.377845, 47.683028,
-                                           48.523201, 48.502577, 49.000000, 50.000000))
+
+  new_df2b <- cbind(base_df, "int_col" = c(
+    4.595515, 6.782586, 6.454681, 7.036869, 9.401493, 10.469018,
+    11.316537, 12.394463, 13.260793, 13.259445, 13.956975,
+    16.268778, 16.163520, 16.861439, 19.182620, 20.230497,
+    21.162927, 22.211264, 23.144128, 23.146719, 23.869532,
+    26.164183, 26.100235, 26.819386, 29.118634, 30.153311,
+    31.109992, 32.144589, 33.099748, 33.102407, 33.834106,
+    36.118326, 36.072311, 36.800397, 39.087902, 40.114920,
+    41.083063, 42.109955, 43.076293, 43.078675, 43.814892,
+    46.092521, 46.056564, 46.789573, 47.377845, 47.683028,
+    48.523201, 48.502577, 49.000000, 50.000000
+  ))
   new_th2b <- head(data$deterministic_th3, 50)$
     update_by(uby_rolling_wavg_time("time_col", "weight_col", "int_col", "PT0s", "PT8s"), by = "bool_col")$
     drop_columns("weight_col")
   expect_equal(as.data.frame(new_th2b), new_df2b)
-  
-  new_df2c <- cbind(base_df, "int_col" = c(3.227496, 3.227496, 4.512320, 5.607180, 4.595515, 6.454681,
-                                           8.609158, 7.036869, 9.357245, 10.183304, 11.316537, 12.143151,
-                                           13.260793, 15.370444, 13.956975, 16.163520, 18.407602, 16.861439,
-                                           19.169030, 20.089214, 21.162927, 22.078589, 23.144128, 25.322767,
-                                           23.869532, 26.100235, 28.351280, 26.819386, 29.111044, 30.059047,
-                                           31.109992, 32.054206, 33.099748, 35.302156, 33.834106, 36.072311,
-                                           38.324651, 36.800397, 39.082723, 40.044139, 41.083063, 42.041378,
-                                           43.076293, 45.290649, 43.814892, 46.056564, 47.377845, 46.789573,
-                                           47.683028, 48.523201))
+
+  new_df2c <- cbind(base_df, "int_col" = c(
+    3.227496, 3.227496, 4.512320, 5.607180, 4.595515, 6.454681,
+    8.609158, 7.036869, 9.357245, 10.183304, 11.316537, 12.143151,
+    13.260793, 15.370444, 13.956975, 16.163520, 18.407602, 16.861439,
+    19.169030, 20.089214, 21.162927, 22.078589, 23.144128, 25.322767,
+    23.869532, 26.100235, 28.351280, 26.819386, 29.111044, 30.059047,
+    31.109992, 32.054206, 33.099748, 35.302156, 33.834106, 36.072311,
+    38.324651, 36.800397, 39.082723, 40.044139, 41.083063, 42.041378,
+    43.076293, 45.290649, 43.814892, 46.056564, 47.377845, 46.789573,
+    47.683028, 48.523201
+  ))
   new_th2c <- head(data$deterministic_th3, 50)$
     update_by(uby_rolling_wavg_time("time_col", "weight_col", "int_col", "PT4s", "PT4s"), by = "bool_col")$
     drop_columns("weight_col")
   expect_equal(as.data.frame(new_th2c), new_df2c)
-  
+
   data$client$close()
 })
diff --git a/R/rdeephaven/inst/tests/testthat/test_update_by_ops_wrappers.R b/R/rdeephaven/inst/tests/testthat/test_update_by_ops_wrappers.R
index 4fb7baed427..250f25078c8 100644
--- a/R/rdeephaven/inst/tests/testthat/test_update_by_ops_wrappers.R
+++ b/R/rdeephaven/inst/tests/testthat/test_update_by_ops_wrappers.R
@@ -1470,4 +1470,4 @@ test_that("uby_rolling_wavg_time fails nicely when 'fwd_time' is a bad type", {
     uby_rolling_wavg_time("PT0s", "wcol", "col", "PT0s", c("Many", "strings")),
     "'fwd_time' must be a single string. Got a vector of length 2."
   )
-})
\ No newline at end of file
+})
diff --git a/R/rdeephaven/man/AggBy.Rd b/R/rdeephaven/man/AggBy.Rd
deleted file mode 100644
index c9bd2a63f5a..00000000000
--- a/R/rdeephaven/man/AggBy.Rd
+++ /dev/null
@@ -1,101 +0,0 @@
-% Generated by roxygen2: do not edit by hand
-% Please edit documentation in R/agg_ops_wrapper.R
-\docType{class}
-\name{AggBy}
-\alias{AggBy}
-\title{Aggregations in Deephaven}
-\description{
-Table aggregations are a quintessential feature of Deephaven. You can apply as many aggregations as
-needed to static tables \emph{or} streaming tables, and if the parent tables are streaming, the resulting aggregated
-tables will update alongside their parent tables. It is also very easy to perform \emph{grouped} aggregations, which
-allow you to aggregate tables on a per-group basis.
-}
-\section{Apply aggregations to a table}{
-
-There are two methods for performing aggregations on a table, \code{agg_by()} and \code{agg_all_by()}. \code{agg_by()} allows you to
-perform many aggregations on specified columns, while \code{agg_all_by()} allows you to perform a single aggregation to
-every non-grouping column in the table. Both methods have an optional \code{by} parameter that is used to specify grouping columns.
-Here are some details on each method:
-\itemize{
-\item \code{TableHandle$agg_by(aggs, by)}: Creates a new table containing grouping columns and grouped data.
-The resulting grouped data is defined by the aggregation(s) specified.
-\item \code{TableHandle$agg_all_by(agg, by)}: Creates a new table containing grouping columns and grouped data.
-The resulting grouped data is defined by the aggregation specified. This method applies the aggregation to all
-non-grouping columns of the table, so it can only accept one aggregation at a time.
-}
-
-The \code{agg_by()} and \code{agg_all_by()} methods themselves do not know anything about the columns on which you want to
-perform aggregations. Rather, the desired columns are passed to individual \code{agg} functions, enabling you to apply
-various kinds of aggregations to different columns or groups of columns as needed.
-}
-
-\section{\code{agg} functions}{
-
-\code{agg} functions are used to perform aggregation calculations on grouped data by passing them to \code{agg_by()} or
-\code{agg_all_by()}. These functions are \emph{generators}, meaning they return \emph{functions} that the Deephaven engine knows
-how to interpret. We call the functions that they return \code{\link{AggOp}}s. These \code{AggOp}s are not R-level functions,
-but Deephaven-specific data types that perform all of the intensive calculations. Here is a list of all \code{agg} functions
-available in Deephaven:
-\itemize{
-\item \code{\link[=agg_first]{agg_first()}}
-\item \code{\link[=agg_last]{agg_last()}}
-\item \code{\link[=agg_min]{agg_min()}}
-\item \code{\link[=agg_max]{agg_max()}}
-\item \code{\link[=agg_sum]{agg_sum()}}
-\item \code{\link[=agg_abs_sum]{agg_abs_sum()}}
-\item \code{\link[=agg_avg]{agg_avg()}}
-\item \code{\link[=agg_w_avg]{agg_w_avg()}}
-\item \code{\link[=agg_median]{agg_median()}}
-\item \code{\link[=agg_var]{agg_var()}}
-\item \code{\link[=agg_std]{agg_std()}}
-\item \code{\link[=agg_percentile]{agg_percentile()}}
-\item \code{\link[=agg_count]{agg_count()}}
-}
-
-For more details on each aggregation function, click on one of the methods above or see the reference documentation
-by running \code{?agg_first}, \code{?agg_last}, etc.
-}
-
-\examples{
-\dontrun{
-library(rdeephaven)
-
-# connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
-
-# create data frame, push to server, retrieve TableHandle
-df <- data.frame(
-  X = c("A", "B", "A", "C", "B", "A", "B", "B", "C"),
-  Y = c("M", "N", "O", "N", "P", "M", "O", "P", "M"),
-  Number1 = c(100, -44, 49, 11, -66, 50, 29, 18, -70),
-  Number2 = c(-55, 76, 20, 130, 230, -50, 73, 137, 214)
-)
-th <- client$import_table(df)
-
-# get first and last elements of each column
-th1 <- th$
-  agg_by(agg_first(c("XFirst = X", "YFirst = Y", "Number1First = Number1", "Number2First = Number2")),
-         agg_last(c("XLast = X", "YLast = Y", "Number1Last = Number1", "Number2Last = Number2")))
-
-# compute mean and standard deviation of Number1 and Number2, grouped by X
-th2 <- th$
-  agg_by(
-    c(agg_avg(c("Number1Avg = Number1", "Number2Avg = Number2")),
-      agg_std(c("Number1Std = Number1", "Number2Std = Number2"))),
-    by="X")
-
-# compute maximum of all non-grouping columns, grouped by X and Y
-th3 <- th$
-  agg_all_by(agg_max(), by=c("X", "Y"))
-
-# compute minimum and maximum of Number1 and Number2 respectively grouped by Y
-th4 <- th$
-  agg_by(
-    c(agg_min("Number1Min = Number1"),
-      agg_max("Number2Max = Number2")),
-    by="Y")
-
-client$close()
-}
-
-}
diff --git a/R/rdeephaven/man/AggOp.Rd b/R/rdeephaven/man/AggOp.Rd
index 761ae21a628..166797cb209 100644
--- a/R/rdeephaven/man/AggOp.Rd
+++ b/R/rdeephaven/man/AggOp.Rd
@@ -5,9 +5,10 @@
 \alias{AggOp}
 \title{Deephaven AggOps}
 \description{
-An \code{AggOp} is the return type of one of Deephaven's \code{\link[=AggBy]{agg}} functions. It is a function that performs the
+An \code{AggOp} is the return type of one of Deephaven's \code{agg} functions. It is a function that performs the
 computation specified by the \code{agg} function. These are intended to be passed directly to \code{agg_by()} or \code{agg_all_by()},
-and should never be instantiated directly be user code.
+and should never be instantiated directly by user code. For more information, see the
+vignette on \code{agg} functions with \code{vignette("agg_by")}.
 
 If multiple tables have the same schema and the same aggregations need to be applied to each table, saving these
 objects directly in a variable may be useful to avoid having to re-create them each time:
@@ -19,10 +20,7 @@ result1 <- th1$agg_by(aggregations, by="Group")
 result2 <- th2$agg_by(aggregations, by="Group")
 }\if{html}{\out{</div>}}
 
-In this example, \code{aggregations} would be a vector of two \code{AggOp}s that can be reused in multiple calls to \code{agg_by()}.
-}
-\details{
-Name AggOp
+In this example, \code{aggregations} would be a vector of two AggOps that can be reused in multiple calls to \code{agg_by()}.
 }
 \section{Methods}{
 \subsection{Public methods}{
diff --git a/R/rdeephaven/man/Client.Rd b/R/rdeephaven/man/Client.Rd
index 132fdb46dfb..9602e5ec5e1 100644
--- a/R/rdeephaven/man/Client.Rd
+++ b/R/rdeephaven/man/Client.Rd
@@ -13,7 +13,7 @@ import data to and export data from the server, and run queries on the server.
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create a data frame and push it to the server, retrieve a reference to it as a TableHandle
 df <- data.frame(
diff --git a/R/rdeephaven/man/TableHandle.Rd b/R/rdeephaven/man/TableHandle.Rd
index 8d86943811b..9f7694a3c34 100644
--- a/R/rdeephaven/man/TableHandle.Rd
+++ b/R/rdeephaven/man/TableHandle.Rd
@@ -60,12 +60,12 @@ th2$bind_to_variable("t2")
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create a data frame, push it to the server, and retrieve a TableHandle referencing the new table
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:50],
-  boolCol = sample(c(TRUE,FALSE), 50, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 50, TRUE),
   col1 = sample(1000, size = 50, replace = TRUE),
   col2 = sample(1000, size = 50, replace = TRUE),
   col3 = 1:50
diff --git a/R/rdeephaven/man/UpdateBy.Rd b/R/rdeephaven/man/UpdateBy.Rd
deleted file mode 100644
index fcfccdc1c5b..00000000000
--- a/R/rdeephaven/man/UpdateBy.Rd
+++ /dev/null
@@ -1,110 +0,0 @@
-% Generated by roxygen2: do not edit by hand
-% Please edit documentation in R/update_by_ops_wrapper.R
-\docType{class}
-\name{UpdateBy}
-\alias{UpdateBy}
-\title{Deephaven's UpdateBy Operations}
-\description{
-Deephaven's \code{update_by()} table method and suite of \code{uby} functions enable cumulative and moving calculations
-on static \emph{and} streaming tables. Complex operations like cumulative minima and maxima, exponential moving averages,
-and rolling standard deviations are all possible and effortless to execute. As always in Deephaven,
-the results of these calculations will continue to update as their parent tables are updated. Additionally, it's easy
-to group data by one or more columns, enabling complex group-wise calculations with a single line of code.
-}
-\section{Applying UpdateBy operations to a table}{
-
-The table method \code{update_by()} is the entry point for UpdateBy operations. It takes two arguments: the first is an
-\code{\link{UpdateByOp}} or a list of \code{UpdateByOp}s denoting the calculations to perform on specific columns of the
-table. Then, it takes a column name or a list of column names that define the groups on which to perform the calculations.
-If you don't want grouped calculations, omit this argument.
-
-The \code{update_by()} method itself does not know anything about the columns on which you want to perform calculations.
-Rather, the desired columns are passed to individual \code{uby} functions, enabling a massive amount of flexibility.
-}
-
-\section{\code{uby} functions}{
-
-\code{uby} functions are the workers that actually execute the complex UpdateBy calculations. These functions are
-\emph{generators}, meaning they return \emph{functions} that the Deephaven engine knows how to interpret. We call the functions
-that they return \code{\link{UpdateByOp}}s. These \code{UpdateByOp}s are not R-level functions, but Deephaven-specific
-data types that perform all of the intensive calculations. Here is a list of all \code{uby} functions available in Deephaven:
-\itemize{
-\item \code{\link[=uby_cum_min]{uby_cum_min()}}
-\item \code{\link[=uby_cum_max]{uby_cum_max()}}
-\item \code{\link[=uby_cum_sum]{uby_cum_sum()}}
-\item \code{\link[=uby_cum_prod]{uby_cum_prod()}}
-\item \code{\link[=uby_forward_fill]{uby_forward_fill()}}
-\item \code{\link[=uby_delta]{uby_delta()}}
-\item \code{\link[=uby_emmin_tick]{uby_emmin_tick()}}
-\item \code{\link[=uby_emmin_time]{uby_emmin_time()}}
-\item \code{\link[=uby_emmax_tick]{uby_emmax_tick()}}
-\item \code{\link[=uby_emmax_time]{uby_emmax_time()}}
-\item \code{\link[=uby_ems_tick]{uby_ems_tick()}}
-\item \code{\link[=uby_ems_time]{uby_ems_time()}}
-\item \code{\link[=uby_ema_tick]{uby_ema_tick()}}
-\item \code{\link[=uby_ema_time]{uby_ema_time()}}
-\item \code{\link[=uby_emstd_tick]{uby_emstd_tick()}}
-\item \code{\link[=uby_emstd_time]{uby_emstd_time()}}
-\item \code{\link[=uby_rolling_count_tick]{uby_rolling_count_tick()}}
-\item \code{\link[=uby_rolling_count_time]{uby_rolling_count_time()}}
-\item \code{\link[=uby_rolling_group_tick]{uby_rolling_group_tick()}}
-\item \code{\link[=uby_rolling_group_time]{uby_rolling_group_time()}}
-\item \code{\link[=uby_rolling_min_tick]{uby_rolling_min_tick()}}
-\item \code{\link[=uby_rolling_min_time]{uby_rolling_min_time()}}
-\item \code{\link[=uby_rolling_max_tick]{uby_rolling_max_tick()}}
-\item \code{\link[=uby_rolling_max_time]{uby_rolling_max_time()}}
-\item \code{\link[=uby_rolling_sum_tick]{uby_rolling_sum_tick()}}
-\item \code{\link[=uby_rolling_sum_time]{uby_rolling_sum_time()}}
-\item \code{\link[=uby_rolling_prod_tick]{uby_rolling_prod_tick()}}
-\item \code{\link[=uby_rolling_prod_time]{uby_rolling_prod_time()}}
-\item \code{\link[=uby_rolling_avg_tick]{uby_rolling_avg_tick()}}
-\item \code{\link[=uby_rolling_avg_time]{uby_rolling_avg_time()}}
-\item \code{\link[=uby_rolling_wavg_tick]{uby_rolling_wavg_tick()}}
-\item \code{\link[=uby_rolling_wavg_time]{uby_rolling_wavg_time()}}
-\item \code{\link[=uby_rolling_std_tick]{uby_rolling_std_tick()}}
-\item \code{\link[=uby_rolling_std_time]{uby_rolling_std_time()}}
-}
-
-For more details on each aggregation function, click on one of the methods above or see the reference documentation
-by running \code{?uby_cum_min}, \code{?uby_delta}, etc.
-}
-
-\examples{
-\dontrun{
-library(rdeephaven)
-
-# connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
-
-# create data frame, push to server, retrieve TableHandle
-df <- data.frame(
-  timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
-  col1 = sample(10000, size = 500, replace = TRUE),
-  col2 = sample(10000, size = 500, replace = TRUE),
-  col3 = 1:500
-)
-th <- client$import_table(df)
-
-# compute 10-row exponential weighted moving average of col1 and col2, grouped by boolCol
-th1 <- th$
-  update_by(uby_ema_tick(decay_ticks=10, cols=c("col1Ema = col1", "col2Ema = col2")), by="boolCol")
-
-# compute rolling 10-second weighted average and standard deviation of col1 and col2, weighted by col3
-th2 <- th$
-  update_by(
-    c(uby_rolling_wavg_time(ts_col="timeCol", wcol="col3", cols=c("col1WAvg = col1", "col2WAvg = col2"), rev_time="PT10s"),
-      uby_rolling_std_time(ts_col="timeCol", cols=c("col1Std = col1", "col2Std = col2"), rev_time="PT10s")))
-
-# compute cumulative minimum and maximum of col1 and col2 respectively, and the rolling 20-row sum of col3, grouped by boolCol
-th3 <- th$
-  update_by(
-    c(uby_cum_min(cols="col1"),
-      uby_cum_max(cols="col2"),
-      uby_rolling_sum_tick(cols="col3", rev_ticks=20)),
-    by="boolCol")
-
-client$close()
-}
-
-}
diff --git a/R/rdeephaven/man/UpdateByOp.Rd b/R/rdeephaven/man/UpdateByOp.Rd
index b4612690cf8..db1f1a5b597 100644
--- a/R/rdeephaven/man/UpdateByOp.Rd
+++ b/R/rdeephaven/man/UpdateByOp.Rd
@@ -7,7 +7,8 @@
 \description{
 An \code{UpdateByOp} is the return type of one of Deephaven's \code{\link[=UpdateBy]{uby}} functions. It is a function that performs
 the computation specified by the \code{uby} function. These are intended to be passed directly to \code{update_by()},
-and should never be instantiated directly be user code.
+and should never be instantiated directly by user code. For more information, see the vignette on
+\code{uby} functions with \code{vignette("update_by")}.
 
 If multiple tables have the same schema and the same UpdateBy operations need to be applied to each table, saving
 these objects directly in a variable may be useful to avoid having to re-create them each time:
diff --git a/R/rdeephaven/man/agg_abs_sum.Rd b/R/rdeephaven/man/agg_abs_sum.Rd
index a5c74e02526..3688270cd29 100644
--- a/R/rdeephaven/man/agg_abs_sum.Rd
+++ b/R/rdeephaven/man/agg_abs_sum.Rd
@@ -8,7 +8,7 @@
 Default is to aggregate all non-grouping columns, which is only valid in the \code{agg_all_by()} operation.}
 }
 \value{
-\code{AggOp} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
+\code{\link{AggOp}} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
 }
 \description{
 Creates an Absolute Sum aggregation that computes the absolute sum of each column in \code{cols} for each aggregation group.
@@ -23,13 +23,16 @@ This function, like other Deephaven \code{agg} functions, is a generator functio
 function called an \code{\link{AggOp}} intended to be used in a call to \code{agg_by()} or \code{agg_all_by()}. This detail is
 typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 as the output of an \code{agg} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{agg} functions by running
+\code{vignette("agg_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
@@ -46,11 +49,11 @@ th1 <- th$
 
 # compute absolute sum of Number1 and Number2 grouped by X
 th2 <- th$
-  agg_by(agg_abs_sum(c("Number1", "Number2")), by="X")
+  agg_by(agg_abs_sum(c("Number1", "Number2")), by = "X")
 
 # compute absolute sum of Number1 and Number2 grouped by X and Y
 th3 <- th$
-  agg_by(agg_abs_sum(c("Number1", "Number2")), by=c("X", "Y"))
+  agg_by(agg_abs_sum(c("Number1", "Number2")), by = c("X", "Y"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/agg_avg.Rd b/R/rdeephaven/man/agg_avg.Rd
index 21711b256f7..156d9f0f646 100644
--- a/R/rdeephaven/man/agg_avg.Rd
+++ b/R/rdeephaven/man/agg_avg.Rd
@@ -8,7 +8,7 @@
 Default is to aggregate all non-grouping columns, which is only valid in the \code{agg_all_by()} operation.}
 }
 \value{
-\code{AggOp} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
+\code{\link{AggOp}} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
 }
 \description{
 Creates an Average aggregation that computes the average of each column in \code{cols} for each aggregation group.
@@ -23,13 +23,16 @@ This function, like other Deephaven \code{agg} functions, is a generator functio
 function called an \code{\link{AggOp}} intended to be used in a call to \code{agg_by()} or \code{agg_all_by()}. This detail is
 typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 as the output of an \code{agg} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{agg} functions by running
+\code{vignette("agg_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
@@ -46,11 +49,11 @@ th1 <- th$
 
 # compute average of Number1 and Number2 grouped by X
 th2 <- th$
-  agg_by(agg_avg(c("Number1", "Number2")), by="X")
+  agg_by(agg_avg(c("Number1", "Number2")), by = "X")
 
 # compute average of Number1 and Number2 grouped by X and Y
 th3 <- th$
-  agg_by(agg_avg(c("Number1", "Number2")), by=c("X", "Y"))
+  agg_by(agg_avg(c("Number1", "Number2")), by = c("X", "Y"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/agg_count.Rd b/R/rdeephaven/man/agg_count.Rd
index 785545726bd..ea02848cf0e 100644
--- a/R/rdeephaven/man/agg_count.Rd
+++ b/R/rdeephaven/man/agg_count.Rd
@@ -7,7 +7,7 @@
 \item{col}{String denoting the name of the new column to hold the counts of each aggregation group.}
 }
 \value{
-\code{AggOp} to be used in a call to \code{agg_by()}.
+\code{\link{AggOp}} to be used in a call to \code{agg_by()}.
 }
 \description{
 Creates a Count aggregation that counts the number of rows in each aggregation group.
@@ -23,6 +23,9 @@ function called an \code{\link{AggOp}} intended to be used in a call to \code{ag
 typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 as the output of an \code{agg} function can otherwise seem unexpected.
 
+For more information, see the vignette on \code{agg} functions by running
+\code{vignette("agg_by")}.
+
 Note that this operation is not supported in \code{agg_all_by()}.
 }
 \examples{
@@ -30,7 +33,7 @@ Note that this operation is not supported in \code{agg_all_by()}.
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
@@ -43,11 +46,11 @@ th <- client$import_table(df)
 
 # count number of elements in each group when grouped by X, name resulting column "count"
 th1 <- th$
-  agg_by(agg_count("count"), by="X")
+  agg_by(agg_count("count"), by = "X")
 
 # count number of elements in each group when grouped by X and Y, name resulting column "CountingCol"
 th2 <- th$
-  agg_by(agg_count("CountingCol"), by=c("X", "Y"))
+  agg_by(agg_count("CountingCol"), by = c("X", "Y"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/agg_first.Rd b/R/rdeephaven/man/agg_first.Rd
index fb914fcfcfa..2e2f6ffcff3 100644
--- a/R/rdeephaven/man/agg_first.Rd
+++ b/R/rdeephaven/man/agg_first.Rd
@@ -8,7 +8,7 @@
 Default is to aggregate all non-grouping columns, which is only valid in the \code{agg_all_by()} operation.}
 }
 \value{
-\code{AggOp} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
+\code{\link{AggOp}} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
 }
 \description{
 Creates a First aggregation that computes the first value of each column in \code{cols} for each aggregation group.
@@ -23,13 +23,16 @@ This function, like other Deephaven \code{agg} functions, is a generator functio
 function called an \code{\link{AggOp}} intended to be used in a call to \code{agg_by()} or \code{agg_all_by()}. This detail is
 typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 as the output of an \code{agg} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{agg} functions by running
+\code{vignette("agg_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
@@ -46,11 +49,11 @@ th1 <- th$
 
 # get first elements of Y, Number1, and Number2 grouped by X
 th2 <- th$
-  agg_by(agg_first(c("Y", "Number1", "Number2")), by="X")
+  agg_by(agg_first(c("Y", "Number1", "Number2")), by = "X")
 
 # get first elements of Number1 and Number2 grouped by X and Y
-th3 <- th
-  agg_by(agg_first(c("Number1", "Number2")), by=c("X", "Y"))
+th3 <- th$
+  agg_by(agg_first(c("Number1", "Number2")), by = c("X", "Y"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/agg_last.Rd b/R/rdeephaven/man/agg_last.Rd
index 0cc4d5032ba..a96876670e9 100644
--- a/R/rdeephaven/man/agg_last.Rd
+++ b/R/rdeephaven/man/agg_last.Rd
@@ -8,7 +8,7 @@
 Default is to aggregate all non-grouping columns, which is only valid in the \code{agg_all_by()} operation.}
 }
 \value{
-\code{AggOp} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
+\code{\link{AggOp}} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
 }
 \description{
 Creates a Last aggregation that computes the last value of each column in \code{cols} for each aggregation group.
@@ -23,13 +23,16 @@ This function, like other Deephaven \code{agg} functions, is a generator functio
 function called an \code{\link{AggOp}} intended to be used in a call to \code{agg_by()} or \code{agg_all_by()}. This detail is
 typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 as the output of an \code{agg} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{agg} functions by running
+\code{vignette("agg_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
@@ -46,11 +49,11 @@ th1 <- th$
 
 # get last elements of Y, Number1, and Number2 grouped by X
 th2 <- th$
-  agg_by(agg_last(c("Y", "Number1", "Number2")), by="X")
+  agg_by(agg_last(c("Y", "Number1", "Number2")), by = "X")
 
 # get last elements of Number1 and Number2 grouped by X and Y
 th3 <- th$
-  agg_by(agg_last(c("Number1", "Number2")), by=c("X", "Y"))
+  agg_by(agg_last(c("Number1", "Number2")), by = c("X", "Y"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/agg_max.Rd b/R/rdeephaven/man/agg_max.Rd
index c7bb7ebb13c..5a092d276cc 100644
--- a/R/rdeephaven/man/agg_max.Rd
+++ b/R/rdeephaven/man/agg_max.Rd
@@ -8,7 +8,7 @@
 Default is to aggregate all non-grouping columns, which is only valid in the \code{agg_all_by()} operation.}
 }
 \value{
-\code{AggOp} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
+\code{\link{AggOp}} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
 }
 \description{
 Creates a Maximum aggregation that computes the maximum of each column in \code{cols} for each aggregation group.
@@ -23,13 +23,16 @@ This function, like other Deephaven \code{agg} functions, is a generator functio
 function called an \code{\link{AggOp}} intended to be used in a call to \code{agg_by()} or \code{agg_all_by()}. This detail is
 typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 as the output of an \code{agg} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{agg} functions by running
+\code{vignette("agg_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
@@ -46,11 +49,11 @@ th1 <- th$
 
 # get maximum elements of Number1 and Number2 grouped by X
 th2 <- th$
-  agg_by(agg_max(c("Number1", "Number2")), by="X")
+  agg_by(agg_max(c("Number1", "Number2")), by = "X")
 
 # get maximum elements of Number1 and Number2 grouped by X and Y
 th3 <- th$
-  agg_by(agg_max(c("Number1", "Number2")), by=c("X", "Y"))
+  agg_by(agg_max(c("Number1", "Number2")), by = c("X", "Y"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/agg_median.Rd b/R/rdeephaven/man/agg_median.Rd
index b64da3a26d2..3c08b9d4c02 100644
--- a/R/rdeephaven/man/agg_median.Rd
+++ b/R/rdeephaven/man/agg_median.Rd
@@ -8,7 +8,7 @@
 Default is to aggregate all non-grouping columns, which is only valid in the \code{agg_all_by()} operation.}
 }
 \value{
-\code{AggOp} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
+\code{\link{AggOp}} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
 }
 \description{
 Creates a Median aggregation that computes the median of each column in \code{cols} for each aggregation group.
@@ -23,13 +23,16 @@ This function, like other Deephaven \code{agg} functions, is a generator functio
 function called an \code{\link{AggOp}} intended to be used in a call to \code{agg_by()} or \code{agg_all_by()}. This detail is
 typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 as the output of an \code{agg} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{agg} functions by running
+\code{vignette("agg_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
@@ -46,11 +49,11 @@ th1 <- th$
 
 # compute median of Number1 and Number2 grouped by X
 th2 <- th$
-  agg_by(agg_median(c("Number1", "Number2")), by="X")
+  agg_by(agg_median(c("Number1", "Number2")), by = "X")
 
 # compute median of Number1 and Number2 grouped by X and Y
 th3 <- th$
-  agg_by(agg_median(c("Number1", "Number2")), by=c("X", "Y"))
+  agg_by(agg_median(c("Number1", "Number2")), by = c("X", "Y"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/agg_min.Rd b/R/rdeephaven/man/agg_min.Rd
index 4d66d14fd78..fd9b603a0d2 100644
--- a/R/rdeephaven/man/agg_min.Rd
+++ b/R/rdeephaven/man/agg_min.Rd
@@ -8,7 +8,7 @@
 Default is to aggregate all non-grouping columns, which is only valid in the \code{agg_all_by()} operation.}
 }
 \value{
-\code{AggOp} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
+\code{\link{AggOp}} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
 }
 \description{
 Creates a Minimum aggregation that computes the minimum of each column in \code{cols} for each aggregation group.
@@ -23,13 +23,16 @@ This function, like other Deephaven \code{agg} functions, is a generator functio
 function called an \code{\link{AggOp}} intended to be used in a call to \code{agg_by()} or \code{agg_all_by()}. This detail is
 typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 as the output of an \code{agg} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{agg} functions by running
+\code{vignette("agg_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
@@ -46,11 +49,11 @@ th1 <- th$
 
 # get minimum elements of Number1 and Number2 grouped by X
 th2 <- th$
-  agg_by(agg_min(c("Number1", "Number2")), by="X")
+  agg_by(agg_min(c("Number1", "Number2")), by = "X")
 
 # get minimum elements of Number1 and Number2 grouped by X and Y
 th3 <- th$
-  agg_by(agg_min(c("Number1", "Number2")), by=c("X", "Y"))
+  agg_by(agg_min(c("Number1", "Number2")), by = c("X", "Y"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/agg_percentile.Rd b/R/rdeephaven/man/agg_percentile.Rd
index 84f1f63d31a..4c76455d447 100644
--- a/R/rdeephaven/man/agg_percentile.Rd
+++ b/R/rdeephaven/man/agg_percentile.Rd
@@ -10,7 +10,7 @@
 Default is to aggregate all non-grouping columns, which is only valid in the \code{agg_all_by()} operation.}
 }
 \value{
-\code{AggOp} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
+\code{\link{AggOp}} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
 }
 \description{
 Creates a Percentile aggregation that computes the given percentile of each column in \code{cols} for each aggregation group.
@@ -25,13 +25,16 @@ This function, like other Deephaven \code{agg} functions, is a generator functio
 function called an \code{\link{AggOp}} intended to be used in a call to \code{agg_by()} or \code{agg_all_by()}. This detail is
 typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 as the output of an \code{agg} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{agg} functions by running
+\code{vignette("agg_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
@@ -44,15 +47,15 @@ th <- client$import_table(df)
 
 # compute 20th percentile of Number1 and Number2
 th1 <- th$
-  agg_by(agg_percentile(percentile=0.2, cols=c("Number1", "Number2")))
+  agg_by(agg_percentile(percentile = 0.2, cols = c("Number1", "Number2")))
 
 # compute 50th percentile of Number1 and Number2 grouped by X
 th2 <- th$
-  agg_by(agg_percentile(percentile=0.5, cols=c("Number1", "Number2")), by="X")
+  agg_by(agg_percentile(percentile = 0.5, cols = c("Number1", "Number2")), by = "X")
 
 # compute 75th percentile of Number1 and Number2 grouped by X and Y
 th3 <- th$
-  agg_by(agg_percentile(percentile=0.75, cols=c("Number1", "Number2")), by=c("X", "Y"))
+  agg_by(agg_percentile(percentile = 0.75, cols = c("Number1", "Number2")), by = c("X", "Y"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/agg_std.Rd b/R/rdeephaven/man/agg_std.Rd
index 843e1b78ff6..5101145707d 100644
--- a/R/rdeephaven/man/agg_std.Rd
+++ b/R/rdeephaven/man/agg_std.Rd
@@ -8,7 +8,7 @@
 Default is to aggregate all non-grouping columns, which is only valid in the \code{agg_all_by()} operation.}
 }
 \value{
-\code{AggOp} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
+\code{\link{AggOp}} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
 }
 \description{
 Creates a Standard Deviation aggregation that computes the standard deviation of each column in \code{cols}, for each aggregation group.
@@ -23,13 +23,16 @@ This function, like other Deephaven \code{agg} functions, is a generator functio
 function called an \code{\link{AggOp}} intended to be used in a call to \code{agg_by()} or \code{agg_all_by()}. This detail is
 typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 as the output of an \code{agg} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{agg} functions by running
+\code{vignette("agg_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
@@ -46,11 +49,11 @@ th1 <- th$
 
 # compute standard deviation of Number1 and Number2 grouped by X
 th2 <- th$
-  agg_by(agg_std(c("Number1", "Number2")), by="X")
+  agg_by(agg_std(c("Number1", "Number2")), by = "X")
 
 # compute standard deviation of Number1 and Number2 grouped by X and Y
 th3 <- th$
-  agg_by(agg_std(c("Number1", "Number2")), by=c("X", "Y"))
+  agg_by(agg_std(c("Number1", "Number2")), by = c("X", "Y"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/agg_sum.Rd b/R/rdeephaven/man/agg_sum.Rd
index 76c270a6bf0..ac2de464289 100644
--- a/R/rdeephaven/man/agg_sum.Rd
+++ b/R/rdeephaven/man/agg_sum.Rd
@@ -8,7 +8,7 @@
 Default is to aggregate all non-grouping columns, which is only valid in the \code{agg_all_by()} operation.}
 }
 \value{
-\code{AggOp} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
+\code{\link{AggOp}} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
 }
 \description{
 Creates a Sum aggregation that computes the sum of each column in \code{cols} for each aggregation group.
@@ -23,13 +23,16 @@ This function, like other Deephaven \code{agg} functions, is a generator functio
 function called an \code{\link{AggOp}} intended to be used in a call to \code{agg_by()} or \code{agg_all_by()}. This detail is
 typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 as the output of an \code{agg} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{agg} functions by running
+\code{vignette("agg_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
@@ -46,11 +49,11 @@ th1 <- th$
 
 # compute sum of Number1 and Number2 grouped by X
 th2 <- th$
-  agg_by(agg_sum(c("Number1", "Number2")), by="X")
+  agg_by(agg_sum(c("Number1", "Number2")), by = "X")
 
 # compute sum of Number1 and Number2 grouped by X and Y
 th3 <- th$
-  agg_by(agg_sum(c("Number1", "Number2")), by=c("X", "Y"))
+  agg_by(agg_sum(c("Number1", "Number2")), by = c("X", "Y"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/agg_var.Rd b/R/rdeephaven/man/agg_var.Rd
index 5f8c7ee9537..94d0b59c283 100644
--- a/R/rdeephaven/man/agg_var.Rd
+++ b/R/rdeephaven/man/agg_var.Rd
@@ -8,7 +8,7 @@
 Default is to aggregate all non-grouping columns, which is only valid in the \code{agg_all_by()} operation.}
 }
 \value{
-\code{AggOp} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
+\code{\link{AggOp}} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
 }
 \description{
 Creates a Variance aggregation that computes the variance of each column in \code{cols} for each aggregation group.
@@ -23,13 +23,16 @@ This function, like other Deephaven \code{agg} functions, is a generator functio
 function called an \code{\link{AggOp}} intended to be used in a call to \code{agg_by()} or \code{agg_all_by()}. This detail is
 typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 as the output of an \code{agg} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{agg} functions by running
+\code{vignette("agg_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
@@ -46,11 +49,11 @@ th1 <- th$
 
 # compute variance of Number1 and Number2 grouped by X
 th2 <- th$
-  agg_by(agg_var(c("Number1", "Number2")), by="X")
+  agg_by(agg_var(c("Number1", "Number2")), by = "X")
 
 # compute variance of Number1 and Number2 grouped by X and Y
 th3 <- th$
-  agg_by(agg_var(c("Number1", "Number2")), by=c("X", "Y"))
+  agg_by(agg_var(c("Number1", "Number2")), by = c("X", "Y"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/agg_w_avg.Rd b/R/rdeephaven/man/agg_w_avg.Rd
index d9a27eec52c..575f3fba44c 100644
--- a/R/rdeephaven/man/agg_w_avg.Rd
+++ b/R/rdeephaven/man/agg_w_avg.Rd
@@ -10,7 +10,7 @@
 Default is to aggregate all non-grouping columns, which is only valid in the \code{agg_all_by()} operation.}
 }
 \value{
-\code{AggOp} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
+\code{\link{AggOp}} to be used in a call to \code{agg_by()} or \code{agg_all_by()}.
 }
 \description{
 Creates a Weighted Average aggregation that computes the weighted average of each column in \code{cols} for each aggregation group.
@@ -25,13 +25,16 @@ This function, like other Deephaven \code{agg} functions, is a generator functio
 function called an \code{\link{AggOp}} intended to be used in a call to \code{agg_by()} or \code{agg_all_by()}. This detail is
 typically hidden from the user. However, it is important to understand this detail for debugging purposes,
 as the output of an \code{agg} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{agg} functions by running
+\code{vignette("agg_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
@@ -44,15 +47,15 @@ th <- client$import_table(df)
 
 # compute weighted average of Number1, weighted by Number2
 th1 <- th$
-  agg_by(agg_w_avg(wcol="Number2", cols="Number1"))
+  agg_by(agg_w_avg(wcol = "Number2", cols = "Number1"))
 
 # compute weighted average of Number1, weighted by Number2, grouped by X
 th2 <- th$
-  agg_by(agg_w_avg(wcol="Number2", cols="Number1", by="X"))
+  agg_by(agg_w_avg(wcol = "Number2", cols = "Number1"), by = "X")
 
 # compute weighted average of Number1, weighted by Number2, grouped by X and Y
 th3 <- th$
-  agg_by(agg_w_avg(wcol="Number2", cols="Number1", by=c("X", "Y")))
+  agg_by(agg_w_avg(wcol = "Number2", cols = "Number1"), by = c("X", "Y"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/rdeephaven.Rd b/R/rdeephaven/man/rdeephaven.Rd
deleted file mode 100644
index 96dd3c1f33a..00000000000
--- a/R/rdeephaven/man/rdeephaven.Rd
+++ /dev/null
@@ -1,101 +0,0 @@
-% Generated by roxygen2: do not edit by hand
-% Please edit documentation in R/exports.R
-\name{rdeephaven}
-\alias{rdeephaven}
-\title{The Deephaven Community R Client}
-\description{
-The Deephaven Community R Client provides an R interface to Deephaven's powerful real-time data engine, \href{https://deephaven.io/community/}{\emph{Deephaven Core}}.
-To use this package, you must have a Deephaven server running and be able to connect to it. For more information on
-how to set up a Deephaven server, see the documentation \href{https://deephaven.io/core/docs/tutorials/quickstart/}{here}.
-}
-\section{Building blocks of the Deephaven R Client}{
-
-There are two primary R classes that make up the Deephaven R Client, the \code{\link{Client}} class and the
-\code{\link{TableHandle}} class. The \code{Client} class is used to establish a connection to the Deephaven server with
-its constructor \code{Client$new()}, and to send server requests, such as running a script via \code{run_script()}, or pushing
-local data to the server via \code{import_table()}. Basic usage of the \code{Client} class may look something like this:
-
-\if{html}{\out{<div class="sourceCode r">}}\preformatted{library(rdeephaven)
-
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
-}\if{html}{\out{</div>}}
-
-Many of these server requests end up creating or modifying tables that live on the server. To keep track of these
-tables, the R client retrieves references to them, and wraps these references in \code{TableHandle} objects. These
-TableHandles have a host of methods that mirror server-side table operations, such as \code{head()}, \code{tail()}, \code{update()},
-and so on. So, you can typically use TableHandles \emph{as if} they are tables themselves, and all of the corresponding
-methods that you call on them will be executed on the server. Here is a simple example of pushing data to the server,
-retrieving a TableHandle to the resulting table, and applying some basic table operations to the table:
-
-\if{html}{\out{<div class="sourceCode r">}}\preformatted{df1 <- data.frame(x=1:10, y=11:20)
-th1 <- client$import_table(df1)
-
-th2 <- th1$
-  update("z = x + y")$
-  where("z \% 4 == 0")
-}\if{html}{\out{</div>}}
-
-TableHandles also support common functional methods for converting server-side Deephaven tables to R objects stored in
-local memory such as \code{as.data.frame()}, \code{as_tibble()}, and \code{as_arrow_table()}. Here's an example of converting the
-table created above to an R data frame and verifying that other functional methods work as expected:
-
-\if{html}{\out{<div class="sourceCode r">}}\preformatted{df2 <- as.data.frame(th2)
-
-print(nrow(th2) == nrow(df2))
-print(ncol(th2) == ncol(df2))
-print(dim(th2) == dim(df2))
-print(all(as.data.frame(head(th2, 2)) == head(df2, 2)))
-print(all(as.data.frame(tail(th2, 2)) == tail(df2, 2)))
-}\if{html}{\out{</div>}}
-
-For more information on these classes and all of their methods, see the reference documentation for \code{\link{Client}}
-and \code{\link{TableHandle}} by clicking on their class names, or by running \code{?Client} or \code{?TableHandle}.
-}
-
-\section{Real-time data analysis}{
-
-Since TableHandles are references to tables living on the Deephaven server, they may refer to streaming tables, or
-tables that are receiving new data periodically (typically once per second). Here's a simple example of creating a
-table that adds a new row every second:
-
-\if{html}{\out{<div class="sourceCode r">}}\preformatted{th3 <- client$time_table("PT1s")$
-  update(c("X = ii", "Y = sin(X)"))
-}\if{html}{\out{</div>}}
-
-R objects like data frames or Dplyr tibbles do not have this streaming property - they are always static objects
-stored in memory. However, a TableHandle referring to a streaming table may be converted to a data frame or tibble at
-any time, and the resulting object will be a snapshot of the table at the time of conversion. This means that you can
-use the Deephaven R Client to perform real-time data analysis on streaming data! Here, we make a simple plot of the
-ticking table, and call it three times to demonstrate the dynamic nature of the table:
-
-\if{html}{\out{<div class="sourceCode r">}}\preformatted{plot(as.data.frame(th3)$X, as.data.frame(th3)$Y, type="l")
-Sys.sleep(5)
-plot(as.data.frame(th3)$X, as.data.frame(th3)$Y, type="l")
-Sys.sleep(5)
-plot(as.data.frame(th3)$X, as.data.frame(th3)$Y, type="l")
-}\if{html}{\out{</div>}}
-
-There are performance and memory considerations when pulling data from the server, so it is best to use the provided
-TableHandle methods to perform as much of your analysis as possible on the server, and to only pull the data when
-something \emph{must} be done in R, like plotting or writing to a local file.
-}
-
-\section{Powerful table operations}{
-
-Much of the power of Deephaven's suite of table operations is achieved through the use of the \code{\link[=UpdateBy]{update_by()}}
-and \code{\link[=AggBy]{agg_by()}} methods. These table methods are important enough to warrant their own documentation pages, accessible
-by clicking on their names, or by running \code{?UpdateBy} or \code{?AggBy}. These methods come with their own suites of functions,
-prefixed with \code{agg_} and \code{uby_} respectively, that are discoverable from their documentation pages. Running \code{ls("package:rdeephaven")}
-will reveal that most of the functions included in this package are for these methods, so it is important to get acquainted
-with them.
-}
-
-\section{Getting help}{
-
-While we've done our best to provide good documentation for this package, you may find you need more help than what
-this documentation has to offer. Please visit the official Deephaven Community Core \href{https://deephaven.io/core/docs/tutorials/quickstart/}{documentation}
-to learn more about Deephaven and to find comprehensive examples. Additionally, feel free to reach out to us on
-the Deephaven \href{https://deephaven.io/slack}{Community Slack channel} with any questions.
-We hope you find real-time data analysis in R to be as easy as possible.
-}
-
diff --git a/R/rdeephaven/man/uby_cum_max.Rd b/R/rdeephaven/man/uby_cum_max.Rd
index dfd209e3f38..2b940d46414 100644
--- a/R/rdeephaven/man/uby_cum_max.Rd
+++ b/R/rdeephaven/man/uby_cum_max.Rd
@@ -23,18 +23,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -47,12 +50,12 @@ th1 <- th$
 
 # compute cumulative maximum of col1 and col2, grouped by boolCol
 th2 <- th$
-  update_by(uby_cum_max(c("col1CumMax = col1", "col2CumMax = col2")), by="boolCol")
+  update_by(uby_cum_max(c("col1CumMax = col1", "col2CumMax = col2")), by = "boolCol")
 
 # compute cumulative maximum of col1 and col2, grouped by boolCol and parity of col3
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_cum_max(c("col1CumMax = col1", "col2CumMax = col2")), by=c("boolCol", "col3Parity"))
+  update_by(uby_cum_max(c("col1CumMax = col1", "col2CumMax = col2")), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_cum_min.Rd b/R/rdeephaven/man/uby_cum_min.Rd
index c8b0c7d8040..ed576f7b170 100644
--- a/R/rdeephaven/man/uby_cum_min.Rd
+++ b/R/rdeephaven/man/uby_cum_min.Rd
@@ -23,18 +23,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -47,12 +50,12 @@ th1 <- th$
 
 # compute cumulative minimum of col1 and col2, grouped by boolCol
 th2 <- th$
-  update_by(uby_cum_min(c("col1CumMin = col1", "col2CumMin = col2")), by="boolCol")
+  update_by(uby_cum_min(c("col1CumMin = col1", "col2CumMin = col2")), by = "boolCol")
 
 # compute cumulative minimum of col1 and col2, grouped by boolCol and parity of col3
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_cum_min(c("col1CumMin = col1", "col2CumMin = col2")), by=c("boolCol", "col3Parity"))
+  update_by(uby_cum_min(c("col1CumMin = col1", "col2CumMin = col2")), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_cum_prod.Rd b/R/rdeephaven/man/uby_cum_prod.Rd
index 872b9c59e18..e3710a95a5c 100644
--- a/R/rdeephaven/man/uby_cum_prod.Rd
+++ b/R/rdeephaven/man/uby_cum_prod.Rd
@@ -23,18 +23,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -47,12 +50,12 @@ th1 <- th$
 
 # compute cumulative product of col1 and col2, grouped by boolCol
 th2 <- th$
-  update_by(uby_cum_prod(c("col1CumProd = col1", "col2CumProd = col2")), by="boolCol")
+  update_by(uby_cum_prod(c("col1CumProd = col1", "col2CumProd = col2")), by = "boolCol")
 
 # compute cumulative product of col1 and col2, grouped by boolCol and parity of col3
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_cum_prod(c("col1CumProd = col1", "col2CumProd = col2")), by=c("boolCol", "col3Parity"))
+  update_by(uby_cum_prod(c("col1CumProd = col1", "col2CumProd = col2")), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_cum_sum.Rd b/R/rdeephaven/man/uby_cum_sum.Rd
index d12ebde5eae..12d7e4f97ab 100644
--- a/R/rdeephaven/man/uby_cum_sum.Rd
+++ b/R/rdeephaven/man/uby_cum_sum.Rd
@@ -23,18 +23,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -47,12 +50,12 @@ th1 <- th$
 
 # compute cumulative sum of col1 and col2, grouped by boolCol
 th2 <- th$
-  update_by(uby_cum_sum(c("col1CumSum = col1", "col2CumSum = col2")), by="boolCol")
+  update_by(uby_cum_sum(c("col1CumSum = col1", "col2CumSum = col2")), by = "boolCol")
 
 # compute cumulative sum of col1 and col2, grouped by boolCol and parity of col3
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_cum_sum(c("col1CumSum = col1", "col2CumSum = col2")), by=c("boolCol", "col3Parity"))
+  update_by(uby_cum_sum(c("col1CumSum = col1", "col2CumSum = col2")), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_delta.Rd b/R/rdeephaven/man/uby_delta.Rd
index 2679b3886a7..427d493ceed 100644
--- a/R/rdeephaven/man/uby_delta.Rd
+++ b/R/rdeephaven/man/uby_delta.Rd
@@ -34,18 +34,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -58,12 +61,12 @@ th1 <- th$
 
 # compute consecutive differences of col1 and col2, grouped by boolCol
 th2 <- th$
-  update_by(uby_delta(c("col1Delta = col1", "col2Delta = col2")), by="boolCol")
+  update_by(uby_delta(c("col1Delta = col1", "col2Delta = col2")), by = "boolCol")
 
 # compute consecutive differences of col1 and col2, grouped by boolCol and parity of col3
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_delta(c("col1Delta = col1", "col2Delta = col2")), by=c("boolCol", "col3Parity"))
+  update_by(uby_delta(c("col1Delta = col1", "col2Delta = col2")), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_ema_tick.Rd b/R/rdeephaven/man/uby_ema_tick.Rd
index 9d680ddcc3a..ecbf3ff37b2 100644
--- a/R/rdeephaven/man/uby_ema_tick.Rd
+++ b/R/rdeephaven/man/uby_ema_tick.Rd
@@ -40,18 +40,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -60,16 +63,16 @@ th <- client$import_table(df)
 
 # compute 10-row exponential moving average of col1 and col2
 th1 <- th$
-  update_by(uby_ema_tick(decay_ticks=10, cols=c("col1Ema = col1", "col2Ema = col2")))
+  update_by(uby_ema_tick(decay_ticks = 10, cols = c("col1Ema = col1", "col2Ema = col2")))
 
 # compute 5-row exponential moving average of col1 and col2, grouped by boolCol
 th2 <- th$
-  update_by(uby_ema_tick(decay_ticks=5, cols=c("col1Ema = col1", "col2Ema = col2")), by="boolCol")
+  update_by(uby_ema_tick(decay_ticks = 5, cols = c("col1Ema = col1", "col2Ema = col2")), by = "boolCol")
 
 # compute 20-row exponential moving average of col1 and col2, grouped by boolCol and parity of col3
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_ema_tick(decay_ticks=20, cols=c("col1Ema = col1", "col2Ema = col2")), by=c("boolCol", "col3Parity"))
+  update_by(uby_ema_tick(decay_ticks = 20, cols = c("col1Ema = col1", "col2Ema = col2")), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_ema_time.Rd b/R/rdeephaven/man/uby_ema_time.Rd
index 862f1be69c3..e86dca08f4a 100644
--- a/R/rdeephaven/man/uby_ema_time.Rd
+++ b/R/rdeephaven/man/uby_ema_time.Rd
@@ -43,18 +43,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -63,16 +66,16 @@ th <- client$import_table(df)
 
 # compute 10-second exponential moving average of col1 and col2
 th1 <- th$
-  update_by(uby_ema_time(ts_col="timeCol", decay_time="PT10s", cols=c("col1Ema = col1", "col2Ema = col2")))
+  update_by(uby_ema_time(ts_col = "timeCol", decay_time = "PT10s", cols = c("col1Ema = col1", "col2Ema = col2")))
 
 # compute 5-second exponential moving average of col1 and col2, grouped by boolCol
 th2 <- th$
-  update_by(uby_ema_time(ts_col="timeCol", decay_time="PT5s", cols=c("col1Ema = col1", "col2Ema = col2")), by="boolCol")
+  update_by(uby_ema_time(ts_col = "timeCol", decay_time = "PT5s", cols = c("col1Ema = col1", "col2Ema = col2")), by = "boolCol")
 
 # compute 20-second exponential moving average of col1 and col2, grouped by boolCol and parity of col3
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_ema_time(ts_col="timeCol", decay_time="PT20s", cols=c("col1Ema = col1", "col2Ema = col2")), by=c("boolCol", "col3Parity"))
+  update_by(uby_ema_time(ts_col = "timeCol", decay_time = "PT20s", cols = c("col1Ema = col1", "col2Ema = col2")), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_emmax_tick.Rd b/R/rdeephaven/man/uby_emmax_tick.Rd
index db344db1c35..9f7f960ffde 100644
--- a/R/rdeephaven/man/uby_emmax_tick.Rd
+++ b/R/rdeephaven/man/uby_emmax_tick.Rd
@@ -40,18 +40,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -60,16 +63,16 @@ th <- client$import_table(df)
 
 # compute 10-row exponential moving maximum of col1 and col2
 th1 <- th$
-  update_by(uby_emmax_tick(decay_ticks=10, cols=c("col1Emmax = col1", "col2Emmax = col2")))
+  update_by(uby_emmax_tick(decay_ticks = 10, cols = c("col1Emmax = col1", "col2Emmax = col2")))
 
 # compute 5-row exponential moving maximum of col1 and col2, grouped by boolCol
 th2 <- th$
-  update_by(uby_emmax_tick(decay_ticks=5, cols=c("col1Emmax = col1", "col2Emmax = col2")), by="boolCol")
+  update_by(uby_emmax_tick(decay_ticks = 5, cols = c("col1Emmax = col1", "col2Emmax = col2")), by = "boolCol")
 
 # compute 20-row exponential moving maximum of col1 and col2, grouped by boolCol and parity of col3
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_emmax_tick(decay_ticks=20, cols=c("col1Emmax = col1", "col2Emmax = col2")), by=c("boolCol", "col3Parity"))
+  update_by(uby_emmax_tick(decay_ticks = 20, cols = c("col1Emmax = col1", "col2Emmax = col2")), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_emmax_time.Rd b/R/rdeephaven/man/uby_emmax_time.Rd
index 412c5e3239a..c845c45039b 100644
--- a/R/rdeephaven/man/uby_emmax_time.Rd
+++ b/R/rdeephaven/man/uby_emmax_time.Rd
@@ -41,18 +41,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -61,16 +64,16 @@ th <- client$import_table(df)
 
 # compute 10-second exponential moving maximum of col1 and col2
 th1 <- th$
-  update_by(uby_emmax_time(ts_col="timeCol", decay_time="PT10s", cols=c("col1Emmax = col1", "col2Emmax = col2")))
+  update_by(uby_emmax_time(ts_col = "timeCol", decay_time = "PT10s", cols = c("col1Emmax = col1", "col2Emmax = col2")))
 
 # compute 5-second exponential moving maximum of col1 and col2, grouped by boolCol
 th2 <- th$
-  update_by(uby_emmax_time(ts_col="timeCol", decay_time="PT5s", cols=c("col1Emmax = col1", "col2Emmax = col2")), by="boolCol")
+  update_by(uby_emmax_time(ts_col = "timeCol", decay_time = "PT5s", cols = c("col1Emmax = col1", "col2Emmax = col2")), by = "boolCol")
 
 # compute 20-second exponential moving maximum of col1 and col2, grouped by boolCol and parity of col3
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_emmax_time(ts_col="timeCol", decay_time="PT20s", cols=c("col1Emmax = col1", "col2Emmax = col2")), by=c("boolCol", "col3Parity"))
+  update_by(uby_emmax_time(ts_col = "timeCol", decay_time = "PT20s", cols = c("col1Emmax = col1", "col2Emmax = col2")), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_emmin_tick.Rd b/R/rdeephaven/man/uby_emmin_tick.Rd
index 0c9b8c9390c..20055b7891b 100644
--- a/R/rdeephaven/man/uby_emmin_tick.Rd
+++ b/R/rdeephaven/man/uby_emmin_tick.Rd
@@ -40,18 +40,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -60,16 +63,16 @@ th <- client$import_table(df)
 
 # compute 10-row exponential moving minimum of col1 and col2
 th1 <- th$
-  update_by(uby_emmin_tick(decay_ticks=10, cols=c("col1Emmin = col1", "col2Emmin = col2")))
+  update_by(uby_emmin_tick(decay_ticks = 10, cols = c("col1Emmin = col1", "col2Emmin = col2")))
 
 # compute 5-row exponential moving minimum of col1 and col2, grouped by boolCol
 th2 <- th$
-  update_by(uby_emmin_tick(decay_ticks=5, cols=c("col1Emmin = col1", "col2Emmin = col2")), by="boolCol")
+  update_by(uby_emmin_tick(decay_ticks = 5, cols = c("col1Emmin = col1", "col2Emmin = col2")), by = "boolCol")
 
 # compute 20-row exponential moving minimum of col1 and col2, grouped by boolCol and parity of col3
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_emmin_tick(decay_ticks=20, cols=c("col1Emmin = col1", "col2Emmin = col2")), by=c("boolCol", "col3Parity"))
+  update_by(uby_emmin_tick(decay_ticks = 20, cols = c("col1Emmin = col1", "col2Emmin = col2")), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_emmin_time.Rd b/R/rdeephaven/man/uby_emmin_time.Rd
index d1ee01a441b..ca01ed2117b 100644
--- a/R/rdeephaven/man/uby_emmin_time.Rd
+++ b/R/rdeephaven/man/uby_emmin_time.Rd
@@ -41,18 +41,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -61,16 +64,16 @@ th <- client$import_table(df)
 
 # compute 10-second exponential moving minimum of col1 and col2
 th1 <- th$
-  update_by(uby_emmin_time(ts_col="timeCol", decay_time="PT10s", cols=c("col1Emmin = col1", "col2Emmin = col2")))
+  update_by(uby_emmin_time(ts_col = "timeCol", decay_time = "PT10s", cols = c("col1Emmin = col1", "col2Emmin = col2")))
 
 # compute 5-second exponential moving minimum of col1 and col2, grouped by boolCol
 th2 <- th$
-  update_by(uby_emmin_time(ts_col="timeCol", decay_time="PT5s", cols=c("col1Emmin = col1", "col2Emmin = col2")), by="boolCol")
+  update_by(uby_emmin_time(ts_col = "timeCol", decay_time = "PT5s", cols = c("col1Emmin = col1", "col2Emmin = col2")), by = "boolCol")
 
 # compute 20-second exponential moving minimum of col1 and col2, grouped by boolCol and parity of col3
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_emmin_time(ts_col="timeCol", decay_time="PT20s", cols=c("col1Emmin = col1", "col2Emmin = col2")), by=c("boolCol", "col3Parity"))
+  update_by(uby_emmin_time(ts_col = "timeCol", decay_time = "PT20s", cols = c("col1Emmin = col1", "col2Emmin = col2")), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_ems_tick.Rd b/R/rdeephaven/man/uby_ems_tick.Rd
index 7e5a45e8c79..11536a6ac25 100644
--- a/R/rdeephaven/man/uby_ems_tick.Rd
+++ b/R/rdeephaven/man/uby_ems_tick.Rd
@@ -40,18 +40,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -60,16 +63,16 @@ th <- client$import_table(df)
 
 # compute 10-row exponential moving sum of col1 and col2
 th1 <- th$
-  update_by(uby_ems_tick(decay_ticks=10, cols=c("col1Ems = col1", "col2Ems = col2")))
+  update_by(uby_ems_tick(decay_ticks = 10, cols = c("col1Ems = col1", "col2Ems = col2")))
 
 # compute 5-row exponential moving sum of col1 and col2, grouped by boolCol
 th2 <- th$
-  update_by(uby_ems_tick(decay_ticks=5, cols=c("col1Ems = col1", "col2Ems = col2")), by="boolCol")
+  update_by(uby_ems_tick(decay_ticks = 5, cols = c("col1Ems = col1", "col2Ems = col2")), by = "boolCol")
 
 # compute 20-row exponential moving sum of col1 and col2, grouped by boolCol and parity of col3
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_ems_tick(decay_ticks=20, cols=c("col1Ems = col1", "col2Ems = col2")), by=c("boolCol", "col3Parity"))
+  update_by(uby_ems_tick(decay_ticks = 20, cols = c("col1Ems = col1", "col2Ems = col2")), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_ems_time.Rd b/R/rdeephaven/man/uby_ems_time.Rd
index 12788d09e62..823cb67fe19 100644
--- a/R/rdeephaven/man/uby_ems_time.Rd
+++ b/R/rdeephaven/man/uby_ems_time.Rd
@@ -41,18 +41,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -61,16 +64,16 @@ th <- client$import_table(df)
 
 # compute 10-second exponential moving sum of col1 and col2
 th1 <- th$
-  update_by(uby_ems_time(ts_col="timeCol", decay_time="PT10s", cols=c("col1Ems = col1", "col2Ems = col2")))
+  update_by(uby_ems_time(ts_col = "timeCol", decay_time = "PT10s", cols = c("col1Ems = col1", "col2Ems = col2")))
 
 # compute 5-second exponential moving sum of col1 and col2, grouped by boolCol
 th2 <- th$
-  update_by(uby_ems_time(ts_col="timeCol", decay_time="PT5s", cols=c("col1Ems = col1", "col2Ems = col2")), by="boolCol")
+  update_by(uby_ems_time(ts_col = "timeCol", decay_time = "PT5s", cols = c("col1Ems = col1", "col2Ems = col2")), by = "boolCol")
 
 # compute 20-second exponential moving sum of col1 and col2, grouped by boolCol and parity of col3
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_ems_time(ts_col="timeCol", decay_time="PT20s", cols=c("col1Ems = col1", "col2Ems = col2")), by=c("boolCol", "col3Parity"))
+  update_by(uby_ems_time(ts_col = "timeCol", decay_time = "PT20s", cols = c("col1Ems = col1", "col2Ems = col2")), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_emstd_tick.Rd b/R/rdeephaven/man/uby_emstd_tick.Rd
index 294eadbc46d..50e45858d35 100644
--- a/R/rdeephaven/man/uby_emstd_tick.Rd
+++ b/R/rdeephaven/man/uby_emstd_tick.Rd
@@ -45,18 +45,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -65,16 +68,16 @@ th <- client$import_table(df)
 
 # compute 10-row exponential moving standard deviation of col1 and col2
 th1 <- th$
-  update_by(uby_emstd_tick(decay_ticks=10, cols=c("col1Emstd = col1", "col2Emstd = col2")))
+  update_by(uby_emstd_tick(decay_ticks = 10, cols = c("col1Emstd = col1", "col2Emstd = col2")))
 
 # compute 5-row exponential moving standard deviation of col1 and col2, grouped by boolCol
 th2 <- th$
-  update_by(uby_emstd_tick(decay_ticks=5, cols=c("col1Emstd = col1", "col2Emstd = col2")), by="boolCol")
+  update_by(uby_emstd_tick(decay_ticks = 5, cols = c("col1Emstd = col1", "col2Emstd = col2")), by = "boolCol")
 
 # compute 20-row exponential moving standard deviation of col1 and col2, grouped by boolCol and parity of col3
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_emstd_tick(decay_ticks=20, cols=c("col1Emstd = col1", "col2Emstd = col2")), by=c("boolCol", "col3Parity"))
+  update_by(uby_emstd_tick(decay_ticks = 20, cols = c("col1Emstd = col1", "col2Emstd = col2")), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_emstd_time.Rd b/R/rdeephaven/man/uby_emstd_time.Rd
index e166d18869d..af605cacafa 100644
--- a/R/rdeephaven/man/uby_emstd_time.Rd
+++ b/R/rdeephaven/man/uby_emstd_time.Rd
@@ -46,18 +46,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -66,16 +69,16 @@ th <- client$import_table(df)
 
 # compute 10-second exponential moving standard deviation of col1 and col2
 th1 <- th$
-  update_by(uby_emstd_time(ts_col="timeCol", decay_time="PT10s", cols=c("col1Emstd = col1", "col2Emstd = col2")))
+  update_by(uby_emstd_time(ts_col = "timeCol", decay_time = "PT10s", cols = c("col1Emstd = col1", "col2Emstd = col2")))
 
 # compute 5-second exponential moving standard deviation of col1 and col2, grouped by boolCol
 th2 <- th$
-  update_by(uby_emstd_time(ts_col="timeCol", decay_time="PT5s", cols=c("col1Emstd = col1", "col2Emstd = col2")), by="boolCol")
+  update_by(uby_emstd_time(ts_col = "timeCol", decay_time = "PT5s", cols = c("col1Emstd = col1", "col2Emstd = col2")), by = "boolCol")
 
 # compute 20-second exponential moving standard deviation of col1 and col2, grouped by boolCol and parity of col3
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_emstd_time(ts_col="timeCol", decay_time="PT20s", cols=c("col1Emstd = col1", "col2Emstd = col2")), by=c("boolCol", "col3Parity"))
+  update_by(uby_emstd_time(ts_col = "timeCol", decay_time = "PT20s", cols = c("col1Emstd = col1", "col2Emstd = col2")), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_forward_fill.Rd b/R/rdeephaven/man/uby_forward_fill.Rd
index 71b074e89da..473941fafd9 100644
--- a/R/rdeephaven/man/uby_forward_fill.Rd
+++ b/R/rdeephaven/man/uby_forward_fill.Rd
@@ -24,18 +24,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = replace(sample(10000, size = 500, replace = TRUE), sample(500, 100), NA),
   col2 = replace(sample(10000, size = 500, replace = TRUE), sample(500, 100), NA),
   col3 = replace(1:500, sample(500, 100), NA)
@@ -48,13 +51,13 @@ th1 <- th$
 
 # forward fill col1 and col2, grouped by boolCol
 th2 <- th$
- update_by(uby_forward_fill(c("col1", "col2")), by="boolCol")
+  update_by(uby_forward_fill(c("col1", "col2")), by = "boolCol")
 
 # forward fill col3, compute parity of col3, and forward fill col1 and col2, grouped by boolCol and parity of col3
 th3 <- th$
   update_by(uby_forward_fill("col3"))$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_forward_fill(c("col1", "col2")), by=c("boolCol", "col3Parity"))
+  update_by(uby_forward_fill(c("col1", "col2")), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_rolling_avg_tick.Rd b/R/rdeephaven/man/uby_rolling_avg_tick.Rd
index 04171aaa17e..131fff4e472 100644
--- a/R/rdeephaven/man/uby_rolling_avg_tick.Rd
+++ b/R/rdeephaven/man/uby_rolling_avg_tick.Rd
@@ -44,18 +44,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -64,16 +67,16 @@ th <- client$import_table(df)
 
 # compute rolling average of col1 and col2, using the previous 5 rows and current row
 th1 <- th$
-  update_by(uby_rolling_avg_tick(cols=c("col1RollAvg = col1", "col2RollAvg = col2"), rev_ticks=6))
+  update_by(uby_rolling_avg_tick(cols = c("col1RollAvg = col1", "col2RollAvg = col2"), rev_ticks = 6))
 
 # compute rolling average of col1 and col2, grouped by boolCol, using previous 5 rows, current row, and following 5 rows
 th2 <- th$
-  update_by(uby_rolling_avg_tick(cols=c("col1RollAvg = col1", "col2RollAvg = col2"), rev_ticks=6, fwd_ticks=5)), by="boolCol")
+  update_by(uby_rolling_avg_tick(cols = c("col1RollAvg = col1", "col2RollAvg = col2"), rev_ticks = 6, fwd_ticks = 5), by = "boolCol")
 
 # compute rolling average of col1 and col2, grouped by boolCol and parity of col3, using current row and following 10 rows
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_rolling_avg_tick(cols=c("col1RollAvg = col1", "col2RollAvg = col2"), rev_ticks=1, fwd_ticks=10)), by=c("boolCol", "col3Parity"))
+  update_by(uby_rolling_avg_tick(cols = c("col1RollAvg = col1", "col2RollAvg = col2"), rev_ticks = 1, fwd_ticks = 10), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_rolling_avg_time.Rd b/R/rdeephaven/man/uby_rolling_avg_time.Rd
index af048148d88..1361aa53b3f 100644
--- a/R/rdeephaven/man/uby_rolling_avg_time.Rd
+++ b/R/rdeephaven/man/uby_rolling_avg_time.Rd
@@ -43,18 +43,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -63,16 +66,16 @@ th <- client$import_table(df)
 
 # compute rolling average of col1 and col2, using the previous 5 seconds
 th1 <- th$
-  update_by(uby_rolling_avg_time(ts_col="timeCol", cols=c("col1RollAvg = col1", "col2RollAvg = col2"), rev_time="PT5s"))
+  update_by(uby_rolling_avg_time(ts_col = "timeCol", cols = c("col1RollAvg = col1", "col2RollAvg = col2"), rev_time = "PT5s"))
 
 # compute rolling average of col1 and col2, grouped by boolCol, using previous 5 seconds, and following 5 seconds
 th2 <- th$
-  update_by(uby_rolling_avg_time(ts_col="timeCol", cols=c("col1RollAvg = col1", "col2RollAvg = col2"), rev_time="PT5s", fwd_ticks="PT5s")), by="boolCol")
+  update_by(uby_rolling_avg_time(ts_col = "timeCol", cols = c("col1RollAvg = col1", "col2RollAvg = col2"), rev_time = "PT5s", fwd_time = "PT5s"), by = "boolCol")
 
 # compute rolling average of col1 and col2, grouped by boolCol and parity of col3, using following 10 seconds
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_rolling_avg_time(ts_col="timeCol", cols=c("col1RollAvg = col1", "col2RollAvg = col2"), rev_time="PT0s", fwd_time="PT10s")), by=c("boolCol", "col3Parity"))
+  update_by(uby_rolling_avg_time(ts_col = "timeCol", cols = c("col1RollAvg = col1", "col2RollAvg = col2"), rev_time = "PT0s", fwd_time = "PT10s"), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_rolling_count_tick.Rd b/R/rdeephaven/man/uby_rolling_count_tick.Rd
index f319e99b895..f97ec165ed2 100644
--- a/R/rdeephaven/man/uby_rolling_count_tick.Rd
+++ b/R/rdeephaven/man/uby_rolling_count_tick.Rd
@@ -44,18 +44,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -64,16 +67,16 @@ th <- client$import_table(df)
 
 # compute rolling count of col1 and col2, using the previous 5 rows and current row
 th1 <- th$
-  update_by(uby_rolling_count_tick(cols=c("col1RollCount = col1", "col2RollCount = col2"), rev_ticks=6))
+  update_by(uby_rolling_count_tick(cols = c("col1RollCount = col1", "col2RollCount = col2"), rev_ticks = 6))
 
 # compute rolling count of col1 and col2, grouped by boolCol, using previous 5 rows, current row, and following 5 rows
 th2 <- th$
-  update_by(uby_rolling_count_tick(cols=c("col1RollCount = col1", "col2RollCount = col2"), rev_ticks=6, fwd_ticks=5), by="boolCol")
+  update_by(uby_rolling_count_tick(cols = c("col1RollCount = col1", "col2RollCount = col2"), rev_ticks = 6, fwd_ticks = 5), by = "boolCol")
 
 # compute rolling count of col1 and col2, grouped by boolCol and parity of col3, using current row and following 10 rows
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_rolling_count_tick(cols=c("col1RollCount = col1", "col2RollCount = col2"), rev_ticks=1, fwd_ticks=10), by=c("boolCol", "col3Parity"))
+  update_by(uby_rolling_count_tick(cols = c("col1RollCount = col1", "col2RollCount = col2"), rev_ticks = 1, fwd_ticks = 10), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_rolling_count_time.Rd b/R/rdeephaven/man/uby_rolling_count_time.Rd
index ecaef05be59..b3b55749155 100644
--- a/R/rdeephaven/man/uby_rolling_count_time.Rd
+++ b/R/rdeephaven/man/uby_rolling_count_time.Rd
@@ -43,18 +43,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -63,16 +66,16 @@ th <- client$import_table(df)
 
 # compute rolling count of col1 and col2, using the previous 5 seconds
 th1 <- th$
-  update_by(uby_rolling_count_time(ts_col="timeCol", cols=c("col1RollCount = col1", "col2RollCount = col2"), rev_time="PT5s"))
+  update_by(uby_rolling_count_time(ts_col = "timeCol", cols = c("col1RollCount = col1", "col2RollCount = col2"), rev_time = "PT5s"))
 
 # compute rolling count of col1 and col2, grouped by boolCol, using previous 5 seconds, and following 5 seconds
 th2 <- th$
-  update_by(uby_rolling_count_time(ts_col="timeCol", cols=c("col1RollCount = col1", "col2RollCount = col2"), rev_time="PT5s", fwd_ticks="PT5s"), by="boolCol")
+  update_by(uby_rolling_count_time(ts_col = "timeCol", cols = c("col1RollCount = col1", "col2RollCount = col2"), rev_time = "PT5s", fwd_time = "PT5s"), by = "boolCol")
 
 # compute rolling count of col1 and col2, grouped by boolCol and parity of col3, using following 10 seconds
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_rolling_count_time(ts_col="timeCol", cols=c("col1RollCount = col1", "col2RollCount = col2"), rev_time="PT0s", fwd_time="PT10s"), by=c("boolCol", "col3Parity"))
+  update_by(uby_rolling_count_time(ts_col = "timeCol", cols = c("col1RollCount = col1", "col2RollCount = col2"), rev_time = "PT0s", fwd_time = "PT10s"), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_rolling_group_tick.Rd b/R/rdeephaven/man/uby_rolling_group_tick.Rd
index fde8d3aacfe..c036ef80405 100644
--- a/R/rdeephaven/man/uby_rolling_group_tick.Rd
+++ b/R/rdeephaven/man/uby_rolling_group_tick.Rd
@@ -44,18 +44,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -64,12 +67,12 @@ th <- client$import_table(df)
 
 # compute rolling group of col1 and col2, grouped by boolCol, using previous 5 rows, current row, and following 5 rows
 th1 <- th$
-  update_by(uby_rolling_group_tick(cols=c("col1RollGroup = col1", "col2RollGroup = col2"), rev_ticks=6, fwd_ticks=5)), by="boolCol")
+  update_by(uby_rolling_group_tick(cols = c("col1RollGroup = col1", "col2RollGroup = col2"), rev_ticks = 6, fwd_ticks = 5), by = "boolCol")
 
 # compute rolling group of col1 and col2, grouped by boolCol and parity of col3, using current row and following 10 rows
 th2 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_rolling_group_tick(cols=c("col1RollGroup = col1", "col2RollGroup = col2"), rev_ticks=1, fwd_ticks=10)), by=c("boolCol", "col3Parity"))
+  update_by(uby_rolling_group_tick(cols = c("col1RollGroup = col1", "col2RollGroup = col2"), rev_ticks = 1, fwd_ticks = 10), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_rolling_group_time.Rd b/R/rdeephaven/man/uby_rolling_group_time.Rd
index 74b447c1816..62d578bdc2c 100644
--- a/R/rdeephaven/man/uby_rolling_group_time.Rd
+++ b/R/rdeephaven/man/uby_rolling_group_time.Rd
@@ -43,18 +43,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -63,12 +66,12 @@ th <- client$import_table(df)
 
 # compute rolling group of col1 and col2, grouped by boolCol, using previous 5 seconds, and following 5 seconds
 th1 <- th$
-  update_by(uby_rolling_group_time(ts_col="timeCol", cols=c("col1RollGroup = col1", "col2RollGroup = col2"), rev_time="PT5s", fwd_ticks="PT5s")), by="boolCol")
+  update_by(uby_rolling_group_time(ts_col = "timeCol", cols = c("col1RollGroup = col1", "col2RollGroup = col2"), rev_time = "PT5s", fwd_time = "PT5s"), by = "boolCol")
 
 # compute rolling group of col1 and col2, grouped by boolCol and parity of col3, using following 10 seconds
 th2 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_rolling_group_time(ts_col="timeCol", cols=c("col1RollGroup = col1", "col2RollGroup = col2"), rev_time="PT0s", fwd_time="PT10s")), by=c("boolCol", "col3Parity"))
+  update_by(uby_rolling_group_time(ts_col = "timeCol", cols = c("col1RollGroup = col1", "col2RollGroup = col2"), rev_time = "PT0s", fwd_time = "PT10s"), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_rolling_max_tick.Rd b/R/rdeephaven/man/uby_rolling_max_tick.Rd
index cd90d3f9578..21c0d1a5ae0 100644
--- a/R/rdeephaven/man/uby_rolling_max_tick.Rd
+++ b/R/rdeephaven/man/uby_rolling_max_tick.Rd
@@ -44,18 +44,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -64,16 +67,16 @@ th <- client$import_table(df)
 
 # compute rolling maximum of col1 and col2, using the previous 5 rows and current row
 th1 <- th$
-  update_by(uby_rolling_max_tick(cols=c("col1RollMax = col1", "col2RollMax = col2"), rev_ticks=6))
+  update_by(uby_rolling_max_tick(cols = c("col1RollMax = col1", "col2RollMax = col2"), rev_ticks = 6))
 
 # compute rolling maximum of col1 and col2, grouped by boolCol, using previous 5 rows, current row, and following 5 rows
 th2 <- th$
-  update_by(uby_rolling_max_tick(cols=c("col1RollMax = col1", "col2RollMax = col2"), rev_ticks=6, fwd_ticks=5)), by="boolCol")
+  update_by(uby_rolling_max_tick(cols = c("col1RollMax = col1", "col2RollMax = col2"), rev_ticks = 6, fwd_ticks = 5), by = "boolCol")
 
 # compute rolling maximum of col1 and col2, grouped by boolCol and parity of col3, using current row and following 10 rows
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_rolling_max_tick(cols=c("col1RollMax = col1", "col2RollMax = col2"), rev_ticks=1, fwd_ticks=10)), by=c("boolCol", "col3Parity"))
+  update_by(uby_rolling_max_tick(cols = c("col1RollMax = col1", "col2RollMax = col2"), rev_ticks = 1, fwd_ticks = 10), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_rolling_max_time.Rd b/R/rdeephaven/man/uby_rolling_max_time.Rd
index d259bc28db8..71ae9e7389d 100644
--- a/R/rdeephaven/man/uby_rolling_max_time.Rd
+++ b/R/rdeephaven/man/uby_rolling_max_time.Rd
@@ -43,18 +43,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -63,16 +66,16 @@ th <- client$import_table(df)
 
 # compute rolling maximum of col1 and col2, using the previous 5 seconds
 th1 <- th$
-  update_by(uby_rolling_max_time(ts_col="timeCol", cols=c("col1RollMax = col1", "col2RollMax = col2"), rev_time="PT5s"))
+  update_by(uby_rolling_max_time(ts_col = "timeCol", cols = c("col1RollMax = col1", "col2RollMax = col2"), rev_time = "PT5s"))
 
 # compute rolling maximum of col1 and col2, grouped by boolCol, using previous 5 seconds, and following 5 seconds
 th2 <- th$
-  update_by(uby_rolling_max_time(ts_col="timeCol", cols=c("col1RollMax = col1", "col2RollMax = col2"), rev_time="PT5s", fwd_ticks="PT5s")), by="boolCol")
+  update_by(uby_rolling_max_time(ts_col = "timeCol", cols = c("col1RollMax = col1", "col2RollMax = col2"), rev_time = "PT5s", fwd_time = "PT5s"), by = "boolCol")
 
 # compute rolling maximum of col1 and col2, grouped by boolCol and parity of col3, using following 10 seconds
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_rolling_max_time(ts_col="timeCol", cols=c("col1RollMax = col1", "col2RollMax = col2"), rev_time="PT0s", fwd_time="PT10s")), by=c("boolCol", "col3Parity"))
+  update_by(uby_rolling_max_time(ts_col = "timeCol", cols = c("col1RollMax = col1", "col2RollMax = col2"), rev_time = "PT0s", fwd_time = "PT10s"), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_rolling_min_tick.Rd b/R/rdeephaven/man/uby_rolling_min_tick.Rd
index fb659d40d48..f765c36004a 100644
--- a/R/rdeephaven/man/uby_rolling_min_tick.Rd
+++ b/R/rdeephaven/man/uby_rolling_min_tick.Rd
@@ -44,18 +44,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -64,16 +67,16 @@ th <- client$import_table(df)
 
 # compute rolling minimum of col1 and col2, using the previous 5 rows and current row
 th1 <- th$
-  update_by(uby_rolling_min_tick(cols=c("col1RollMin = col1", "col2RollMin = col2"), rev_ticks=6))
+  update_by(uby_rolling_min_tick(cols = c("col1RollMin = col1", "col2RollMin = col2"), rev_ticks = 6))
 
 # compute rolling minimum of col1 and col2, grouped by boolCol, using previous 5 rows, current row, and following 5 rows
 th2 <- th$
-  update_by(uby_rolling_min_tick(cols=c("col1RollMin = col1", "col2RollMin = col2"), rev_ticks=6, fwd_ticks=5)), by="boolCol")
+  update_by(uby_rolling_min_tick(cols = c("col1RollMin = col1", "col2RollMin = col2"), rev_ticks = 6, fwd_ticks = 5), by = "boolCol")
 
 # compute rolling minimum of col1 and col2, grouped by boolCol and parity of col3, using current row and following 10 rows
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_rolling_min_tick(cols=c("col1RollMin = col1", "col2RollMin = col2"), rev_ticks=1, fwd_ticks=10)), by=c("boolCol", "col3Parity"))
+  update_by(uby_rolling_min_tick(cols = c("col1RollMin = col1", "col2RollMin = col2"), rev_ticks = 1, fwd_ticks = 10), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_rolling_min_time.Rd b/R/rdeephaven/man/uby_rolling_min_time.Rd
index 1fb73cefae3..49a12e1a989 100644
--- a/R/rdeephaven/man/uby_rolling_min_time.Rd
+++ b/R/rdeephaven/man/uby_rolling_min_time.Rd
@@ -43,18 +43,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -63,16 +66,16 @@ th <- client$import_table(df)
 
 # compute rolling minimum of col1 and col2, using the previous 5 seconds
 th1 <- th$
-  update_by(uby_rolling_min_time(ts_col="timeCol", cols=c("col1RollMin = col1", "col2RollMin = col2"), rev_time="PT5s"))
+  update_by(uby_rolling_min_time(ts_col = "timeCol", cols = c("col1RollMin = col1", "col2RollMin = col2"), rev_time = "PT5s"))
 
 # compute rolling minimum of col1 and col2, grouped by boolCol, using previous 5 seconds, and following 5 seconds
 th2 <- th$
-  update_by(uby_rolling_min_time(ts_col="timeCol", cols=c("col1RollMin = col1", "col2RollMin = col2"), rev_time="PT5s", fwd_ticks="PT5s")), by="boolCol")
+  update_by(uby_rolling_min_time(ts_col = "timeCol", cols = c("col1RollMin = col1", "col2RollMin = col2"), rev_time = "PT5s", fwd_time = "PT5s"), by = "boolCol")
 
 # compute rolling minimum of col1 and col2, grouped by boolCol and parity of col3, using following 10 seconds
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_rolling_min_time(ts_col="timeCol", cols=c("col1RollMin = col1", "col2RollMin = col2"), rev_time="PT0s", fwd_time="PT10s")), by=c("boolCol", "col3Parity"))
+  update_by(uby_rolling_min_time(ts_col = "timeCol", cols = c("col1RollMin = col1", "col2RollMin = col2"), rev_time = "PT0s", fwd_time = "PT10s"), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_rolling_prod_tick.Rd b/R/rdeephaven/man/uby_rolling_prod_tick.Rd
index 40afb461f45..f5b02b59e8b 100644
--- a/R/rdeephaven/man/uby_rolling_prod_tick.Rd
+++ b/R/rdeephaven/man/uby_rolling_prod_tick.Rd
@@ -44,18 +44,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -64,16 +67,16 @@ th <- client$import_table(df)
 
 # compute rolling product of col1 and col2, using the previous 5 rows and current row
 th1 <- th$
-  update_by(uby_rolling_prod_tick(cols=c("col1RollProd = col1", "col2RollProd = col2"), rev_ticks=6))
+  update_by(uby_rolling_prod_tick(cols = c("col1RollProd = col1", "col2RollProd = col2"), rev_ticks = 6))
 
 # compute rolling product of col1 and col2, grouped by boolCol, using previous 5 rows, current row, and following 5 rows
 th2 <- th$
-  update_by(uby_rolling_prod_tick(cols=c("col1RollProd = col1", "col2RollProd = col2"), rev_ticks=6, fwd_ticks=5)), by="boolCol")
+  update_by(uby_rolling_prod_tick(cols = c("col1RollProd = col1", "col2RollProd = col2"), rev_ticks = 6, fwd_ticks = 5), by = "boolCol")
 
 # compute rolling product of col1 and col2, grouped by boolCol and parity of col3, using current row and following 10 rows
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_rolling_prod_tick(cols=c("col1RollProd = col1", "col2RollProd = col2"), rev_ticks=1, fwd_ticks=10)), by=c("boolCol", "col3Parity"))
+  update_by(uby_rolling_prod_tick(cols = c("col1RollProd = col1", "col2RollProd = col2"), rev_ticks = 1, fwd_ticks = 10), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_rolling_prod_time.Rd b/R/rdeephaven/man/uby_rolling_prod_time.Rd
index 84af4a1e354..d0dfe2f92d3 100644
--- a/R/rdeephaven/man/uby_rolling_prod_time.Rd
+++ b/R/rdeephaven/man/uby_rolling_prod_time.Rd
@@ -43,18 +43,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -63,16 +66,16 @@ th <- client$import_table(df)
 
 # compute rolling product of col1 and col2, using the previous 5 seconds
 th1 <- th$
-  update_by(uby_rolling_prod_time(ts_col="timeCol", cols=c("col1RollProd = col1", "col2RollProd = col2"), rev_time="PT5s"))
+  update_by(uby_rolling_prod_time(ts_col = "timeCol", cols = c("col1RollProd = col1", "col2RollProd = col2"), rev_time = "PT5s"))
 
 # compute rolling product of col1 and col2, grouped by boolCol, using previous 5 seconds, and following 5 seconds
 th2 <- th$
-  update_by(uby_rolling_prod_time(ts_col="timeCol", cols=c("col1RollProd = col1", "col2RollProd = col2"), rev_time="PT5s", fwd_ticks="PT5s")), by="boolCol")
+  update_by(uby_rolling_prod_time(ts_col = "timeCol", cols = c("col1RollProd = col1", "col2RollProd = col2"), rev_time = "PT5s", fwd_time = "PT5s"), by = "boolCol")
 
 # compute rolling product of col1 and col2, grouped by boolCol and parity of col3, using following 10 seconds
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_rolling_prod_time(ts_col="timeCol", cols=c("col1RollProd = col1", "col2RollProd = col2"), rev_time="PT0s", fwd_time="PT10s")), by=c("boolCol", "col3Parity"))
+  update_by(uby_rolling_prod_time(ts_col = "timeCol", cols = c("col1RollProd = col1", "col2RollProd = col2"), rev_time = "PT0s", fwd_time = "PT10s"), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_rolling_std_tick.Rd b/R/rdeephaven/man/uby_rolling_std_tick.Rd
index b41c629f069..889d7047a8f 100644
--- a/R/rdeephaven/man/uby_rolling_std_tick.Rd
+++ b/R/rdeephaven/man/uby_rolling_std_tick.Rd
@@ -44,18 +44,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -64,16 +67,16 @@ th <- client$import_table(df)
 
 # compute rolling standard deviation of col1 and col2, using the previous 5 rows and current row
 th1 <- th$
-  update_by(uby_rolling_std_tick(cols=c("col1RollStd = col1", "col2RollStd = col2"), rev_ticks=6))
+  update_by(uby_rolling_std_tick(cols = c("col1RollStd = col1", "col2RollStd = col2"), rev_ticks = 6))
 
 # compute rolling standard deviation of col1 and col2, grouped by boolCol, using previous 5 rows, current row, and following 5 rows
 th2 <- th$
-  update_by(uby_rolling_std_tick(cols=c("col1RollStd = col1", "col2RollStd = col2"), rev_ticks=6, fwd_ticks=5), by="boolCol")
+  update_by(uby_rolling_std_tick(cols = c("col1RollStd = col1", "col2RollStd = col2"), rev_ticks = 6, fwd_ticks = 5), by = "boolCol")
 
 # compute rolling standard deviation of col1 and col2, grouped by boolCol and parity of col3, using current row and following 10 rows
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_rolling_std_tick(cols=c("col1RollStd = col1", "col2RollStd = col2"), rev_ticks=1, fwd_ticks=10), by=c("boolCol", "col3Parity"))
+  update_by(uby_rolling_std_tick(cols = c("col1RollStd = col1", "col2RollStd = col2"), rev_ticks = 1, fwd_ticks = 10), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_rolling_std_time.Rd b/R/rdeephaven/man/uby_rolling_std_time.Rd
index 4dab0224e20..88d1a0e105b 100644
--- a/R/rdeephaven/man/uby_rolling_std_time.Rd
+++ b/R/rdeephaven/man/uby_rolling_std_time.Rd
@@ -43,18 +43,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -63,16 +66,16 @@ th <- client$import_table(df)
 
 # compute rolling standard deviation of col1 and col2, using the previous 5 seconds
 th1 <- th$
-  update_by(uby_rolling_std_time(ts_col="timeCol", cols=c("col1RollStd = col1", "col2RollStd = col2"), rev_time="PT5s"))
+  update_by(uby_rolling_std_time(ts_col = "timeCol", cols = c("col1RollStd = col1", "col2RollStd = col2"), rev_time = "PT5s"))
 
 # compute rolling standard deviation of col1 and col2, grouped by boolCol, using previous 5 seconds, and following 5 seconds
 th2 <- th$
-  update_by(uby_rolling_std_time(ts_col="timeCol", cols=c("col1RollStd = col1", "col2RollStd = col2"), rev_time="PT5s", fwd_ticks="PT5s"), by="boolCol")
+  update_by(uby_rolling_std_time(ts_col = "timeCol", cols = c("col1RollStd = col1", "col2RollStd = col2"), rev_time = "PT5s", fwd_time = "PT5s"), by = "boolCol")
 
 # compute rolling standard deviation of col1 and col2, grouped by boolCol and parity of col3, using following 10 seconds
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_rolling_std_time(ts_col="timeCol", cols=c("col1RollStd = col1", "col2RollStd = col2"), rev_time="PT0s", fwd_time="PT10s"), by=c("boolCol", "col3Parity"))
+  update_by(uby_rolling_std_time(ts_col = "timeCol", cols = c("col1RollStd = col1", "col2RollStd = col2"), rev_time = "PT0s", fwd_time = "PT10s"), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_rolling_sum_tick.Rd b/R/rdeephaven/man/uby_rolling_sum_tick.Rd
index 31df03a4118..ccf14d79d31 100644
--- a/R/rdeephaven/man/uby_rolling_sum_tick.Rd
+++ b/R/rdeephaven/man/uby_rolling_sum_tick.Rd
@@ -44,18 +44,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -64,16 +67,16 @@ th <- client$import_table(df)
 
 # compute rolling sum of col1 and col2, using the previous 5 rows and current row
 th1 <- th$
-  update_by(uby_rolling_sum_tick(cols=c("col1RollSum = col1", "col2RollSum = col2"), rev_ticks=6))
+  update_by(uby_rolling_sum_tick(cols = c("col1RollSum = col1", "col2RollSum = col2"), rev_ticks = 6))
 
 # compute rolling sum of col1 and col2, grouped by boolCol, using previous 5 rows, current row, and following 5 rows
 th2 <- th$
-  update_by(uby_rolling_sum_tick(cols=c("col1RollSum = col1", "col2RollSum = col2"), rev_ticks=6, fwd_ticks=5)), by="boolCol")
+  update_by(uby_rolling_sum_tick(cols = c("col1RollSum = col1", "col2RollSum = col2"), rev_ticks = 6, fwd_ticks = 5), by = "boolCol")
 
 # compute rolling sum of col1 and col2, grouped by boolCol and parity of col3, using current row and following 10 rows
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_rolling_sum_tick(cols=c("col1RollSum = col1", "col2RollSum = col2"), rev_ticks=1, fwd_ticks=10)), by=c("boolCol", "col3Parity"))
+  update_by(uby_rolling_sum_tick(cols = c("col1RollSum = col1", "col2RollSum = col2"), rev_ticks = 1, fwd_ticks = 10), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_rolling_sum_time.Rd b/R/rdeephaven/man/uby_rolling_sum_time.Rd
index b8e028ebc5b..1111e4dd923 100644
--- a/R/rdeephaven/man/uby_rolling_sum_time.Rd
+++ b/R/rdeephaven/man/uby_rolling_sum_time.Rd
@@ -43,18 +43,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -63,16 +66,16 @@ th <- client$import_table(df)
 
 # compute rolling sum of col1 and col2, using the previous 5 seconds
 th1 <- th$
-  update_by(uby_rolling_sum_time(ts_col="timeCol", cols=c("col1RollSum = col1", "col2RollSum = col2"), rev_time="PT5s"))
+  update_by(uby_rolling_sum_time(ts_col = "timeCol", cols = c("col1RollSum = col1", "col2RollSum = col2"), rev_time = "PT5s"))
 
 # compute rolling sum of col1 and col2, grouped by boolCol, using previous 5 seconds, and following 5 seconds
 th2 <- th$
-  update_by(uby_rolling_sum_time(ts_col="timeCol", cols=c("col1RollSum = col1", "col2RollSum = col2"), rev_time="PT5s", fwd_ticks="PT5s")), by="boolCol")
+  update_by(uby_rolling_sum_time(ts_col = "timeCol", cols = c("col1RollSum = col1", "col2RollSum = col2"), rev_time = "PT5s", fwd_time = "PT5s"), by = "boolCol")
 
 # compute rolling sum of col1 and col2, grouped by boolCol and parity of col3, using following 10 seconds
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_rolling_sum_time(ts_col="timeCol", cols=c("col1RollSum = col1", "col2RollSum = col2"), rev_time="PT0s", fwd_time="PT10s")), by=c("boolCol", "col3Parity"))
+  update_by(uby_rolling_sum_time(ts_col = "timeCol", cols = c("col1RollSum = col1", "col2RollSum = col2"), rev_time = "PT0s", fwd_time = "PT10s"), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_rolling_wavg_tick.Rd b/R/rdeephaven/man/uby_rolling_wavg_tick.Rd
index b4c2df54117..98c72197788 100644
--- a/R/rdeephaven/man/uby_rolling_wavg_tick.Rd
+++ b/R/rdeephaven/man/uby_rolling_wavg_tick.Rd
@@ -46,18 +46,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -66,16 +69,16 @@ th <- client$import_table(df)
 
 # compute rolling weighted average of col1 and col2, weighted by col3, using the previous 5 rows and current row
 th1 <- th$
-  update_by(uby_rolling_wavg_tick(wcol="col3", cols=c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_ticks=6))
+  update_by(uby_rolling_wavg_tick(wcol = "col3", cols = c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_ticks = 6))
 
 # compute rolling weighted average of col1 and col2, weighted by col3, grouped by boolCol, using previous 5 rows, current row, and following 5 rows
 th2 <- th$
-  update_by(uby_rolling_wavg_tick(wcol="col3", cols=c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_ticks=6, fwd_ticks=5), by="boolCol")
+  update_by(uby_rolling_wavg_tick(wcol = "col3", cols = c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_ticks = 6, fwd_ticks = 5), by = "boolCol")
 
 # compute rolling weighted average of col1 and col2, weighted by col3, grouped by boolCol and parity of col3, using current row and following 10 rows
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_rolling_wavg_tick(wcol="col3", cols=c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_ticks=1, fwd_ticks=10), by=c("boolCol", "col3Parity"))
+  update_by(uby_rolling_wavg_tick(wcol = "col3", cols = c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_ticks = 1, fwd_ticks = 10), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/man/uby_rolling_wavg_time.Rd b/R/rdeephaven/man/uby_rolling_wavg_time.Rd
index a8dcc5f98ae..09cb4269eda 100644
--- a/R/rdeephaven/man/uby_rolling_wavg_time.Rd
+++ b/R/rdeephaven/man/uby_rolling_wavg_time.Rd
@@ -45,18 +45,21 @@ This function, like other Deephaven \code{uby} functions, is a generator functio
 function called an \code{\link{UpdateByOp}} intended to be used in a call to \code{update_by()}. This detail is typically
 hidden from the user. However, it is important to understand this detail for debugging purposes, as the output of
 a \code{uby} function can otherwise seem unexpected.
+
+For more information, see the vignette on \code{uby} functions by running
+\code{vignette("update_by")}.
 }
 \examples{
 \dontrun{
 library(rdeephaven)
 
 # connecting to Deephaven server
-client <- Client$new("localhost:10000", auth_type="psk", auth_token="my_secret_token")
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
 
 # create data frame, push to server, retrieve TableHandle
 df <- data.frame(
   timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
-  boolCol = sample(c(TRUE,FALSE), 500, TRUE),
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
   col1 = sample(10000, size = 500, replace = TRUE),
   col2 = sample(10000, size = 500, replace = TRUE),
   col3 = 1:500
@@ -65,16 +68,16 @@ th <- client$import_table(df)
 
 # compute rolling weighted average of col1 and col2, weighted by col3, using the previous 5 seconds
 th1 <- th$
-  update_by(uby_rolling_wavg_time(ts_col="timeCol", wcol="col3", cols=c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_time="PT5s"))
+  update_by(uby_rolling_wavg_time(ts_col = "timeCol", wcol = "col3", cols = c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_time = "PT5s"))
 
 # compute rolling weighted average of col1 and col2, weighted by col3, grouped by boolCol, using previous 5 seconds, and following 5 seconds
 th2 <- th$
-  update_by(uby_rolling_wavg_time(ts_col="timeCol", wcol="col3", cols=c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_time="PT5s", fwd_ticks="PT5s"), by="boolCol")
+  update_by(uby_rolling_wavg_time(ts_col = "timeCol", wcol = "col3", cols = c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_time = "PT5s", fwd_time = "PT5s"), by = "boolCol")
 
 # compute rolling weighted average of col1 and col2, weighted by col3, grouped by boolCol and parity of col3, using following 10 seconds
 th3 <- th$
   update("col3Parity = col3 \% 2")$
-  update_by(uby_rolling_wavg_time(ts_col="timeCol", wcol="col3", cols=c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_time="PT0s", fwd_time="PT10s"), by=c("boolCol", "col3Parity"))
+  update_by(uby_rolling_wavg_time(ts_col = "timeCol", wcol = "col3", cols = c("col1RollWAvg = col1", "col2RollWAvg = col2"), rev_time = "PT0s", fwd_time = "PT10s"), by = c("boolCol", "col3Parity"))
 
 client$close()
 }
diff --git a/R/rdeephaven/pkgdown/_pkgdown.yml b/R/rdeephaven/pkgdown/_pkgdown.yml
new file mode 100644
index 00000000000..14c104a8548
--- /dev/null
+++ b/R/rdeephaven/pkgdown/_pkgdown.yml
@@ -0,0 +1,47 @@
+---
+url: https://deephaven.io/core/rdoc/
+
+template:
+  bootstrap: 5
+  bootswatch: cosmo
+  
+navbar:
+  structure:
+    left:  [reference, articles]
+    right: [search, github]
+  components:
+    articles:
+      text: Articles
+      menu:
+      - text: Overview
+        href: articles/rdeephaven.html
+      - text: Aggregations with agg_by
+        href: articles/agg_by.html
+      - text: Moving Operations with update_by
+        href: articles/update_by.html
+  
+reference:
+  - title: Core API
+    contents:
+    - Client
+    - TableHandle
+    - AggOp
+    - UpdateByOp
+
+  - title: AggBy Operations
+    contents:
+    - starts_with("agg_")
+  
+  - title: UpdateBy Operations
+    contents:
+    - starts_with("uby_")
+
+  - title: Utility Functions
+    contents:
+    - merge_tables
+    - op_control
+  
+  - title: Other Data Types
+    contents:
+    - OperationControl
+
diff --git a/R/rdeephaven/vignettes/.gitignore b/R/rdeephaven/vignettes/.gitignore
new file mode 100644
index 00000000000..097b241637d
--- /dev/null
+++ b/R/rdeephaven/vignettes/.gitignore
@@ -0,0 +1,2 @@
+*.html
+*.R
diff --git a/R/rdeephaven/vignettes/agg_by.Rmd b/R/rdeephaven/vignettes/agg_by.Rmd
new file mode 100644
index 00000000000..0fd5bab3f38
--- /dev/null
+++ b/R/rdeephaven/vignettes/agg_by.Rmd
@@ -0,0 +1,120 @@
+---
+title: "Aggregations with agg_by"
+output: rmarkdown::html_vignette
+vignette: >
+  %\VignetteIndexEntry{agg_by}
+  %\VignetteEngine{knitr::rmarkdown}
+  %\VignetteEncoding{UTF-8}
+---
+
+```{r, include = FALSE}
+knitr::opts_chunk$set(
+  collapse = TRUE,
+  comment = "#>"
+)
+```
+
+Table aggregations are a quintessential feature of Deephaven. You can apply as
+many aggregations as needed to static tables _or_ streaming tables, and if the
+parent tables are streaming, the resulting aggregated tables will update
+alongside their parent tables. It is also very easy to perform _grouped_
+aggregations, which allow you to aggregate tables on a per-group basis.
+
+## Applying aggregations to a table
+
+There are two methods for performing aggregations on a table, `agg_by()` and
+`agg_all_by()`. `agg_by()` allows you to perform many aggregations on specified
+columns, while `agg_all_by()` allows you to perform a single aggregation to
+every non-grouping column in the table. Both methods have an optional `by`
+parameter that is used to specify grouping columns. Here are some details on
+each method:
+
+- `TableHandle$agg_by(aggs, by)`: Creates a new table containing grouping
+  columns and grouped data.
+  The resulting grouped data is defined by the aggregation(s) specified.
+- `TableHandle$agg_all_by(agg, by)`: Creates a new table containing grouping
+  columns and grouped data. The resulting grouped data is defined by the
+  aggregation specified. This method applies the aggregation to all non-grouping
+  columns of the table, so it can only accept one aggregation at a time.
+
+The `agg_by()` and `agg_all_by()` methods themselves do not know anything about
+the columns on which you want to perform aggregations. Rather, the desired
+columns are passed to individual `agg` functions, enabling you to apply various
+kinds of aggregations to different columns or groups of columns as needed.
+
+## `agg` functions
+
+`agg` functions are used to perform aggregation calculations on grouped data by
+passing them to `agg_by()` or `agg_all_by()`. These functions are _generators_,
+meaning they return _functions_ that the Deephaven engine knows how to
+interpret. We call the functions that they return `AggOp`s. See `?AggOp` for
+more information. These AggOps are not R-level functions, but
+Deephaven-specific data types that perform all of the intensive calculations.
+Here is a list of all `agg` functions available in Deephaven:
+
+- `agg_first()`
+- `agg_last()`
+- `agg_min()`
+- `agg_max()`
+- `agg_sum()`
+- `agg_abs_sum()`
+- `agg_avg()`
+- `agg_w_avg()`
+- `agg_median()`
+- `agg_var()`
+- `agg_std()`
+- `agg_percentile()`
+- `agg_count()`
+
+For more details on each aggregation function, see the reference documentation
+by running `?agg_first`, `?agg_last`, etc.
+
+## An Example
+```{r, eval=FALSE}
+library(rdeephaven)
+
+# connecting to Deephaven server
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
+
+# create data frame, push to server, retrieve TableHandle
+df <- data.frame(
+  X = c("A", "B", "A", "C", "B", "A", "B", "B", "C"),
+  Y = c("M", "N", "O", "N", "P", "M", "O", "P", "M"),
+  Number1 = c(100, -44, 49, 11, -66, 50, 29, 18, -70),
+  Number2 = c(-55, 76, 20, 130, 230, -50, 73, 137, 214)
+)
+th <- client$import_table(df)
+
+# get first and last elements of each column
+th1 <- th$
+  agg_by(
+  agg_first(c("XFirst = X", "YFirst = Y", "Number1First = Number1", "Number2First = Number2")),
+  agg_last(c("XLast = X", "YLast = Y", "Number1Last = Number1", "Number2Last = Number2"))
+)
+
+# compute mean and standard deviation of Number1 and Number2, grouped by X
+th2 <- th$
+  agg_by(
+  c(
+    agg_avg(c("Number1Avg = Number1", "Number2Avg = Number2")),
+    agg_std(c("Number1Std = Number1", "Number2Std = Number2"))
+  ),
+  by = "X"
+)
+
+# compute maximum of all non-grouping columns, grouped by X and Y
+th3 <- th$
+  agg_all_by(agg_max(), by = c("X", "Y"))
+
+# compute minimum and maximum of Number1 and Number2, respectively, grouped by Y
+th4 <- th$
+  agg_by(
+  c(
+    agg_min("Number1Min = Number1"),
+    agg_max("Number2Max = Number2")
+  ),
+  by = "Y"
+)
+
+client$close()
+```
diff --git a/R/rdeephaven/vignettes/rdeephaven.Rmd b/R/rdeephaven/vignettes/rdeephaven.Rmd
new file mode 100644
index 00000000000..b19df258a6c
--- /dev/null
+++ b/R/rdeephaven/vignettes/rdeephaven.Rmd
@@ -0,0 +1,130 @@
+---
+title: "Overview of the Deephaven Core R Client"
+output: rmarkdown::html_vignette
+vignette: >
+  %\VignetteIndexEntry{rdeephaven}
+  %\VignetteEngine{knitr::rmarkdown}
+  %\VignetteEncoding{UTF-8}
+---
+
+```{r, include = FALSE}
+knitr::opts_chunk$set(
+  collapse = TRUE,
+  comment = "#>"
+)
+```
+
+The Deephaven Core R Client provides an R interface to Deephaven's powerful
+real-time data engine, [_Deephaven Core_](https://deephaven.io/community/).
+To use this package, you must have a Deephaven server running and be able to 
+connect to it. For more information on how to set up a Deephaven server, see the 
+documentation [here](https://deephaven.io/core/docs/tutorials/quickstart/).
+
+## Building blocks of the Deephaven R Client
+
+There are two primary R classes that make up the Deephaven R Client, the `Client` 
+class and the `TableHandle` class. See `?Client` and `?TableHandle` for more
+information. The Client class is used to establish a connection to the
+Deephaven server with its constructor `Client$new()`, and to send server 
+requests, such as running a script via `run_script()`, or pushing local data to 
+the server via `import_table()`. Basic usage of the Client class may look 
+something like this:
+
+```{r, eval=FALSE}
+library(rdeephaven)
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
+```
+
+Many of these server requests end up creating or modifying tables that live on 
+the server. To keep track of these tables, the R client retrieves references to 
+them, and wraps these references in TableHandle objects. These TableHandles 
+have a host of methods that mirror server-side table operations, such as
+`head()`, `tail()`, `update_by()`, and so on. So, you can typically use
+TableHandles _as if_ they are tables themselves, and all of the corresponding
+methods that you call on them will be executed on the server. Here is a simple
+example of pushing data to the server, retrieving a TableHandle to the resulting
+table, and applying some basic table operations to the table:
+
+```{r, eval=FALSE}
+df1 <- data.frame(x = 1:10, y = 11:20)
+th1 <- client$import_table(df1)
+
+th2 <- th1$
+  update("z = x + y")$
+  where("z % 4 == 0")
+```
+
+TableHandles also support common functional methods for converting server-side 
+Deephaven tables to R objects stored in local memory such as `as.data.frame()`, 
+`as_tibble()`, and `as_arrow_table()`. Here's an example of converting the table 
+created above to an R data frame and verifying that other functional methods
+work as expected:
+
+```{r, eval=FALSE}
+df2 <- as.data.frame(th2)
+
+print(nrow(th2) == nrow(df2))
+print(ncol(th2) == ncol(df2))
+print(dim(th2) == dim(df2))
+print(all(as.data.frame(head(th2, 2)) == head(df2, 2)))
+print(all(as.data.frame(tail(th2, 2)) == tail(df2, 2)))
+```
+
+For more information on these classes and all of their methods, see the
+reference documentation by running `?Client` or `?TableHandle`.
+
+## Real-time data analysis
+
+Since TableHandles are references to tables living on the Deephaven server, they 
+may refer to streaming tables, or tables that are receiving new data
+periodically (typically once per second). Here's a simple example of creating a
+table that adds a new row every second:
+
+```{r, eval=FALSE}
+th3 <- client$time_table("PT1s")$
+  update(c("X = ii", "Y = sin(X)"))
+```
+
+R objects like data frames or dplyr tibbles do not have this streaming property --
+they are always static objects stored in memory. However, a TableHandle
+referring to a streaming table may be converted to a data frame or tibble at any 
+time, and the resulting object will be a snapshot of the table at the time of 
+conversion. This means that you can use the Deephaven R Client to perform
+real-time data analysis on streaming data! Here, we make a simple plot of the
+ticking table, and call it three times to demonstrate the dynamic nature of the
+table:
+
+```{r, eval=FALSE}
+plot(as.data.frame(th3)$X, as.data.frame(th3)$Y, type = "l")
+Sys.sleep(5)
+plot(as.data.frame(th3)$X, as.data.frame(th3)$Y, type = "l")
+Sys.sleep(5)
+plot(as.data.frame(th3)$X, as.data.frame(th3)$Y, type = "l")
+```
+
+There are performance and memory considerations when pulling data from the
+server, so it is best to use the provided TableHandle methods to perform as much
+of your analysis as possible on the server, and to only pull the data when
+something _must_ be done in R, like plotting or writing to a local file.
+
+## Powerful table operations
+
+Much of the power of Deephaven's suite of table operations is achieved through 
+the use of the `update_by()` and `agg_by()` methods. These table methods are
+important enough to warrant their own documentation pages, accessible by running
+`vignette("update_by")` or `vignette("agg_by")`. These methods
+come with their own suites of functions, prefixed with `agg_` and `uby_`
+respectively, that are listed in their documentation pages. Running
+`ls("package:rdeephaven")` will reveal that most of the functions included in
+this package are for these methods, so it is important to get acquainted with
+them.
+
+## Getting help
+While we've done our best to provide good documentation for this package, you
+may find you need more help than what this documentation has to offer. Please
+visit the official Deephaven Community Core
+[documentation](https://deephaven.io/core/docs/tutorials/quickstart/) to learn
+more about Deephaven and to find comprehensive examples. Additionally, feel free
+to reach out to us on the Deephaven
+[Community Slack channel](https://deephaven.io/slack) with any questions. We
+hope you find real-time data analysis in R to be as easy as possible.
diff --git a/R/rdeephaven/vignettes/update_by.Rmd b/R/rdeephaven/vignettes/update_by.Rmd
new file mode 100644
index 00000000000..2704b46fed0
--- /dev/null
+++ b/R/rdeephaven/vignettes/update_by.Rmd
@@ -0,0 +1,133 @@
+---
+title: "Moving Operations with update_by"
+output: rmarkdown::html_vignette
+vignette: >
+  %\VignetteIndexEntry{update_by}
+  %\VignetteEngine{knitr::rmarkdown}
+  %\VignetteEncoding{UTF-8}
+---
+
+```{r, include = FALSE}
+knitr::opts_chunk$set(
+  collapse = TRUE,
+  comment = "#>"
+)
+```
+
+Deephaven's `update_by()` table method and suite of `uby` functions enable
+cumulative and moving calculations on static _and_ streaming tables. Complex
+operations like cumulative minima and maxima, exponential moving averages, and
+rolling standard deviations are all possible and effortless to execute. As
+always in Deephaven, the results of these calculations will continue to update
+as their parent tables are updated. Additionally, it's easy to group data by one
+or more columns, enabling complex group-wise calculations with a single line of
+code.
+
+## Applying UpdateBy operations to a table
+
+The table method `update_by()` is the entry point for UpdateBy operations. It
+takes two arguments: the first is an `UpdateByOp` or a list of
+`UpdateByOp`s denoting the calculations to perform on specific columns of the
+table. The second is a column name or a list of column names that define the
+groups on which to perform the calculations. If you don't want grouped
+calculations, omit this argument.
+
+To learn more about UpdateByOps, see the reference documentation with
+`?UpdateByOp`.
+
+The `update_by()` method itself does not know anything about the columns on
+which you want to perform calculations. Rather, the desired columns are passed
+to individual `uby` functions, enabling a massive amount of flexibility.
+
+## `uby` functions
+
+`uby` functions are the workers that actually execute the complex UpdateBy
+calculations. These functions are _generators_, meaning they return _functions_
+that the Deephaven engine knows how to interpret. We call the functions they
+return `UpdateByOp`s. See `?UpdateByOp` for more information. These UpdateByOps
+are not R-level functions, but Deephaven-specific data types that perform all of
+the intensive calculations. Here is a list of all `uby` functions available in
+Deephaven:
+
+- `uby_cum_min()`
+- `uby_cum_max()`
+- `uby_cum_sum()`
+- `uby_cum_prod()`
+- `uby_forward_fill()`
+- `uby_delta()`
+- `uby_emmin_tick()`
+- `uby_emmin_time()`
+- `uby_emmax_tick()`
+- `uby_emmax_time()`
+- `uby_ems_tick()`
+- `uby_ems_time()`
+- `uby_ema_tick()`
+- `uby_ema_time()`
+- `uby_emstd_tick()`
+- `uby_emstd_time()`
+- `uby_rolling_count_tick()`
+- `uby_rolling_count_time()`
+- `uby_rolling_group_tick()`
+- `uby_rolling_group_time()`
+- `uby_rolling_min_tick()`
+- `uby_rolling_min_time()`
+- `uby_rolling_max_tick()`
+- `uby_rolling_max_time()`
+- `uby_rolling_sum_tick()`
+- `uby_rolling_sum_time()`
+- `uby_rolling_prod_tick()`
+- `uby_rolling_prod_time()`
+- `uby_rolling_avg_tick()`
+- `uby_rolling_avg_time()`
+- `uby_rolling_wavg_tick()`
+- `uby_rolling_wavg_time()`
+- `uby_rolling_std_tick()`
+- `uby_rolling_std_time()`
+
+For more details on each `uby` function, see the reference documentation
+by running `?uby_cum_min`, `?uby_delta`, etc.
+
+## An Example
+```{r, eval=FALSE}
+library(rdeephaven)
+
+# connecting to Deephaven server
+client <- Client$new("localhost:10000", auth_type = "psk", auth_token = "my_secret_token")
+
+# create data frame, push to server, retrieve TableHandle
+df <- data.frame(
+  timeCol = seq.POSIXt(as.POSIXct(Sys.Date()), as.POSIXct(Sys.Date() + 0.01), by = "1 sec")[1:500],
+  boolCol = sample(c(TRUE, FALSE), 500, TRUE),
+  col1 = sample(10000, size = 500, replace = TRUE),
+  col2 = sample(10000, size = 500, replace = TRUE),
+  col3 = 1:500
+)
+th <- client$import_table(df)
+
+# compute 10-row exponential weighted moving average of col1 and col2, grouped by boolCol
+th1 <- th$
+  update_by(uby_ema_tick(decay_ticks = 10, cols = c("col1Ema = col1", "col2Ema = col2")), by = "boolCol")
+
+# compute rolling 10-second weighted average and standard deviation of col1 and col2, weighted by col3
+th2 <- th$
+  update_by(
+  c(
+    uby_rolling_wavg_time(ts_col = "timeCol", wcol = "col3", cols = c("col1WAvg = col1", "col2WAvg = col2"), rev_time = "PT10s"),
+    uby_rolling_std_time(ts_col = "timeCol", cols = c("col1Std = col1", "col2Std = col2"), rev_time = "PT10s")
+  )
+)
+
+# compute cumulative minimum and maximum of col1 and col2 respectively, and the rolling 20-row sum of col3, grouped by boolCol
+th3 <- th$
+  update_by(
+  c(
+    uby_cum_min(cols = "col1"),
+    uby_cum_max(cols = "col2"),
+    uby_rolling_sum_tick(cols = "col3", rev_ticks = 20)
+  ),
+  by = "boolCol"
+)
+
+client$close()
+
+```
\ No newline at end of file
diff --git a/docker/registry/cpp-clients-multi-base/gradle.properties b/docker/registry/cpp-clients-multi-base/gradle.properties
index 11c131793a6..0e9b923c556 100644
--- a/docker/registry/cpp-clients-multi-base/gradle.properties
+++ b/docker/registry/cpp-clients-multi-base/gradle.properties
@@ -1,4 +1,4 @@
 io.deephaven.project.ProjectType=DOCKER_REGISTRY
 deephaven.registry.imageName=ghcr.io/deephaven/cpp-clients-multi-base:latest
-deephaven.registry.imageId=ghcr.io/deephaven/cpp-clients-multi-base@sha256:e9d3cbd9cf5f95162e559a8e4b78fec20ddbc559048d8b0167db161f8b748d55
+deephaven.registry.imageId=ghcr.io/deephaven/cpp-clients-multi-base@sha256:f4797606e1a2e06349289f0cd9a73f310d37f4509b10b1637ec62589a8d05b12
 deephaven.registry.platform=linux/amd64