From 770ef0002369f55239927202454ec580d290d2a3 Mon Sep 17 00:00:00 2001
From: Edgar Ruiz <77294576+edgararuiz@users.noreply.github.com>
Date: Wed, 2 Oct 2024 20:16:18 -0500
Subject: [PATCH] Adds Databricks integration (#839) (#841)

* Ports host and token functions
* Starts board_databricks
* Starts pin list
* Removes pipes
* Centralizes content retrieval, adds pin_exists
* Starts pin_meta function
* Simplifies arguments, renames token and host functions
* First pass at pin_store
* Fixes hashed subfolder
* Adds pin_versions
* Improvements to cache path
* Adds download file helper
* Adds download step to meta, fixes cache discovery
* Adds pin_fetch
* Adds pin_delete, and all supporting functions
* Assigns proper file rights to local cache
* Passes all tests
* Adds board_deparse
* Adds required_pkgs
* Starts testing
* Avoids failing when checking contents of a folder, needed for prefix
* Passes all tests
* Fixes a pkg check finding
* Starts documentation
* Completes documentation
* Adds NEWS item
* Small fix to documentation, adds some instructions to tests
* Properly handles lack of host or token
* Fixes pkgdown failure, addresses oldrel-4 issue by reverting to older mode of `purrr`, and improves some tests
* More consistent filename
* More consistent filename
* Edits to docs
* Redocument
* Update R/board_databricks.R

Co-authored-by: Julia Silge

* Update R/board_databricks.R

Co-authored-by: Julia Silge

* Removes reference to bucket, and re-documents
* Little more doc refining
* Try running tests in CI
* Update snapshot

---------

Co-authored-by: Julia Silge
---
 .github/workflows/check-boards.yaml       |   3 +
 DESCRIPTION                               |   1 +
 NAMESPACE                                 |  11 +
 NEWS.md                                   |   2 +
 R/board_databricks.R                      | 396 ++++++++++++++++++++++
 _pkgdown.yml                              |   1 +
 man/board_databricks.Rd                   |  83 +++++
 man/required_pkgs.pins_board.Rd           |   7 +-
 tests/testthat/_snaps/board_databricks.md |  14 +
 tests/testthat/test-board_databricks.R    |  30 ++
 10 files changed, 546 insertions(+), 2 deletions(-)
 create mode 100644 R/board_databricks.R
 create mode 100644 man/board_databricks.Rd
 create mode 100644 tests/testthat/_snaps/board_databricks.md
 create mode 100644 tests/testthat/test-board_databricks.R

diff --git a/.github/workflows/check-boards.yaml b/.github/workflows/check-boards.yaml
index 576e1a3b..6dc7a086 100644
--- a/.github/workflows/check-boards.yaml
+++ b/.github/workflows/check-boards.yaml
@@ -22,6 +22,9 @@ jobs:
       PINS_AWS_ACCESS_KEY: ${{ secrets.PINS_AWS_ACCESS_KEY}}
       PINS_AWS_SECRET_ACCESS_KEY: ${{ secrets.PINS_AWS_SECRET_ACCESS_KEY}}
       PINS_GCS_PASSWORD: ${{ secrets.PINS_GCS_PASSWORD }}
+      DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
+      DATABRICKS_HOST: "https://rstudio-partner-posit-default.cloud.databricks.com"
+      PINS_DATABRICKS_FOLDER_URL: "/Volumes/workshops/models/vetiver"
       # needed for legacy board tests
       TEST_AWS_BUCKET: "pins-github-tests"
diff --git a/DESCRIPTION b/DESCRIPTION
index 8e0286ef..c9f86930 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -48,6 +48,7 @@ Suggests:
     gitcreds,
     googleCloudStorageR,
     googledrive,
+    httr2,
     ids,
     knitr,
     Microsoft365R,
diff --git a/NAMESPACE b/NAMESPACE
index ad21f4e7..7173a25c 100644
--- a/NAMESPACE
+++ b/NAMESPACE
@@ -9,6 +9,7 @@ S3method(board_browse,pins_board_local)
 S3method(board_deparse,pins_board)
 S3method(board_deparse,pins_board_azure)
 S3method(board_deparse,pins_board_connect)
+S3method(board_deparse,pins_board_databricks)
 S3method(board_deparse,pins_board_folder)
 S3method(board_deparse,pins_board_gcs)
 S3method(board_deparse,pins_board_kaggle_competition)
@@ -56,6 +57,7 @@ S3method(pin,data.frame)
S3method(pin,default) S3method(pin_delete,pins_board_azure) S3method(pin_delete,pins_board_connect) +S3method(pin_delete,pins_board_databricks) S3method(pin_delete,pins_board_folder) S3method(pin_delete,pins_board_gcs) S3method(pin_delete,pins_board_gdrive) @@ -66,6 +68,7 @@ S3method(pin_delete,pins_board_s3) S3method(pin_delete,pins_board_url) S3method(pin_exists,pins_board_azure) S3method(pin_exists,pins_board_connect) +S3method(pin_exists,pins_board_databricks) S3method(pin_exists,pins_board_folder) S3method(pin_exists,pins_board_gcs) S3method(pin_exists,pins_board_gdrive) @@ -76,6 +79,7 @@ S3method(pin_exists,pins_board_s3) S3method(pin_exists,pins_board_url) S3method(pin_fetch,pins_board_azure) S3method(pin_fetch,pins_board_connect) +S3method(pin_fetch,pins_board_databricks) S3method(pin_fetch,pins_board_folder) S3method(pin_fetch,pins_board_gcs) S3method(pin_fetch,pins_board_gdrive) @@ -86,6 +90,7 @@ S3method(pin_fetch,pins_board_s3) S3method(pin_fetch,pins_board_url) S3method(pin_list,pins_board_azure) S3method(pin_list,pins_board_connect) +S3method(pin_list,pins_board_databricks) S3method(pin_list,pins_board_folder) S3method(pin_list,pins_board_gcs) S3method(pin_list,pins_board_gdrive) @@ -101,6 +106,7 @@ S3method(pin_load,package) S3method(pin_load,table) S3method(pin_meta,pins_board_azure) S3method(pin_meta,pins_board_connect) +S3method(pin_meta,pins_board_databricks) S3method(pin_meta,pins_board_folder) S3method(pin_meta,pins_board_gcs) S3method(pin_meta,pins_board_gdrive) @@ -115,6 +121,7 @@ S3method(pin_search,pins_board_kaggle_competition) S3method(pin_search,pins_board_kaggle_dataset) S3method(pin_store,pins_board_azure) S3method(pin_store,pins_board_connect) +S3method(pin_store,pins_board_databricks) S3method(pin_store,pins_board_folder) S3method(pin_store,pins_board_gcs) S3method(pin_store,pins_board_gdrive) @@ -126,6 +133,7 @@ S3method(pin_store,pins_board_url) S3method(pin_version_delete,pins_board) S3method(pin_version_delete,pins_board_azure) S3method(pin_version_delete,pins_board_connect) +S3method(pin_version_delete,pins_board_databricks) S3method(pin_version_delete,pins_board_folder) S3method(pin_version_delete,pins_board_gcs) S3method(pin_version_delete,pins_board_gdrive) @@ -135,6 +143,7 @@ S3method(pin_version_delete,pins_board_url) S3method(pin_versions,pins_board) S3method(pin_versions,pins_board_azure) S3method(pin_versions,pins_board_connect) +S3method(pin_versions,pins_board_databricks) S3method(pin_versions,pins_board_folder) S3method(pin_versions,pins_board_gcs) S3method(pin_versions,pins_board_gdrive) @@ -149,6 +158,7 @@ S3method(print,pins_meta) S3method(required_pkgs,pins_board) S3method(required_pkgs,pins_board_azure) S3method(required_pkgs,pins_board_connect) +S3method(required_pkgs,pins_board_databricks) S3method(required_pkgs,pins_board_gcs) S3method(required_pkgs,pins_board_gdrive) S3method(required_pkgs,pins_board_ms365) @@ -166,6 +176,7 @@ export(board_browse) export(board_cache_path) export(board_connect) export(board_connect_url) +export(board_databricks) export(board_default) export(board_deparse) export(board_deregister) diff --git a/NEWS.md b/NEWS.md index acd2943b..c599a9f3 100644 --- a/NEWS.md +++ b/NEWS.md @@ -6,6 +6,8 @@ ## Other improvements +* Added new board for Databricks Volumes `board_databricks()` (#839, @edgararuiz). + * Added example Python code to pin previews for Posit Connect (#806). * Fixed a bug in how pins with the same name but different owners on Posit Connect were identified (#808). 
diff --git a/R/board_databricks.R b/R/board_databricks.R
new file mode 100644
index 00000000..13a60aff
--- /dev/null
+++ b/R/board_databricks.R
@@ -0,0 +1,396 @@
+#' Use a Databricks Volume as a board
+#'
+#' Pin data to a [Databricks Volume](https://docs.databricks.com/en/sql/language-manual/sql-ref-volumes.html)
+#'
+#' # Authentication
+#'
+#' `board_databricks()` searches for an authentication token in three different
+#' places, in this order:
+#' - 'DATABRICKS_TOKEN' environment variable
+#' - 'CONNECT_DATABRICKS_TOKEN' environment variable
+#' - OAuth Databricks token inside the RStudio API
+#'
+#' In most cases, the authentication will be a Personal Access
+#' Token ('PAT') that is saved as the 'DATABRICKS_TOKEN' environment variable.
+#' To obtain a 'PAT', see: [Databricks personal access token authentication](https://docs.databricks.com/en/dev-tools/auth/pat.html).
+#'
+#' # Details
+#'
+#' * The functions in pins do not create a new Databricks Volume.
+#' * `board_databricks()` is powered by the httr2 package, which is a
+#' suggested dependency of pins (not required for pins in general). If
+#' you run into errors when deploying content to a server like
+#' <https://www.shinyapps.io> or [Connect](https://posit.co/products/enterprise/connect/),
+#' add `requireNamespace("httr2")` to your app or document for [automatic
+#' dependency discovery](https://docs.posit.co/connect/user/troubleshooting/#render-missing-r-package).
+#'
+#' @inheritParams new_board
+#' @param folder_url The path to the target folder inside Unity Catalog. The path
+#' must include the catalog, schema, and volume names, preceded by '/Volumes/',
+#' like `"/Volumes/my-catalog/my-schema/my-volume"`.
+#' @param host Your [Workspace Instance URL](https://docs.databricks.com/en/workspace/workspace-details.html#workspace-url).
+#' Defaults to `NULL`. If `NULL`, it will search for this URL in two different
+#' environment variables, in this order:
+#' - 'DATABRICKS_HOST'
+#' - 'CONNECT_DATABRICKS_HOST'
+#' @param prefix Prefix within the folder that this board will occupy.
+#' You can use this to maintain multiple independent pin boards within a single
+#' Databricks Volume. Make sure to end with '/', to take advantage of
+#' Databricks Volume directory-like handling.
+#' @examples
+#' \dontrun{
+#' board <- board_databricks("/Volumes/my-catalog/my-schema/my-volume")
+#' board %>% pin_write(mtcars)
+#' board %>% pin_read("mtcars")
+#'
+#' # A prefix allows you to have multiple independent boards in the same folder.
+#' project_1 <- board_databricks( +#' folder_url = "/Volumes/my-catalog/my-schema/my-volume", +#' prefix = "project1/" +#' ) +#' project_2 <- board_databricks( +#' folder_url = "/Volumes/my-catalog/my-schema/my-volume", +#' prefix = "project2/" +#' ) +#' } +#' @export +board_databricks <- function( + folder_url, + host = NULL, + prefix = NULL, + versioned = TRUE, + cache = NULL) { + + check_installed("httr2") + + cache_path <- tolower(fs::path("databricks", folder_url, prefix %||% "")) + cache_path <- paste0(strsplit(cache_path, "\\/")[[1]], collapse = "-") + cache <- cache %||% board_cache_path(cache_path) + new_board_v1( + "pins_board_databricks", + name = "databricks", + folder_url = folder_url, + host = as.character(db_get_host(host)), + prefix = prefix, + cache = cache, + versioned = versioned + ) +} + +board_databricks_test <- function(prefix = NULL) { + testthat::skip_if( + db_get_token(fail = FALSE) == "", + message = "No Databricks credentials found" + ) + testthat::skip_if( + db_get_host(fail = FALSE) == "", + message = "No Databricks host defined" + ) + skip_if_missing_envvars( + tests = "board_databricks()", + envvars = c("PINS_DATABRICKS_FOLDER_URL") + ) + board_databricks( + folder_url = Sys.getenv("PINS_DATABRICKS_FOLDER_URL"), + prefix = prefix, + cache = tempfile() + ) +} + +#' @export +pin_list.pins_board_databricks <- function(board, ...) { + db_list_folders(board) +} + +#' @export +pin_exists.pins_board_databricks <- function(board, name, ...) { + name %in% db_list_folders(board) +} + +#' @export +pin_meta.pins_board_databricks <- function(board, name, version = NULL, ...) { + check_pin_exists(board, name) + version <- check_pin_version(board, name, version) + if (!version %in% db_list_folders(board, name)) { + abort_pin_version_missing(version) + } + db_download_file(board, name, version, "data.txt") + path_version <- fs::path(board$cache, board$prefix %||% "", name, version %||% "") + local_meta( + x = read_meta(path_version), + name = name, + dir = path_version, + version = version + ) +} + +#' @export +pin_store.pins_board_databricks <- function(board, name, paths, metadata, + versioned = NULL, x = NULL, ...) { + check_dots_used() + check_pin_name(name) + version <- version_setup( + board = board, + name = name, + new_version = version_name(metadata), + versioned = versioned + ) + version_dir <- fs::path(name, version) + temp_file <- withr::local_tempfile() + yaml::write_yaml(x = metadata, file = temp_file) + db_upload_file( + board = board, + path = temp_file, + name = version_dir, + file_name = "data.txt" + ) + for (path in paths) { + db_upload_file( + board = board, + path = path, + name = version_dir + ) + } + name +} + +#' @export +pin_versions.pins_board_databricks <- function(board, name, ...) { + paths <- db_list_folders(board, name) + version_from_path(paths) +} + +#' @export +pin_fetch.pins_board_databricks <- function(board, name, version = NULL, ...) { + meta <- pin_meta(board, name, version = version) + for (file in meta$file) { + db_download_file(board, name, meta$local$version, file) + } + meta +} + +#' @export +pin_delete.pins_board_databricks <- function(board, names, ...) { + for (name in names) { + check_pin_exists(board, name) + db_delete_pin(board, name) + } + invisible(board) +} + +#' @export +pin_version_delete.pins_board_databricks <- function(board, name, version, ...) { + db_delete_pin(board, fs::path(name, version)) +} + +#' @export +board_deparse.pins_board_databricks <- function(board, ...) 
{ + expr( + board_databricks( + folder_url = !!board$folder_url, + host = !!board$host, + prefix = !!board$prefix, + versioned = !!board$versioned, + cache = !!board$cache + ) + ) +} + +#' @rdname required_pkgs.pins_board +#' @export +required_pkgs.pins_board_databricks <- function(x, ...) { + check_dots_empty() + "httr2" +} + +# Helpers ----------------------------------------------------------------- + +db_upload_file <- function(board, path, name = "", file_name = NULL) { + file_name <- file_name %||% fs::path_file(path) + full_path <- fs::path( + "/api/2.0/fs/files", + board$folder_url, + board$prefix %||% "", + name, + file_name + ) + out <- db_req_init(board, "PUT", full_path) + out <- httr2::req_body_file(out, path) + out <- httr2::req_perform(out) + out +} + +db_download_file <- function(board, name = "", version = "", file_name = "") { + base_path <- fs::path(board$prefix %||% "", name, version) + cache_path <- fs::path(board$cache, base_path) + local_path <- fs::path(cache_path, file_name) + if (fs::file_exists(local_path)) { + return(invisible()) + } + try(fs::dir_create(cache_path)) + full_path <- fs::path("/api/2.0/fs/files", board$folder_url, base_path, file_name) + out <- db_req_init(board, "GET", full_path) + out <- httr2::req_perform(out, path = local_path) + fs::file_chmod(local_path, "u=r") + invisible() +} + +db_delete_pin <- function(board, name) { + files <- db_list_file_paths(board, name) + purrr::walk(files, db_delete_file, board) + dir <- fs::path(name, db_list_folders(board, name)) + purrr::walk(dir, db_delete_folder, board) + db_delete_folder(name, board) + invisible() +} + +db_delete_folder <- function(folder, board) { + full_path <- fs::path( + "/api/2.0/fs/directories", + board$folder_url, + board$prefix %||% "", + folder + ) + out <- db_req_init(board, "DELETE", full_path) + out <- httr2::req_perform(out) + out +} + +db_delete_file <- function(path, board) { + full_path <- fs::path("/api/2.0/fs/files", path) + out <- db_req_init(board, "DELETE", full_path) + out <- httr2::req_perform(out) + out +} + +db_list_file_paths <- function(board, name) { + root_folder <- db_list_folders(board, name) + root_files <- db_list_files(board, name, "") + if (length(root_files) == 0) { + root_files <- NULL + } + if (length(root_folder) == 0 && length(root_files) == 0) { + return(root_folder) + } + out <- purrr::map(root_folder, ~ db_list_files(board, name, .x)) + if (length(out) > 0) { + out <- purrr::reduce(out, c) + } else { + out <- NULL + } + c(out, root_files) +} + +db_list_files <- function(board, name, folder = "") { + out <- db_list_contents(board, fs::path(name, folder)) + out <- purrr::discard(out, ~ .x$is_directory) + out <- purrr::map_chr(out, ~ .x$path) + as.character(out) +} + +db_list_folders <- function(board, path = NULL) { + out <- db_list_contents(board, path) + out <- purrr::keep(out, ~ .x$is_directory) + out <- purrr::map_chr(out, ~ .x$name) + as.character(out) +} + +db_list_contents <- function(board, path = NULL) { + full_path <- fs::path( + "/api/2.0/fs/directories", + board$folder_url, + board$prefix %||% "", + path %||% "" + ) + out <- db_req_init(board, "GET", full_path) + out <- try(httr2::req_perform(out), silent = TRUE) + if (inherits(out, "try-error")) { + cond <- attr(out, "condition") + if (inherits(cond, "httr2_http_404")) { + return(list()) + } else { + return(cond) + } + } + out <- httr2::resp_body_json(out) + purrr::list_flatten(out) +} + +db_req_init <- function(board, method, path) { + host_url <- httr2::url_parse(board$host) + if 
(is.null(host_url$scheme)) host_url$scheme <- "https"
+  out <- httr2::url_build(host_url)
+  out <- httr2::request(out)
+  out <- httr2::req_method(out, method)
+  out <- httr2::req_auth_bearer_token(out, db_get_token())
+  httr2::req_url_path_append(out, glue(path))
+}
+
+db_get_host <- function(host = NULL, fail = TRUE) {
+  if (!is.null(host)) {
+    return(set_names(host, "argument"))
+  }
+  env_host <- Sys.getenv("DATABRICKS_HOST", unset = NA)
+  connect_host <- Sys.getenv("CONNECT_DATABRICKS_HOST", unset = NA)
+  if (!is.na(env_host)) {
+    host <- set_names(env_host, "environment")
+  }
+  if (!is.na(connect_host)) {
+    host <- set_names(connect_host, "environment_connect")
+  }
+  if (is.null(host)) {
+    if (fail) {
+      cli_abort(c(
+        paste0(
+          "No Host URL was provided, and ",
+          "the environment variable 'DATABRICKS_HOST' is not set."
+        ),
+        "Please add your Host to 'DATABRICKS_HOST' inside your .Renviron file."
+      ))
+    } else {
+      host <- ""
+    }
+  }
+  host
+}
+
+db_get_token <- function(token = NULL, fail = FALSE) {
+  if (!is.null(token)) {
+    return(set_names(token, "argument"))
+  }
+  # Checks the environment variables
+  if (is.null(token)) {
+    env_token <- Sys.getenv("DATABRICKS_TOKEN", unset = NA)
+    connect_token <- Sys.getenv("CONNECT_DATABRICKS_TOKEN", unset = NA)
+    if (!is.na(env_token)) {
+      token <- set_names(env_token, "environment")
+    } else {
+      if (!is.na(connect_token)) {
+        token <- set_names(connect_token, "environment_connect")
+      }
+    }
+  }
+  # Checks for an OAuth Databricks token inside the RStudio API
+  if (is.null(token) && exists(".rs.api.getDatabricksToken")) {
+    getDatabricksToken <- get(".rs.api.getDatabricksToken")
+    token <- set_names(getDatabricksToken(db_get_host()), "oauth")
+  }
+  if (is.null(token)) {
+    if (fail) {
+      rlang::abort(c(
+        paste0(
+          "No authentication token was identified: \n",
+          " - No 'DATABRICKS_TOKEN' environment variable found \n",
+          " - No Databricks OAuth token found \n",
+          " - Not passed as a function argument"
+        ),
+        "Please add your Token to 'DATABRICKS_TOKEN' inside your .Renviron file."
+      ))
+    } else {
+      token <- ""
+    }
+  }
+  token
+}
diff --git a/_pkgdown.yml b/_pkgdown.yml
index 59c837d6..b6412094 100644
--- a/_pkgdown.yml
+++ b/_pkgdown.yml
@@ -38,6 +38,7 @@ reference:
       - board_azure
       - board_connect
       - board_connect_url
+      - board_databricks
       - board_gcs
       - board_gdrive
       - board_local
diff --git a/man/board_databricks.Rd b/man/board_databricks.Rd
new file mode 100644
index 00000000..71cdb253
--- /dev/null
+++ b/man/board_databricks.Rd
@@ -0,0 +1,83 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/board_databricks.R
+\name{board_databricks}
+\alias{board_databricks}
+\title{Use a Databricks Volume as a board}
+\usage{
+board_databricks(
+  folder_url,
+  host = NULL,
+  prefix = NULL,
+  versioned = TRUE,
+  cache = NULL
+)
+}
+\arguments{
+\item{folder_url}{The path to the target folder inside Unity Catalog. The path
+must include the catalog, schema, and volume names, preceded by '/Volumes/',
+like \code{"/Volumes/my-catalog/my-schema/my-volume"}.}
+
+\item{host}{Your \href{https://docs.databricks.com/en/workspace/workspace-details.html#workspace-url}{Workspace Instance URL}.
+Defaults to \code{NULL}. If \code{NULL}, it will search for this URL in two different
+environment variables, in this order:
+\itemize{
+\item 'DATABRICKS_HOST'
+\item 'CONNECT_DATABRICKS_HOST'
+}}
+
+\item{prefix}{Prefix within the folder that this board will occupy.
+You can use this to maintain multiple independent pin boards within a single
+Databricks Volume. Make sure to end with '/', to take advantage of
+Databricks Volume directory-like handling.}
+
+\item{versioned}{Should this board be registered with support for versions?}
+
+\item{cache}{Cache path. Every board requires a local cache to avoid
+downloading files multiple times. The default stores in a standard
+cache location for your operating system, but you can override if needed.}
+}
+\description{
+Pin data to a \href{https://docs.databricks.com/en/sql/language-manual/sql-ref-volumes.html}{Databricks Volume}
+}
+\section{Authentication}{
+\code{board_databricks()} searches for an authentication token in three different
+places, in this order:
+\itemize{
+\item 'DATABRICKS_TOKEN' environment variable
+\item 'CONNECT_DATABRICKS_TOKEN' environment variable
+\item OAuth Databricks token inside the RStudio API
+}

+In most cases, the authentication will be a Personal Access
+Token ('PAT') that is saved as the 'DATABRICKS_TOKEN' environment variable.
+To obtain a 'PAT', see: \href{https://docs.databricks.com/en/dev-tools/auth/pat.html}{Databricks personal access token authentication}.
+}
+
+\section{Details}{
+\itemize{
+\item The functions in pins do not create a new Databricks Volume.
+\item \code{board_databricks()} is powered by the httr2 package, which is a
+suggested dependency of pins (not required for pins in general). If
+you run into errors when deploying content to a server like
+\url{https://www.shinyapps.io} or \href{https://posit.co/products/enterprise/connect/}{Connect},
+add \code{requireNamespace("httr2")} to your app or document for \href{https://docs.posit.co/connect/user/troubleshooting/#render-missing-r-package}{automatic dependency discovery}.
+}
+}
+
+\examples{
+\dontrun{
+board <- board_databricks("/Volumes/my-catalog/my-schema/my-volume")
+board \%>\% pin_write(mtcars)
+board \%>\% pin_read("mtcars")
+
+# A prefix allows you to have multiple independent boards in the same folder.
+project_1 <- board_databricks(
+  folder_url = "/Volumes/my-catalog/my-schema/my-volume",
+  prefix = "project1/"
+)
+project_2 <- board_databricks(
+  folder_url = "/Volumes/my-catalog/my-schema/my-volume",
+  prefix = "project2/"
+)
+}
+}
diff --git a/man/required_pkgs.pins_board.Rd b/man/required_pkgs.pins_board.Rd
index cb0d6461..e086444e 100644
--- a/man/required_pkgs.pins_board.Rd
+++ b/man/required_pkgs.pins_board.Rd
@@ -1,10 +1,11 @@
 % Generated by roxygen2: do not edit by hand
 % Please edit documentation in R/board_azure.R, R/board_connect.R,
-% R/board_gcs.R, R/board_gdrive.R, R/board_ms365.R, R/board_s3.R,
-% R/required_pkgs.R
+% R/board_databricks.R, R/board_gcs.R, R/board_gdrive.R, R/board_ms365.R,
+% R/board_s3.R, R/required_pkgs.R
 \name{required_pkgs.pins_board_azure}
 \alias{required_pkgs.pins_board_azure}
 \alias{required_pkgs.pins_board_connect}
+\alias{required_pkgs.pins_board_databricks}
 \alias{required_pkgs.pins_board_gcs}
 \alias{required_pkgs.pins_board_gdrive}
 \alias{required_pkgs.pins_board_ms365}
@@ -16,6 +17,8 @@

 \method{required_pkgs}{pins_board_connect}(x, ...)

+\method{required_pkgs}{pins_board_databricks}(x, ...)
+
 \method{required_pkgs}{pins_board_gcs}(x, ...)

 \method{required_pkgs}{pins_board_gdrive}(x, ...)
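The `required_pkgs()` method registered above is what lets deployment tooling discover the httr2 dependency at deploy time. A minimal sketch of its behavior (this sketch assumes a valid DATABRICKS_HOST is set, and the volume path is a placeholder):

board <- board_databricks(
  folder_url = "/Volumes/my-catalog/my-schema/my-volume",
  cache = tempfile()
)
required_pkgs(board)
#> [1] "httr2"

The expected output matches the snapshot test added below.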
diff --git a/tests/testthat/_snaps/board_databricks.md b/tests/testthat/_snaps/board_databricks.md
new file mode 100644
index 00000000..270b045d
--- /dev/null
+++ b/tests/testthat/_snaps/board_databricks.md
@@ -0,0 +1,14 @@
+# can find board required pkgs
+
+    Code
+      required_pkgs(board)
+    Output
+    [1] "httr2"
+
+---
+
+    Code
+      required_pkgs(board)
+    Output
+    [1] "httr2"
+
diff --git a/tests/testthat/test-board_databricks.R b/tests/testthat/test-board_databricks.R
new file mode 100644
index 00000000..27f75646
--- /dev/null
+++ b/tests/testthat/test-board_databricks.R
@@ -0,0 +1,30 @@
+# To run these tests, you will need a Databricks Volume, with read/write access,
+# and the following environment variables set:
+# PINS_DATABRICKS_FOLDER_URL - The path to your Databricks Volume
+# DATABRICKS_HOST - Your Workspace Instance URL
+# DATABRICKS_TOKEN - Your PAT
+# CI has a 200-day token created on 2024-10-02
+
+skip_if_not_installed("httr2")
+test_that("Deparse works", {
+  x <- board_databricks(
+    folder_url = "THIS-IS-A-TEST",
+    host = "NOT-A-HOST",
+    cache = "CACHE"
+  )
+  expect_s3_class(x, "pins_board_databricks")
+  expected_expr <- expr(
+    board_databricks(
+      folder_url = "THIS-IS-A-TEST",
+      host = "NOT-A-HOST",
+      prefix = NULL,
+      versioned = TRUE,
+      cache = "CACHE"
+    )
+  )
+  expect_identical(board_deparse(x), expected_expr)
+})
+test_api_basic(board_databricks_test())
+test_api_basic(board_databricks_test(prefix = "prefixed/"))
+test_api_versioning(board_databricks_test())
+test_api_meta(board_databricks_test())
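Beyond these automated checks, a quick interactive smoke test of the new board could look like the sketch below. It assumes DATABRICKS_HOST and DATABRICKS_TOKEN are set as described in the test comments above, and that the volume path (a placeholder here) already exists, since pins does not create volumes:

library(pins)

board <- board_databricks(
  folder_url = "/Volumes/my-catalog/my-schema/my-volume",
  prefix = "smoke-test/"
)

board %>% pin_write(mtcars, "mtcars") # uploads data.txt (metadata) plus the pin files
board %>% pin_read("mtcars")          # downloads into the local cache, then reads
pin_versions(board, "mtcars")         # versions come from the Volume's folder listing
pin_delete(board, "mtcars")           # removes files, version folders, then the pin folder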