Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

CRAN submission changes #70

Merged
merged 12 commits into from
Nov 14, 2024
6 changes: 2 additions & 4 deletions DESCRIPTION
Original file line number Diff line number Diff line change
Expand Up @@ -20,18 +20,18 @@ Description: Collection of utilities that improve using 'Databricks' from R.
License: Apache License (>= 2)
Encoding: UTF-8
LazyData: true
Depends:
R (>= 4.1.0)
Imports:
arrow,
base64enc,
cli,
curl,
data.table,
dplyr,
glue,
httr2,
ini,
jsonlite,
magrittr,
purrr,
reticulate,
R6 (>= 2.4.0),
Expand All @@ -42,12 +42,10 @@ Suggests:
testthat (>= 3.0.0),
huxtable,
htmltools,
htmlwidgets,
knitr,
magick,
rmarkdown,
rstudioapi,
rvest,
withr
Roxygen: list(markdown = TRUE)
RoxygenNote: 7.3.2
Expand Down
1 change: 0 additions & 1 deletion NAMESPACE
Original file line number Diff line number Diff line change
Expand Up @@ -220,7 +220,6 @@ import(cli)
import(httr2)
import(tibble)
importFrom(glue,glue)
importFrom(magrittr,`%>%`)
importFrom(rlang,.data)
importFrom(stats,setNames)
importFrom(utils,object.size)
10 changes: 0 additions & 10 deletions NOTICE
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,6 @@ Copyright 2021 httr2 authors
jeroen/jsonlite - https://github.com/jeroen/jsonlite
Copyright 2020 Jeroen Ooms

tidyverse/magrittr - https://github.com/tidyverse/magrittr
Copyright 2023 magrittr authors

tidyverse/purrr - https://github.com/tidyverse/purrr
Copyright 2023 purrr authors

Expand All @@ -41,13 +38,6 @@ rstudio/rstudioapi - https://github.com/rstudio/rstudioapi
Copyright 2015 RStudio
tidyverse/rvest - https://github.com/tidyverse/rvest
Copyright 2023 rvest authors

__________
This Software contains code from the following open source projects, licensed under the MPL-2 license:

Rdatatable/data.table - https://github.com/Rdatatable/data.table
Copyright data.table authors

__________
This Software contains code from the following open source projects, licensed under the GPL-2/GPL-3 licenses:

Expand Down
18 changes: 9 additions & 9 deletions R/clusters.R
Original file line number Diff line number Diff line change
Expand Up @@ -325,7 +325,7 @@ db_cluster_action <- function(cluster_id,
token = token
)

req <- req %>%
req <- req |>
httr2::req_body_json(body)

if (perform_request) {
Expand Down Expand Up @@ -558,12 +558,12 @@ db_cluster_get <- function(cluster_id,
token = token
)

req <- req %>%
req <- req |>
httr2::req_body_json(body)

if (perform_request) {
req %>%
httr2::req_perform() %>%
req |>
httr2::req_perform() |>
httr2::resp_body_json()
} else {
req
Expand Down Expand Up @@ -872,26 +872,26 @@ get_latest_dbr <- function(lts, ml, gpu, photon,

runtimes <- db_cluster_runtime_versions(host = host, token = token)

runtimes_adj <- runtimes[[1]] %>%
runtimes_adj <- runtimes[[1]] |>
purrr::map_dfr(function(x) {
list(key = x[["key"]], name = x[["name"]])
}) %>%
}) |>
dplyr::mutate(
version = as.numeric(gsub("^(\\d+\\.\\d)\\..*", "\\1", .data$key)),
lts = grepl("LTS", .data$name),
ml = grepl("ml", .data$key),
gpu = grepl("gpu", .data$key),
photon = grepl("photon", .data$key),
) %>%
) |>
dplyr::arrange(dplyr::desc(version))

runtime_matches <- runtimes_adj %>%
runtime_matches <- runtimes_adj |>
dplyr::filter(
.data$lts == {{lts}},
.data$ml == {{ml}},
.data$gpu == {{gpu}},
.data$photon == {{photon}}
) %>%
) |>
dplyr::slice_head(n = 1)

list(
Expand Down
6 changes: 3 additions & 3 deletions R/connection-pane.R
Original file line number Diff line number Diff line change
Expand Up @@ -140,7 +140,7 @@ get_uc_model_versions <- function(catalog, schema, model, host, token,
aliases <- purrr::map(
model_info$aliases, ~{
setNames(.x$version_num, .x$alias_name)
}) %>%
}) |>
unlist()

version_names <- purrr::map_chr(versions, function(x) {
Expand Down Expand Up @@ -281,8 +281,8 @@ get_schema_objects <- function(catalog, schema, host, token) {

# how many objects of each type exist
# only show when objects exist within
sizes <- purrr::map_int(objects, nrow) %>%
purrr::keep(~.x > 0) %>%
sizes <- purrr::map_int(objects, nrow) |>
purrr::keep(~.x > 0) |>
purrr::imap_chr(~ glue::glue("{.y} ({.x})"))

data.frame(
Expand Down
2 changes: 1 addition & 1 deletion R/data-structures.R
Original file line number Diff line number Diff line change
Expand Up @@ -315,7 +315,7 @@ is.file_storage_info <- function(x) {
#' `bucket-owner-full-control`. If `canned_acl` is set, the cluster instance
#' profile must have `s3:PutObjectAcl` permission on the destination bucket and
#' prefix. The full list of possible canned ACLs can be found in
#' [docs](https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl).
#' [docs](https://docs.aws.amazon.com/AmazonS3/latest/userguide/acl-overview.html#canned-acl).
#' By default only the object owner gets full control. If you are using cross
#' account role for writing data, you may want to set
#' `bucket-owner-full-control` to make bucket owner able to read the logs.
Expand Down
12 changes: 4 additions & 8 deletions R/dbfs.R
Original file line number Diff line number Diff line change
Expand Up @@ -454,14 +454,10 @@ db_dbfs_put <- function(path, file = NULL, contents = NULL, overwrite = FALSE,
)

if (perform_request) {
req %>%
httr2::req_body_multipart(
path = body$path,
contents = body$contents,
overwrite = body$overwrite
) %>%
httr2::req_error(body = db_req_error_body) %>%
httr2::req_perform() %>%
req |>
httr2::req_body_multipart(!!!body) |>
httr2::req_error(body = db_req_error_body) |>
httr2::req_perform() |>
httr2::resp_body_json()
} else {
req
Expand Down
21 changes: 12 additions & 9 deletions R/execution-context.R
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ db_context_status <- function(cluster_id,
token = token
)

req <- req %>%
req <- req |>
httr2::req_url_query(
clusterId = cluster_id,
contextId = context_id
Expand Down Expand Up @@ -254,7 +254,7 @@ db_context_command_status <- function(cluster_id,
token = token
)

req <- req %>%
req <- req |>
httr2::req_url_query(
clusterId = cluster_id,
contextId = context_id,
Expand Down Expand Up @@ -292,7 +292,7 @@ db_context_command_cancel <- function(cluster_id,
token = token
)

req <- req %>%
req <- req |>
httr2::req_url_query(
clusterId = cluster_id,
contextId = context_id,
Expand Down Expand Up @@ -327,13 +327,16 @@ db_context_command_parse <- function(x, language = c("r", "py", "scala", "sql"))
}

if (x$results$resultType == "table") {
schema <- data.table::rbindlist(x$results$schema)
tbl <- data.table::rbindlist(x$results$data)
names(tbl) <- schema$name
schema <- dplyr::bind_rows(x$results$schema)

output_tbl <- huxtable::hux(tbl) %>%
huxtable::set_all_borders(TRUE) %>%
huxtable::set_font_size(10) %>%
tbl <- purrr::list_transpose(x$results$data) |>
as.data.frame()

names(tbl) <- schema$names

output_tbl <- huxtable::hux(tbl) |>
huxtable::set_all_borders(TRUE) |>
huxtable::set_font_size(10) |>
huxtable::set_position("left")

huxtable::print_screen(output_tbl)
Expand Down
2 changes: 1 addition & 1 deletion R/experiments.R
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ db_experiments_get <- function(name = NULL, id = NULL,
body = body,
host = host,
token = token
) %>%
) |>
httr2::req_url_path_append(endpoint_suffix)

if (perform_request) {
Expand Down
4 changes: 2 additions & 2 deletions R/feature-store.R
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ db_feature_tables_get <- function(feature_table,
version = "2.0",
host = host,
token = token
) %>%
) |>
httr2::req_url_query(name = feature_table)

if (perform_request) {
Expand All @@ -63,7 +63,7 @@ db_feature_table_features <- function(feature_table,
version = "2.0",
host = host,
token = token
) %>%
) |>
httr2::req_url_query(feature_table = feature_table)

if (perform_request) {
Expand Down
6 changes: 3 additions & 3 deletions R/misc-helpers.R
Original file line number Diff line number Diff line change
Expand Up @@ -41,9 +41,9 @@ db_current_workspace_id <- function(host = db_host(), token = db_token(),
)

if (perform_request) {
resp <- req %>%
httr2::req_error(body = db_req_error_body) %>%
httr2::req_perform() %>%
resp <- req |>
httr2::req_error(body = db_req_error_body) |>
httr2::req_perform() |>
httr2::resp_headers()

# workspace id can be extracted from response headers
Expand Down
34 changes: 28 additions & 6 deletions R/package-auth.R
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ db_host <- function(id = NULL, prefix = NULL, profile = default_config_profile()
#'
#' @description
#' The function will check for a token in the `DATABRICKS_HOST` environment variable.
#' `.databrickscfg` will be searched if `db_profile` and `use_databrickscfg` are set or
#' `.databrickscfg` will be searched if `db_profile` and `use_databrickscfg` are set or
#' if Posit Workbench managed OAuth credentials are detected.
#' If none of the above are found then will default to using OAuth U2M flow.
#'
Expand Down Expand Up @@ -98,7 +98,7 @@ db_token <- function(profile = default_config_profile()) {
#' @description
#' Workspace ID, optionally specified to make connections pane more powerful.
#' Specified as an environment variable `DATABRICKS_WSID`.
#' `.databrickscfg` will be searched if `db_profile` and `use_databrickscfg` are set or
#' `.databrickscfg` will be searched if `db_profile` and `use_databrickscfg` are set or
#' if Posit Workbench managed OAuth credentials are detected.
#'
#' Refer to [api authentication docs](https://docs.databricks.com/dev-tools/api/latest/authentication.html)
Expand Down Expand Up @@ -223,7 +223,6 @@ read_env_var <- function(key = c("token", "host", "wsid"),

value <- Sys.getenv(key_name)


if (value == "") {
if (error) {
stop(cli::format_error(c(
Expand Down Expand Up @@ -273,7 +272,7 @@ db_oauth_client <- function(host = db_host()) {
#' Returns the default config profile
#' @details Returns the config profile first looking at `DATABRICKS_CONFIG_PROFILE`
#' and then the `db_profile` option.
#'
#'
#' @return profile name
#' @keywords internal
default_config_profile <- function() {
Expand All @@ -288,7 +287,7 @@ default_config_profile <- function() {
#' Returns whether or not to use a `.databrickscfg` file
#' @details Indicates `.databrickscfg` should be used instead of environment variables when
#' either the `use_databrickscfg` option is set or Posit Workbench managed OAuth credentials are detected.
#'
#'
#' @return boolean
#' @keywords internal
use_databricks_cfg <- function() {
Expand All @@ -297,4 +296,27 @@ use_databricks_cfg <- function() {
use_databricks_cfg <- TRUE
}
return(use_databricks_cfg)
}
}


# Extended from {odbc}
#
# Decide whether this R session is running on a hosted platform, i.e. one
# where we cannot redirect the user's browser to a server on localhost
# (which OAuth redirect flows would otherwise rely on).
#
# Detection strategy pioneered by the {gargle} package and {httr2}.
is_hosted_session <- function() {

  # Databricks notebooks and Google Colab are always hosted environments.
  if (on_databricks() || nzchar(Sys.getenv("COLAB_RELEASE_TAG"))) {
    return(TRUE)
  }

  # RStudio Server / Posit Workbench counts as hosted only when it is NOT
  # served from localhost — a local server install is possible, though
  # unusual, and in that case localhost redirects still work.
  running_as_server <- identical(Sys.getenv("RSTUDIO_PROGRAM_MODE"), "server")
  referer_is_local <- grepl(
    "localhost",
    Sys.getenv("RSTUDIO_HTTP_REFERER"),
    fixed = TRUE
  )
  running_as_server && !referer_is_local
}
Loading
Loading