add fxn six_file_upload #67
- as part of addressing six_file_upload, reworked s3 connection acquisition for paws and s3fs
- rework tests to work with new s3 connection
- rename random_str to random_db_id_str
sckott committed Apr 5, 2024
1 parent 058af4c commit e1c2883
Showing 35 changed files with 359 additions and 148 deletions.
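For context, here is how the new function is meant to be used (a minimal sketch, not part of the commit; it assumes the sixtyfour package is installed and AWS credentials are configured):

library(sixtyfour)

# random_string() and six_file_upload() appear in the package's own examples
bucket <- random_string("bucket")
demo_rds_file <- file.path(system.file(), "Meta/demo.rds")

# creates the bucket if missing, uploads the file, and returns the
# remote path, e.g. "s3://bucket-xxxxx/demo.rds"
six_file_upload(demo_rds_file, bucket)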
2 changes: 2 additions & 0 deletions NAMESPACE
@@ -92,6 +92,7 @@ export(six_bucket_change_user)
export(six_bucket_delete)
export(six_bucket_permissions)
export(six_bucket_remove_user)
+export(six_file_upload)
export(six_user_create)
export(six_user_creds)
export(six_user_delete)
@@ -143,6 +144,7 @@ importFrom(purrr,flatten)
importFrom(purrr,keep)
importFrom(purrr,list_rbind)
importFrom(purrr,map)
+importFrom(purrr,map2_vec)
importFrom(purrr,map_chr)
importFrom(purrr,map_lgl)
importFrom(purrr,pluck)
6 changes: 4 additions & 2 deletions R/admin.R
@@ -37,8 +37,10 @@ group_policies_data <- list(
#' group_policies("users")
group_policies <- function(group) {
stop_if_not(is.character(group), "group must be character")
-  stop_if_not(group %in% names(group_policies_data),
-    "group must be one of {names(group_policies_data)}")
+  stop_if_not(
+    group %in% names(group_policies_data),
+    "group must be one of {names(group_policies_data)}"
+  )
group_policies_data[[group]]
}

31 changes: 18 additions & 13 deletions R/bucket.R
@@ -21,7 +21,7 @@ aws_bucket_exists <- function(bucket) {
bucket_checks(bucket)
res <- tryCatch(
{
-      env64$s3$head_bucket(Bucket = bucket)
+      con_s3()$head_bucket(Bucket = bucket)
},
error = function(e) e
)
@@ -42,7 +42,7 @@ aws_bucket_exists <- function(bucket) {
#' }
aws_bucket_create <- function(bucket, ...) {
bucket_checks(bucket)
-  env64$s3$create_bucket(
+  con_s3()$create_bucket(
Bucket = bucket,
CreateBucketConfiguration =
list(LocationConstraint = env_var("AWS_REGION")), ...
@@ -94,7 +94,7 @@ aws_bucket_delete <- function(bucket, force = FALSE, ...) {
return(invisible())
}
}
-  env64$s3$delete_bucket(Bucket = bucket, ...)
+  con_s3()$delete_bucket(Bucket = bucket, ...)
return(invisible())
}

@@ -105,12 +105,14 @@ aws_bucket_delete <- function(bucket, force = FALSE, ...) {
#'
#' @export
#' @importFrom purrr safely
-#' @inherit aws_bucket_delete
+#' @inheritParams aws_bucket_delete
#' @section What is magical:
#' - Exits early if bucket does not exist
#' - Checks for any objects in the bucket and deletes any present
#' - Deletes bucket after deleting objects
#' @family buckets
+#' @family magicians
+#' @return `NULL`, invisibly
#' @examplesIf interactive()
#' # bucket does not exist
#' six_bucket_delete("notabucket")
@@ -131,8 +133,10 @@ aws_bucket_delete <- function(bucket, force = FALSE, ...) {
#' )
#' aws_file_upload(
#' c(demo_rds_file, links_file),
-#'   s3_path(bucket, "newfolder",
-#'     c(basename(demo_rds_file), basename(links_file)))
+#'   s3_path(
+#'     bucket, "newfolder",
+#'     c(basename(demo_rds_file), basename(links_file))
+#'   )
#' )
#'
#' six_bucket_delete(bucket)
@@ -274,12 +278,14 @@ aws_bucket_upload <- function(
#' )
#' aws_bucket_list_objects(bucket = bucket_name)
aws_bucket_list_objects <- function(bucket, ...) {
-  out <- env64$s3$list_objects(bucket, ...)
-  if (rlang::is_empty(out$Contents)) return(tibble())
+  out <- con_s3()$list_objects(bucket, ...)
+  if (rlang::is_empty(out$Contents)) {
+    return(tibble())
+  }
  as_tibble(jsonlite::fromJSON(
    jsonlite::toJSON(out$Contents, auto_unbox = TRUE),
    flatten = TRUE
  )) %>%
mutate(
bucket = bucket,
uri = glue("s3://{bucket}/{Key}"),
@@ -308,8 +314,7 @@ aws_bucket_list_objects <- function(bucket, ...) {
#' aws_buckets()
#' }
aws_buckets <- function(...) {
-  s3fs_creds_refresh()
-  out <- s3fs::s3_dir_info(refresh = TRUE, ...)
+  out <- con_s3fs()$dir_info(refresh = TRUE, ...)
if (is.data.frame(out) && NROW(out) > 0) {
as_tibble(out)
} else {
2 changes: 1 addition & 1 deletion R/database-misc.R
@@ -35,6 +35,6 @@ which_driver <- function(engine) {
)
}

-random_str <- function(prefix = "-") {
+random_db_id_str <- function(prefix = "-") {
paste0(prefix, sub("-.+", "", uuid::UUIDgenerate()))
}
2 changes: 1 addition & 1 deletion R/database-rds.R
@@ -149,7 +149,7 @@ aws_db_rds_create <-
if (verbose) cli::cli_alert_info("Uploading user/pwd to secrets manager")
x <- instance_con_info(id)
aws_secrets_create(
-    name = paste0(id, random_str()),
+    name = paste0(id, random_db_id_str()),
secret = construct_db_secret(
engine = x$engine,
host = x$host,
78 changes: 62 additions & 16 deletions R/files.R
@@ -13,23 +13,21 @@ equal_lengths <- function(x, y) {
#' @export
#' @importFrom fs file_exists
#' @importFrom s3fs s3_file_copy
-#' @inheritParams aws_file_copy
+#' @importFrom purrr map2_vec
#' @param path (character) a file path to read from. required
#' @param remote_path (character) a remote path where the file
#' should go. required
#' @param ... named parameters passed on to [s3fs::s3_file_copy()]
#' @return (character) a vector of remote s3 paths
-#' @details
-#' - For upload: if it does exist it will be created
-#' - For download: if it does not exist, function will return an error
-#'
-#' To upload a folder of files see [aws_bucket_upload()]
+#' @details to upload a folder of files see [aws_bucket_upload()]
#' @family files
-#' @examples \dontrun{
+#' @examplesIf interactive()
+#' bucket <- random_string("bucket")
+#' aws_bucket_create(bucket)
#' demo_rds_file <- file.path(system.file(), "Meta/demo.rds")
#' aws_file_upload(
#' demo_rds_file,
#' s3_path("s64-test-2", basename(demo_rds_file))
#' s3_path(bucket, basename(demo_rds_file))
#' )
#'
#' ## many files at once
@@ -46,20 +44,68 @@ equal_lengths <- function(x, y) {
#'
#' # bucket doesn't exist
#' aws_file_upload(demo_rds_file, "s3://not-a-bucket/eee.rds")
-#' }
#'
+#' @examplesIf interactive()
#' # path doesn't exist
#' aws_file_upload(
#' "file_doesnt_exist.txt",
#' s3_path("s64-test-2", "file_doesnt_exist.txt")
#' )
aws_file_upload <- function(path, remote_path, force = FALSE, ...) {
#' s3_path("s64-test-2", "file_doesnt_exist.txt"))
aws_file_upload <- function(path, remote_path, ...) {
stopifnot(fs::file_exists(path))
+  bucket <- path_s3_parse(remote_path)[[1]]$bucket
+  stop_if_not(aws_bucket_exists(bucket),
+    "bucket {.strong {bucket}} doesn't exist")
+  # s3fs_creds_refresh()
+  map2_vec(path, remote_path, con_s3fs()$file_copy, ...)
+}

+#' Magically upload a file
+#'
+#' @export
+#' @param path (character) one or more file paths to add to
+#'   the `bucket`. required
+#' @inheritParams aws_file_copy
+#' @param ... named params passed on to
+#'   [put_object](https://www.paws-r-sdk.com/docs/s3_put_object/)
+#' @section What is magical:
+#' - Exits early if files do not exist
+#' - Creates the bucket if it does not exist
+#' - Adds files to the bucket, figuring out the key to use from
+#'   the supplied path
+#' - Function is vectorized for the `path` argument; you can
+#'   pass in many file paths
+#' @family files
+#' @family magicians
+#' @return (character) a vector of remote s3 paths where your
+#'   files are located
+#' @examplesIf interactive()
+#' bucket <- random_string("bucket")
+#' demo_rds_file <- file.path(system.file(), "Meta/demo.rds")
+#' six_file_upload(demo_rds_file, bucket)
+#'
+#' ## many files at once
+#' links_file <- file.path(system.file(), "Meta/links.rds")
+#' six_file_upload(c(demo_rds_file, links_file), bucket)
+#'
+#' # set expiration, expire 1 minute from now
+#' six_file_upload(demo_rds_file, bucket, Expires = Sys.time() + 60)
+#'
+#' # bucket doesn't exist
+#' six_file_upload(demo_rds_file, "not-a-buckets")
+#'
+#' # path doesn't exist
+#' # six_file_upload("file_doesnt_exist.txt", random_string("bucket"))
+six_file_upload <- function(path, bucket, force = FALSE, ...) {
+  stopifnot(fs::file_exists(path))
+  bucket_create_if_not(bucket, force)
-  s3fs_creds_refresh()
-  purrr::map2_vec(path, remote_path, s3fs::s3_file_copy, ...)
+  if (!aws_bucket_exists(bucket)) {
+    cli_warning("bucket {.strong {bucket}} not created; exiting")
+    return(invisible())
+  }
+  map(path, \(p) {
+    con_s3()$put_object(Bucket = bucket, Key = basename(p), Body = p, ...)
+  })
+  s3_path(bucket, basename(path))
}

#' Download a file
@@ -146,7 +192,7 @@ aws_file_delete_one <- function(one_path, ...) {
} else {
path_parsed[[1]]$file
}
-  env64$s3$delete_object(
+  con_s3()$delete_object(
path_parsed[[1]]$bucket,
glue("{key}{ifelse(trailing_slash, '/', '')}")
)
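Because six_file_upload() maps put_object over path and finishes with s3_path(bucket, basename(path)), callers get one remote path back per input file. A hypothetical session (file and bucket names invented):

six_file_upload(c("a.csv", "b.csv"), "mybucket")
#> [1] "s3://mybucket/a.csv" "s3://mybucket/b.csv"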
72 changes: 36 additions & 36 deletions R/interface.R
@@ -28,20 +28,8 @@ set_s3_interface <- function(interface = "aws") {
}

# package paws
if (interface == "aws") {
s3con <- paws::s3()
} else if (interface == "localstack") {
s3con <- paws::s3(
credentials = list(
creds = list(
access_key_id = "NOTAREALKEY",
secret_access_key = "AREALLYFAKETOKEN"
)
),
endpoint = LOCALSTACK_ENDPOINT
)
} else {
s3con <- paws::s3(config = list(
if (interface == "minio") {
paws::s3(config = list(
credentials = list(
creds = list(
access_key_id = Sys.getenv("MINIO_USER"),
@@ -50,33 +38,45 @@
      ),
      endpoint = Sys.getenv("MINIO_ENDPOINT")
    ))
-  }
-
-  # package s3fs
-  if (interface == "aws") {
-    s3fs::s3_file_system(
-      aws_access_key_id = Sys.getenv("AWS_ACCESS_KEY_ID"),
-      aws_secret_access_key = Sys.getenv("AWS_SECRET_ACCESS_KEY"),
-      region_name = Sys.getenv("AWS_REGION"),
-      refresh = TRUE
-    )
  } else if (interface == "localstack") {
-    s3fs::s3_file_system(
-      aws_access_key_id = "NOTAREALKEY",
-      aws_secret_access_key = "AREALLYFAKETOKEN",
-      endpoint = LOCALSTACK_ENDPOINT,
-      refresh = TRUE
+    paws::s3(
+      credentials = list(
+        creds = list(
+          access_key_id = "NOTAREALKEY",
+          secret_access_key = "AREALLYFAKETOKEN"
+        )
+      ),
+      endpoint = LOCALSTACK_ENDPOINT
+    )
  } else {
-    s3fs::s3_file_system(
-      aws_access_key_id = Sys.getenv("MINIO_USER"),
-      aws_secret_access_key = Sys.getenv("MINIO_PWD"),
-      endpoint = Sys.getenv("MINIO_ENDPOINT"),
-      refresh = TRUE
-    )
+    paws::s3()
  }

-  return(s3con)
+  # # package s3fs
+  # if (interface == "aws") {
+  #   s3fs::s3_file_system(
+  #     aws_access_key_id = Sys.getenv("AWS_ACCESS_KEY_ID"),
+  #     aws_secret_access_key = Sys.getenv("AWS_SECRET_ACCESS_KEY"),
+  #     region_name = Sys.getenv("AWS_REGION"),
+  #     refresh = TRUE
+  #   )
+  # } else if (interface == "localstack") {
+  #   s3fs::s3_file_system(
+  #     aws_access_key_id = "NOTAREALKEY",
+  #     aws_secret_access_key = "AREALLYFAKETOKEN",
+  #     endpoint = LOCALSTACK_ENDPOINT,
+  #     refresh = TRUE
+  #   )
+  # } else {
+  #   s3fs::s3_file_system(
+  #     aws_access_key_id = Sys.getenv("MINIO_USER"),
+  #     aws_secret_access_key = Sys.getenv("MINIO_PWD"),
+  #     endpoint = Sys.getenv("MINIO_ENDPOINT"),
+  #     refresh = TRUE
+  #   )
+  # }
+
+  # return(s3con)
}

#' Copy of `testthat::is_testing`
Expand Down
2 changes: 1 addition & 1 deletion R/internal.R
@@ -18,7 +18,7 @@ account_id <- memoise::memoise(function() {
#' @return character string of bucket region; NULL if bucket not found
bucket_region <- function(bucket) {
res <- tryCatch(
-    env64$s3$get_bucket_location(bucket),
+    con_s3()$get_bucket_location(bucket),
error = function(e) e
)
if (rlang::is_error(res)) NULL else res$LocationConstraint
4 changes: 0 additions & 4 deletions R/onload.R
@@ -2,10 +2,6 @@
env64 <- new.env()

.onLoad <- function(libname, pkgname) {
-  # sets creds for paws and s3fs for the S3 service
-  env64$s3 <- set_s3_interface("aws")
-
# iam and costexplorer services
env64$iam <- paws::iam()
env64$costexplorer <- paws::costexplorer()
env64$secretsmanager <- paws::secretsmanager()
39 changes: 39 additions & 0 deletions R/s3con.R
@@ -0,0 +1,39 @@
+LOCALSTACK_ENDPOINT <- "http://localhost.localstack.cloud:4566" # nolint
+
+#' Get the `paws` S3 client
+#' @return a list with methods for interfacing with S3;
+#' see <https://www.paws-r-sdk.com/docs/s3/>
+#' @keywords internal
+con_s3 <- function() {
+  set_s3_interface(Sys.getenv("AWS_PROFILE", "aws"))
+}
+
+#' s3fs connection
+#' @examplesIf interactive()
+#' con <- con_s3fs()
+#' file_copy <- con_s3fs()$file_copy
+con_s3fs <- function() {
+  profile <- Sys.getenv("AWS_PROFILE")
+  if (profile == "minio") {
+    s3fs::s3_file_system(
+      aws_access_key_id = Sys.getenv("MINIO_USER"),
+      aws_secret_access_key = Sys.getenv("MINIO_PWD"),
+      endpoint = Sys.getenv("MINIO_ENDPOINT"),
+      refresh = TRUE
+    )
+  } else if (profile == "localstack") {
+    s3fs::s3_file_system(
+      aws_access_key_id = "NOTAREALKEY",
+      aws_secret_access_key = "AREALLYFAKETOKEN",
+      endpoint = LOCALSTACK_ENDPOINT,
+      refresh = TRUE
+    )
+  } else {
+    s3fs::s3_file_system(
+      aws_access_key_id = Sys.getenv("AWS_ACCESS_KEY_ID"),
+      aws_secret_access_key = Sys.getenv("AWS_SECRET_ACCESS_KEY"),
+      region_name = Sys.getenv("AWS_REGION"),
+      refresh = TRUE
+    )
+  }
+}
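Together these helpers build a fresh client per call, keyed off the AWS_PROFILE environment variable ("aws" when unset). A short usage sketch (assumes the relevant environment variables are set; the bucket and file names are invented):

Sys.setenv(AWS_PROFILE = "localstack")
con <- con_s3()  # paws S3 client for the active profile
con$list_buckets()

fs <- con_s3fs() # s3fs filesystem object for the same profile
fs$file_copy("local.csv", "s3://some-bucket/local.csv")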
(diff truncated; remaining changed files not shown)