CRAN 5.2.2
laresbernardo committed May 16, 2023
1 parent ce60b23 commit 1fbd878
Showing 12 changed files with 105 additions and 78 deletions.
2 changes: 1 addition & 1 deletion DESCRIPTION
@@ -1,7 +1,7 @@
Package: lares
Type: Package
Title: Analytics & Machine Learning Sidekick
Version: 5.2.2.9000
Version: 5.2.2
Authors@R: c(
person("Bernardo", "Lares", , "[email protected]", c("aut", "cre")))
Maintainer: Bernardo Lares <[email protected]>
6 changes: 4 additions & 2 deletions R/audio.R
@@ -86,8 +86,10 @@ get_mp3 <- function(id,
stop(msg)
}
)

if (1 %in% ret) return(invisible(NULL))

if (1 %in% ret) {
return(invisible(NULL))
}

f <- listfiles(getwd(), recursive = FALSE) %>%
filter(grepl("\\.info\\.json", .data$filename)) %>%
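Most hunks in this commit are mechanical restyling of this kind: single-line conditionals gain explicit braces, trailing whitespace goes away, and closing parentheses move to their own line. A minimal sketch of the brace pattern (that a formatter such as styler produced these changes is an assumption; the commit message does not say):

# Before: single-line early exit
if (1 %in% ret) return(invisible(NULL))

# After: braced form, per the tidyverse style guide
if (1 %in% ret) {
  return(invisible(NULL))
}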
48 changes: 29 additions & 19 deletions R/chatgpt.R
@@ -25,36 +25,40 @@ hist_reply <- "GPT_HIST_REPLY"
#' api_key <- get_credentials()$openai$secret_key
#' # Open question:
#' gpt_ask("Can you write an R function to plot a dummy histogram?", api_key)
#'
#'
#' ##### The following examples return dataframes:
#' # Classify each element based on categories:
#' gpt_classify(1:10, c("odd", "even"))
#'
#' # Add all tags that apply to each element based on tags:
#' gpt_tag(
#' c("I love chocolate", "I hate chocolate", "I like Coke"),
#' c("food", "positive", "negative", "beverage"))
#' c("food", "positive", "negative", "beverage")
#' )
#'
#' # Extract specific information:
#' gpt_extract(
#' c("My mail is 123@@test.com", "30 Main Street, Brooklyn, NY, USA", "+82 2-312-3456", "$1.5M"),
#' c("email", "full state name", "country of phone number", "amount as number"))
#'
#' c("email", "full state name", "country of phone number", "amount as number")
#' )
#'
#' # Format values
#' gpt_format(
#' c("March 27th, 2021", "12-25-2023 3:45PM", "01.01.2000", "29 Feb 92"),
#' format = "ISO Date getting rid of timestamps")
#'
#' format = "ISO Date getting rid of timestamps"
#' )
#'
#' # Convert units
#' gpt_convert(c("50C", "300K"), "Fahrenheit")
#'
#'
#' # Create a table with data
#' gpt_table("5 random people's address in South America, email, phone, age between 18-30")
#'
#'
#' # Translate text to any language
#' gpt_translate(
#' rep("I love you with all my heart", 5),
#' language = c("spanish", "chinese", "japanese", "russian", "german"))
#' language = c("spanish", "chinese", "japanese", "russian", "german")
#' )
#'
#' # Now let's read the historical prompts and replies from current session
#' gpt_history()
@@ -68,34 +68,39 @@ gpt_ask <- function(ask,
ts <- Sys.time()
if (length(ask) > 1) ask <- paste(ask, collapse = " + ")
# Save historical questions
if (cache_exists(hist_ask)){
if (cache_exists(hist_ask)) {
cache <- cache_read(hist_ask, quiet = TRUE, ...)
cache <- rbind(cache, data.frame(ts = ts, prompt = ask))
} else cache <- data.frame(ts = ts, prompt = ask)
} else {
cache <- data.frame(ts = ts, prompt = ask)
}
cache_write(distinct(cache), hist_ask, quiet = TRUE, ...)
# Ask ChatGPT using their API
response <- POST(
url = url,
url = url,
add_headers(Authorization = paste("Bearer", secret_key)),
httr::content_type_json(),
encode = "json",
body = list(
model = model,
messages = list(list(
role = "user",
role = "user",
content = ask
))
)
)
ret <- content(response)
if ("error" %in% names(ret)) warning(ret$error$message)
if ("message" %in% names(ret$choices[[1]]) & !quiet)
if ("message" %in% names(ret$choices[[1]]) & !quiet) {
cat(stringr::str_trim(ret$choices[[1]]$message$content))
}
# Save historical answers
if (cache_exists(hist_reply)){
if (cache_exists(hist_reply)) {
cache <- cache_read(hist_reply, quiet = TRUE, ...)
cache <- rbind(cache, data.frame(ts = ts, reply = ret))
} else cache <- data.frame(ts = ts, reply = ret)
} else {
cache <- data.frame(ts = ts, reply = ret)
}
cache_write(distinct(cache), hist_reply, quiet = TRUE, ...)
return(invisible(ret))
}
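Prompts and replies are cached under separate names and later paired on the shared ts timestamp by gpt_history(). A minimal sketch of the caching round-trip, assuming the lares cache helpers behave like RDS read/write keyed by name (simplified stand-ins; the real helpers also accept quiet and ...):

# Hypothetical stand-ins for cache_exists/cache_read/cache_write
cache_path   <- function(name) file.path(tempdir(), paste0(name, ".rds"))
cache_exists <- function(name) file.exists(cache_path(name))
cache_read   <- function(name) readRDS(cache_path(name))
cache_write  <- function(x, name) saveRDS(x, cache_path(name))

ts <- Sys.time()
new_row <- data.frame(ts = ts, prompt = "Plot a dummy histogram")
cache <- if (cache_exists("GPT_HIST_ASK")) {
  rbind(cache_read("GPT_HIST_ASK"), new_row)
} else {
  new_row
}
cache_write(dplyr::distinct(cache), "GPT_HIST_ASK")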
@@ -108,7 +117,7 @@ gpt_history <- function() {
replies <- cache_read(hist_reply, quiet = TRUE)
hist <- as_tibble(left_join(asks, replies, by = "ts")) %>%
select(.data$ts, .data$prompt, contains("message.content"), everything())
return(hist)
return(hist)
} else {
warning("No historical prompts nor replies registered yet")
}
@@ -186,13 +195,14 @@ gpt_translate <- function(x, language, quiet = TRUE, ...) {
df <- gpt_markdown2df(resp)
return(df)
}

gpt_prompt_builder <- function(type = "category", cols = c("item", type), x, y) {
paste(
"Return a structured markdown table with", length(cols), "columns named", v2t(cols, and = "and"),
". Consider the following items:", v2t(x, quotes = FALSE),
". For each respective item, what", type, "represent each item using:", v2t(y, quotes = FALSE),
". If you don't know any item, replace with NA")
". If you don't know any item, replace with NA"
)
}

gpt_markdown2df <- function(resp) {
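Each of the gpt_classify/gpt_tag/gpt_extract/gpt_format/gpt_convert/gpt_table/gpt_translate helpers asks the model for a structured markdown table, which gpt_markdown2df() turns into a data.frame. The parser itself is not shown in this diff; a sketch of what such a step could look like (an assumption, not the package's actual implementation):

md <- c(
  "| item | category |",
  "|------|----------|",
  "| 1    | odd      |",
  "| 2    | even     |"
)
cells <- lapply(strsplit(gsub("^\\||\\|$", "", md), "\\|"), trimws)
# Drop the |---|---| separator row, keep header and data rows
cells <- cells[!grepl("^-+$", vapply(cells, paste, "", collapse = ""))]
df <- stats::setNames(
  as.data.frame(do.call(rbind, cells[-1]), stringsAsFactors = FALSE),
  cells[[1]]
)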
4 changes: 3 additions & 1 deletion R/credentials.R
@@ -55,7 +55,9 @@ get_credentials <- function(from = NA, dir = NA,
filename = "config.yml",
env = "LARES_CREDS",
...) {
if (is.list(dir)) return(dir)
if (is.list(dir)) {
return(dir)
}
if (isTRUE(is.na(dir)) || isTRUE(is.null(dir))) {
dir <- Sys.getenv(env)
if (dir == "") {
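With this early return, passing an already-built credentials list short-circuits the config.yml lookup, which is convenient for tests or for injecting secrets from another source. A usage sketch (list shape inferred from the get_credentials()$openai$secret_key example earlier in this diff; the key is a placeholder):

creds <- list(openai = list(secret_key = "sk-xxxx"))
get_credentials(dir = creds) # returned as-is; no config.yml is read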
27 changes: 12 additions & 15 deletions R/currency.R
@@ -39,23 +39,20 @@ get_currency <- function(currency_pair,
if (from == to) to <- from + 1
if (to > Sys.Date()) to <- Sys.Date()

if (Sys.Date() == from) {
x <- suppressWarnings(getQuote(string, auto.assign = FALSE))
x <- try(data.frame(suppressWarnings(getSymbols(
string,
env = NULL,
from = from, to = to,
src = "yahoo"
))))
if ("try-error" %in% class(x)) {
warning(x)
return(x)
}
if (substr(rownames(x), 1, 1)[1] == "X") {
x <- x[1, ]
rownames(x) <- Sys.Date()
x[, 1] <- NULL
} else {
x <- data.frame(suppressWarnings(getSymbols(
string,
env = NULL,
from = from, to = to,
src = "yahoo"
)))
if (substr(rownames(x), 1, 1)[1] == "X") {
x <- x[1, ]
rownames(x) <- Sys.Date()
}
}

rate <- data.frame(date = as.Date(rownames(x)), rate = x[, 1])

if (fill) {
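The rewrite drops the getQuote() special case for same-day requests: every request now goes through getSymbols(), wrapped in try() so a Yahoo Finance failure degrades into a warning instead of an error. The same pattern outside the package, as a sketch (the "PAIR=X" ticker format is Yahoo's FX convention, assumed here):

library(quantmod)
x <- try(data.frame(suppressWarnings(getSymbols(
  "USDEUR=X",
  env = NULL, from = Sys.Date() - 7, to = Sys.Date(), src = "yahoo"
))), silent = TRUE)
if (inherits(x, "try-error")) warning(x) else head(x)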
65 changes: 39 additions & 26 deletions R/facebook.R
@@ -42,7 +42,8 @@ fb_process <- function(input, paginate = TRUE, sleep = 0, quiet = FALSE, ...) {
"\n1. Within the Graph API Explorer, select an Application.",
"\n2. Go to Get Token and select the Page Acces Token needed.",
"Note that there are navigation arrows if there are lots of accounts.",
"\n3. Copy and use the Acces Token created."))
"\n3. Copy and use the Acces Token created."
))
browseURL("https://developers.facebook.com/tools/explorer")
}
}
@@ -57,8 +58,10 @@ fb_process <- function(input, paginate = TRUE, sleep = 0, quiet = FALSE, ...) {

# Deal with GET + response vs jsonlite
out <- fromJSON(toJSON(import))
if (all(lapply(out, length) == 1)) return(out)

if (all(lapply(out, length) == 1)) {
return(out)
}

# First pagination
results <- list()
i <- 1
@@ -80,11 +83,14 @@ fb_process <- function(input, paginate = TRUE, sleep = 0, quiet = FALSE, ...) {
}
while (exists("next", out$paging) && (i < as.integer(paginate) || isTRUE(paginate))) {
i <- i + 1
res <- try({
out <- fromJSON(out$paging[["next"]])
results[[i]] <- list_flattener(out$data, i)
if (!quiet) show_pag_status(results, i, sleep, quiet)
}, silent = TRUE)
res <- try(
{
out <- fromJSON(out$paging[["next"]])
results[[i]] <- list_flattener(out$data, i)
if (!quiet) show_pag_status(results, i, sleep, quiet)
},
silent = TRUE
)
if (inherits(res, "try-error")) {
warning(paste0("Returning partial results given last pagination (", i, ") returned error"))
break
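Wrapping each pagination step in try() means a failing page yields partial results plus a warning instead of losing everything fetched so far. Stripped of the package's list flatteners and progress output, the cursor-following core is roughly this (endpoint and token are placeholders to substitute):

library(jsonlite)
url <- "https://graph.facebook.com/v16.0/<id>/ads?access_token=<token>"
out <- fromJSON(url)
results <- list(out$data)
while (!is.null(out$paging[["next"]])) {
  page <- try(fromJSON(out$paging[["next"]]), silent = TRUE)
  if (inherits(page, "try-error")) {
    warning("Returning partial results given last pagination returned error")
    break
  }
  out <- page
  results[[length(results) + 1]] <- out$data
}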
@@ -102,7 +108,7 @@ fb_process <- function(input, paginate = TRUE, sleep = 0, quiet = FALSE, ...) {
mutate_at(vars(contains("url")), list(as.character)) %>%
mutate_at(vars(contains("name")), list(as.character)) %>%
as_tibble()

# Save last pagination and if it returned complete results
attr(ret, "paging_done") <- !("next" %in% names(out[["paging"]]))
attr(ret, "paging") <- out[["paging"]]
@@ -126,7 +132,7 @@ show_pag_status <- function(results, i = 1, sleep = 0, quiet = FALSE) {
} else {
flush.console()
total <- sum(unlist(lapply(results, nrow)))
cat(paste(sprintf("\r>>> Pagination imported: %s | Total rows: %s", i, total)))
cat(paste(sprintf("\r>>> Pagination imported: %s | Total rows: %s", i, total)))
}
}
Sys.sleep(sleep)
@@ -466,7 +472,8 @@ fb_rf <- function(token,
destination_ids = toJSON(as.character(destination_ids)),
target_spec = ts,
access_token = token
)), encode = "json")[[1]]
)
), encode = "json")[[1]]

if ("message" %in% names(prediction)) {
message(paste(
@@ -548,15 +555,15 @@ fb_accounts <- function(token,
set_config(config(http_version = 0))
api_version <- ifelse(is.null(api_version), META_API_VER, api_version)
output <- NULL

# Select which type of ad accounts
type <- paste0(type, "_ad_accounts")

for (i in seq_along(type)) {
message(paste(">>> Fetching", type[i]))
URL <- paste(META_GRAPH_URL, api_version, business_id, type[i], sep = "/")
continue <- TRUE

# Call insights
import <- GET(
URL,
Expand All @@ -568,17 +575,17 @@ fb_accounts <- function(token,
encode = "json"
)
ret <- fb_process(import, quiet = TRUE, ...)

if (inherits(ret, "data.frame")) {
ret$type <- type[i]
output <- bind_rows(output, ret)
}
}

if (!inherits(output, "data.frame")) {
return(invisible(NULL))
}

# Account status dictionary
output <- mutate(output, account_status = case_when(
account_status == "1" ~ "ACTIVE",
Expand All @@ -592,12 +599,14 @@ fb_accounts <- function(token,
account_status == "201" ~ "ANY_ACTIVE",
account_status == "202" ~ "ANY_CLOSED"
))

output <- suppressMessages(type.convert(
output, numerals = "no.loss", as.is = TRUE)) %>%
output,
numerals = "no.loss", as.is = TRUE
)) %>%
arrange(desc(.data$amount_spent)) %>%
as_tibble()

return(output)
}

@@ -667,7 +676,7 @@ fb_creatives <- function(token, which,
...) {
set_config(config(http_version = 0))
api_version <- ifelse(is.null(api_version), META_API_VER, api_version)

fields <- c(
"account_id", "object_type", "name", "status", "campaign_id",
"call_to_action_type", "image_url", "thumbnail_url"
Expand All @@ -684,7 +693,9 @@ fb_creatives <- function(token, which,
token
)
import <- GET(linkurl)
if (!process) return(import)
if (!process) {
return(import)
}
ret <- fb_process(import, ...)
return(ret)
}
@@ -723,14 +734,14 @@ fb_ads <- function(token,
...) {
set_config(config(http_version = 0))
api_version <- ifelse(is.null(api_version), META_API_VER, api_version)

if (is.na(fields[1])) {
fields <- paste(
"account_id, campaign, campaign_id, objective, adset_id, id,",
"name, source_ad, cpm, spend, recommendations"
)
}

import <- GET(
glued("{META_GRAPH_URL}/{api_version}/{which}/ads"),
query = list(
Expand All @@ -744,8 +755,10 @@ fb_ads <- function(token,
),
encode = "json"
)

if (!process) return(import)

if (!process) {
return(import)
}
ret <- fb_process(import, ...)
return(ret)
}
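Because fb_process() stamps its output with pagination metadata, callers of fb_ads() and friends can check whether a capped paginate argument (forwarded through ...) stopped early. A usage sketch (token and account id are placeholders):

ads <- fb_ads(token, which = "act_123456789", paginate = 2)
attr(ads, "paging_done") # FALSE if more pages remained
attr(ads, "paging")      # raw cursors, to resume manually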
4 changes: 1 addition & 3 deletions R/frequencies.R
@@ -305,11 +305,9 @@ scale_x_reordered <- function(..., sep = "___") {
#' @return Plot when \code{plot=TRUE} and data.frame with grouped frequency results
#' when \code{plot=FALSE}.
#' @examples
#' \donttest{
#' data(dft) # Titanic dataset
#' freqs_df(dft)
#' freqs_df(dft, plot = TRUE)
#' }
#' @export
freqs_df <- function(df,
max = 0.9, min = 0.0, novar = TRUE,
@@ -327,7 +325,7 @@ freqs_df <- function(df,
names <- lapply(df, function(x) length(unique(x)))
unique <- as.data.frame(names)
colnames(unique) <- names(names)
which <- rownames(t(-sort(unique)))
which <- names(sort(-rank(unique, ties.method = "first")))

# Too much variance
no <- names(unique)[unique > nrow(df) * max]
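The functional change in freqs_df() replaces rownames(t(-sort(unique))) with an explicit rank()-based ordering of columns by how many unique values each has, breaking ties by first appearance. Its effect, shown on a plain named vector for clarity (the package applies it to the one-row data.frame of per-column unique counts built just above):

counts <- c(id = 891, sex = 2, class = 3)
names(sort(-rank(counts, ties.method = "first")))
#> [1] "id" "class" "sex"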
(5 more changed files not shown)