Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Updates #81

Merged
merged 8 commits into from
Mar 27, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion DESCRIPTION
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
Package: chattr
Title: Integrates LLM's with the RStudio IDE
Version: 0.0.0.9007
Version: 0.0.0.9008
Authors@R: c(
person("Edgar", "Ruiz", , "[email protected]", role = c("aut", "cre")),
person(given = "Posit Software, PBC", role = c("cph", "fnd"))
Expand Down
1 change: 0 additions & 1 deletion R/app_server.R
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,6 @@ app_add_user <- function(content) {
}

app_add_assistant <- function(content, input) {
style <- app_theme_style("ui_assistant")
len_hist <- length(ch_env$content_hist)
ch <- app_split_content(content)

Expand Down
2 changes: 1 addition & 1 deletion R/app_theme.R
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
app_theme_style <- function(x = NULL) {
if ((ide_is_rstudio() && !ch_debug_get()) | running_as_job()) {
if ((ide_is_rstudio() && !ch_debug_get()) || running_as_job()) {
ti <- getThemeInfo()
color_bg <- app_theme_rgb_to_hex(ti$background)
color_fg <- app_theme_rgb_to_hex(ti$foreground)
Expand Down
4 changes: 3 additions & 1 deletion R/backend-llamagpt.R
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,9 @@ ch_llamagpt_output <- function(

if (stop_stream) {
if (stream_to == "chat") {
if (!is.null(output_file)) saveRDS(all_output, output_file, compress = FALSE)
if (!is.null(output_file)) {
saveRDS(all_output, output_file, compress = FALSE)
}
file_delete(stream_file)
return(NULL)
} else {
Expand Down
28 changes: 23 additions & 5 deletions R/backend-openai-core.R
Original file line number Diff line number Diff line change
Expand Up @@ -12,21 +12,39 @@
if (ch_debug_get()) {
return("")
}

hosts_path <- defaults$hosts_path

if (is.null(hosts_path)) {
if (os_win()) {
possible_path <- path(Sys.getenv("localappdata"), "github-copilot")

Check warning on line 20 in R/backend-openai-core.R

View check run for this annotation

Codecov / codecov/patch

R/backend-openai-core.R#L20

Added line #L20 was not covered by tests
} else {
possible_path <- "~/.config/github-copilot"
}
if (dir_exists(possible_path)) {
hosts_path <- possible_path

Check warning on line 25 in R/backend-openai-core.R

View check run for this annotation

Codecov / codecov/patch

R/backend-openai-core.R#L25

Added line #L25 was not covered by tests
}
}

token_url <- defaults$token_url
if(is.null(hosts_path) && fail) {
if (is.null(hosts_path) && fail) {
abort(
c(
"There is no default for the RStudio GitHub Copilot configuration folder",
"Please add a 'hosts_path' to your YAML file, or to chattr_defaults() "
))
)
)
}
if(is.null(token_url) && fail) {
if (is.null(token_url) && fail) {
abort(
c(
"There is no default GH Copilot token URL",
"Please add a 'token_url' to your YAML file, or to chattr_defaults() "
))
)
)
}
if(is.null(hosts_path)) {
return(NULL)
}
gh_path <- path_expand(hosts_path)
if (dir_exists(gh_path)) {
Expand All @@ -38,7 +56,7 @@
x_json <- resp_body_json(x)
ret <- x_json$token
} else {
if(fail) {
if (fail) {

Check warning on line 59 in R/backend-openai-core.R

View check run for this annotation

Codecov / codecov/patch

R/backend-openai-core.R#L59

Added line #L59 was not covered by tests
abort("Please setup GitHub Copilot for RStudio first")
}
}
Expand Down
3 changes: 1 addition & 2 deletions R/backend-openai-submit.R
Original file line number Diff line number Diff line change
Expand Up @@ -110,8 +110,7 @@ openai_completion <- function(
prompt,
new_prompt,
r_file_stream,
r_file_complete
) {
r_file_complete) {
UseMethod("openai_completion")
}

Expand Down
10 changes: 5 additions & 5 deletions R/backend-openai-switch.R
Original file line number Diff line number Diff line change
Expand Up @@ -85,12 +85,12 @@

has_error <- substr(current, 1, 9) == "{{error}}"

if (!is.null(current)) {
if (!is.null(current) && !testing && !has_error) {
if (is.null(ch_env$stream$response)) {
if (ui_current_console()) {
if (!testing && !has_error) cat(current)
cat(current)

Check warning on line 91 in R/backend-openai-switch.R

View check run for this annotation

Codecov / codecov/patch

R/backend-openai-switch.R#L91

Added line #L91 was not covered by tests
} else {
if (!testing && !has_error) ide_paste_text(current)
ide_paste_text(current)

Check warning on line 93 in R/backend-openai-switch.R

View check run for this annotation

Codecov / codecov/patch

R/backend-openai-switch.R#L93

Added line #L93 was not covered by tests
}
} else {
if (nchar(current) != nchar(ch_env$stream$response)) {
Expand All @@ -100,10 +100,10 @@
nchar(current)
)
if (ui_current_console()) {
if (!testing && !has_error) cat(delta)
cat(delta)

Check warning on line 103 in R/backend-openai-switch.R

View check run for this annotation

Codecov / codecov/patch

R/backend-openai-switch.R#L103

Added line #L103 was not covered by tests
} else {
for (i in 1:nchar(delta)) {
if (!testing && !has_error) ide_paste_text(substr(delta, i, i))
ide_paste_text(substr(delta, i, i))

Check warning on line 106 in R/backend-openai-switch.R

View check run for this annotation

Codecov / codecov/patch

R/backend-openai-switch.R#L106

Added line #L106 was not covered by tests
}
}
}
Expand Down
7 changes: 4 additions & 3 deletions R/chattr-app.R
Original file line number Diff line number Diff line change
Expand Up @@ -5,16 +5,17 @@
#' the document, or console, in the IDE.
#' @param as_job_port Port to use for the Shiny app. Applicable only if `as_job`
#' is set to TRUE.
#' @param as_job_host Host IP to use for the Shiny app. Applicable only if `as_job`
#' is set to TRUE.
#' @param as_job_host Host IP to use for the Shiny app. Applicable only if
#' `as_job` is set to TRUE.
#' @returns A chat interface inside the 'RStudio' IDE
#' @export
chattr_app <- function(viewer = c("viewer", "dialog"),
as_job = getOption("chattr.as_job", FALSE),
as_job_port = getOption("shiny.port", 7788),
as_job_host = getOption("shiny.host", "127.0.0.1")) {
td <- chattr_defaults(type = "chat")
show_init <- TRUE
if(interactive() && is.null(td$provider)) {
if (interactive() && is.null(td$provider)) {

Check warning on line 18 in R/chattr-app.R

View check run for this annotation

Codecov / codecov/patch

R/chattr-app.R#L18

Added line #L18 was not covered by tests
chattr_use()
td <- chattr_defaults(type = "chat")
show_init <- FALSE
Expand Down
6 changes: 4 additions & 2 deletions R/chattr-defaults-save.R
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,10 @@
#' the config package
#' @param path Path to the file to save the configuration to
#' @param overwrite Indicates to replace the file if it exists
#' @param type The type of UI to save the defaults for. It defaults to NULL which
#' will save whatever types had been used during the current R session
#' @param type The type of UI to save the defaults for. It defaults to NULL
#' which will save whatever types had been used during the current R session
#' @returns It creates a YAML file with the defaults set in the current R
#' session.
#' @export
chattr_defaults_save <- function(path = "chattr.yml",
overwrite = FALSE,
Expand Down
27 changes: 14 additions & 13 deletions R/chattr-defaults.R
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,9 @@
#' @details The idea is that because we will use addin shortcut to execute the
#' request, all of the other arguments can be controlled via this function. By
#' default, it will try to load defaults from a `config` YAML file, if none are
#' found, then the defaults for GPT 3.5 will be used. The defaults can be modified
#' by calling this function, even after the interactive session has started.
#' found, then the defaults for GPT 3.5 will be used. The defaults can be
#' modified by calling this function, even after the interactive session has
#' started.
#' @export
#' @param max_data_files Sets the maximum number of data files to send to the
#' model. It defaults to 20. To send all, set to NULL
Expand All @@ -21,14 +22,17 @@
#' part of the request
#' @param yaml_file The path to a valid `config` YAML file that contains the
#' defaults to use in a session
#' @param model_arguments Additional arguments to pass to the model as part of the
#' request, it requires a list. Examples of arguments: temperature, top_p,
#' @param model_arguments Additional arguments to pass to the model as part of
#' the request, it requires a list. Examples of arguments: temperature, top_p,
#' max_tokens
#' @param type Entry point to interact with the model. Accepted values: 'notebook',
#' 'chat'
#' @param type Entry point to interact with the model. Accepted values:
#' 'notebook', 'chat'
#' @param force Re-process the base and any work space level file defaults
#' @param label Label to display in the Shiny app, and other locations
#' @param ... Additional model arguments that are not standard for all models/backends
#' @param ... Additional model arguments that are not standard for all
#' models/backends
#' @returns An 'ch_model' object that contains the current defaults that will be
#' used to communicate with the LLM.
#' @inheritParams chattr

chattr_defaults <- function(type = "default",
Expand All @@ -45,8 +49,7 @@ chattr_defaults <- function(type = "default",
yaml_file = "chattr.yml",
force = FALSE,
label = NULL,
...
) {
...) {
function_args <- c(as.list(environment()), ...)

sys_type <- Sys.getenv("CHATTR_TYPE", NA)
Expand Down Expand Up @@ -83,14 +86,12 @@ chattr_defaults <- function(type = "default",
for (j in seq_along(check_files)) {
td_defaults <- read_yaml(file = check_files[j])
loaded_default <- chattr_defaults_get(type = "default")
if (!is.null(loaded_default)) {
td_defaults$default <- loaded_default
}
td_defaults$default <- loaded_default %||% td_defaults$default
check_defaults <- c("default", type)
for (i in seq_along(check_defaults)) {
td <- td_defaults[[check_defaults[i]]]
if (!is.null(td)) {
if (length(td$prompt) > 0 & any(grepl("\n", td$prompt))) {
if (length(td$prompt) > 0 && any(grepl("\n", td$prompt))) {
td$prompt <- unlist(strsplit(td$prompt, split = "\n"))
}
chattr_defaults_set(
Expand Down
1 change: 1 addition & 0 deletions R/chattr-test.R
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
#' Confirms connectivity to LLM interface
#' @inheritParams ch_submit
#' @returns It returns console messages with the status of the test.
#' @export
chattr_test <- function(defaults = NULL) {
if (is.null(defaults)) defaults <- chattr_defaults()
Expand Down
18 changes: 8 additions & 10 deletions R/chattr-use.R
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@
#' @export
chattr_use <- function(model_label = NULL) {
interactive_label <- is_interactive() && is.null(model_label)
overwrite <- FALSE
if (interactive_label) {
model_label <- ch_get_ymls()
}
Expand All @@ -42,7 +41,7 @@ ch_get_ymls <- function(menu = TRUE) {
copilot_token <- openai_token_copilot(
defaults = copilot_defaults$default,
fail = FALSE
)
)
copilot_exists <- !is.null(copilot_token)

gpt_token <- openai_token_chat(fail = FALSE)
Expand All @@ -53,7 +52,7 @@ ch_get_ymls <- function(menu = TRUE) {
read_yaml()

llama_exists <- file_exists(llama_defaults$default$path) &&
file_exists(llama_defaults$default$model)
file_exists(llama_defaults$default$model)

prep_files <- files %>%
map(read_yaml) %>%
Expand All @@ -71,25 +70,25 @@ ch_get_ymls <- function(menu = TRUE) {
path_ext_remove()
)

if(!copilot_exists) {
if (!copilot_exists) {
prep_files$copilot <- NULL
}

if(!gpt_exists) {
if (!gpt_exists) {
prep_files$gpt35 <- NULL
prep_files$gpt4 <- NULL
prep_files$davinci <- NULL
}

if(!llama_exists) {
if (!llama_exists) {
prep_files$llamagpt <- NULL
}

if(length(prep_files) == 0) {
if (length(prep_files) == 0) {
abort(
"No model setup found. Please use `?chattr_use` to get started",
call = NULL
)
)
}

orig_names <- names(prep_files)
Expand All @@ -106,7 +105,7 @@ ch_get_ymls <- function(menu = TRUE) {
}) %>%
set_names(orig_names)

if(menu) {
if (menu) {
cli_h3("chattr - Available models")
cli_text("Select the number of the model you would like to use: ")
model_no <- menu(prep_files)
Expand All @@ -115,5 +114,4 @@ ch_get_ymls <- function(menu = TRUE) {
} else {
prep_files
}

}
4 changes: 2 additions & 2 deletions R/chattr.R
Original file line number Diff line number Diff line change
@@ -1,17 +1,17 @@
#' Submits prompt to LLM
#' @param prompt Request to send to LLM. Defaults to NULL
#' @inheritParams ch_submit
#' @returns The output of the LLM to the console, document or script.
#' @export
chattr <- function(prompt = NULL,
preview = FALSE,
prompt_build = TRUE,
stream = NULL) {

ui <- ui_current()
if (ui == "") ui <- "console"
defaults <- chattr_defaults(type = ui)

if(is.null(defaults$provider)) {
if (is.null(defaults$provider)) {
chattr_use()
defaults <- chattr_defaults(type = ui)
}
Expand Down
2 changes: 0 additions & 2 deletions R/ide.R
Original file line number Diff line number Diff line change
Expand Up @@ -66,9 +66,7 @@ ide_comment_selection <- function() {

text_range <- active_doc$selection[[1]]$range
start_row <- text_range$start[[1]]
start_col <- text_range$start[[2]]
end_row <- text_range$end[[1]]
end_col <- text_range$end[[2]]

selected <- active_doc$contents[start_row:end_row]
end_size <- nchar(selected[length(selected)])
Expand Down
19 changes: 19 additions & 0 deletions R/utils.R
Original file line number Diff line number Diff line change
Expand Up @@ -110,3 +110,22 @@
cli_li("{.val0 Model:} {.val1 {x$model}}")
cli_li("{.val0 Label:} {.val1 {x$label}}")
}

# ------------------------ Determine OS ----------------------------------------
os_get <- function() {
if (.Platform$OS.type == "windows") {
"win"

Check warning on line 117 in R/utils.R

View check run for this annotation

Codecov / codecov/patch

R/utils.R#L117

Added line #L117 was not covered by tests
} else if (Sys.info()["sysname"] == "Darwin") {
"mac"

Check warning on line 119 in R/utils.R

View check run for this annotation

Codecov / codecov/patch

R/utils.R#L119

Added line #L119 was not covered by tests
} else {
"unix"
}
}

os_win <- function() {
ifelse(os_get() == "win", TRUE, FALSE)
}

os_mac <- function() {
ifelse(os_get() == "mac", TRUE, FALSE)

Check warning on line 130 in R/utils.R

View check run for this annotation

Codecov / codecov/patch

R/utils.R#L130

Added line #L130 was not covered by tests
}
1 change: 0 additions & 1 deletion inst/configs/copilot.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ default:
include_doc_contents: FALSE
system_msg: You are a helpful coding assistant
token_url: "https://api.github.com/copilot_internal/v2/token"
hosts_path: "~/.config/github-copilot"
model_arguments:
stream: TRUE
chat:
Expand Down
2 changes: 0 additions & 2 deletions inst/prompt/script.R
Original file line number Diff line number Diff line change
Expand Up @@ -8,5 +8,3 @@ writeLines(
),
here::here("inst", "prompt", "base.txt")
)


3 changes: 3 additions & 0 deletions man/chattr.Rd

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

7 changes: 5 additions & 2 deletions man/chattr_app.Rd

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading
Loading