Skip to content

Commit

Permalink
Merge pull request #205 from MichelNivard/198-fix-lintr-errors
Browse files Browse the repository at this point in the history
Fix linter errors
  • Loading branch information
JamesHWade authored May 13, 2024
2 parents 0e94813 + 2a734d5 commit 8c5e583
Show file tree
Hide file tree
Showing 44 changed files with 204 additions and 267 deletions.
1 change: 1 addition & 0 deletions .Rbuildignore
Original file line number Diff line number Diff line change
Expand Up @@ -17,3 +17,4 @@ dev/
^vignette/chat\.Rmd$
^vignette/ollama\.Rmd$
^vignette/chat-in-source\.Rmd$
.lintr
34 changes: 34 additions & 0 deletions .github/workflows/lint.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
# Workflow derived from https://github.com/r-lib/actions/tree/v2/examples
# Need help debugging build failures? Start at https://github.com/r-lib/actions#where-to-find-help
on:
push:
branches: [main, master]
pull_request:
branches: [main, master]

name: lint

permissions: read-all

jobs:
lint:
runs-on: ubuntu-latest
env:
GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v4

- uses: r-lib/actions/setup-r@v2
with:
use-public-rspm: true

- uses: r-lib/actions/setup-r-dependencies@v2
with:
extra-packages: any::lintr, local::.
needs: lint

- name: Lint
run: lintr::lint_package()
shell: Rscript {0}
env:
LINTR_ERROR_ON_LINT: true
55 changes: 0 additions & 55 deletions .github/workflows/lintr.yml

This file was deleted.

5 changes: 5 additions & 0 deletions .lintr
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
linters:
linters_with_defaults(
line_length_linter = line_length_linter(100),
object_length_linter = NULL
)
1 change: 1 addition & 0 deletions NEWS.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ Cohere is now available as another service. The current version includes the fol
### Internal

- Reverted back to use an R6 class for OpenAI streaming (which now inherits from `SSEparser::SSEparser`). This doesn't affect how the users interact with the addins, but avoids a wider range of server errors.
- We now make heavy use of `{lintr}` for keeping code consistency.
- Fixed a bug in retrieval of OpenAI models
- Fixed a bug in Azure OpenAI request formation.
- Fixed a bug in "in source" calls for addins.
Expand Down
4 changes: 2 additions & 2 deletions R/addin_chatgpt.R
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ random_port <- function() {
#' @inheritParams shiny::runApp
#' @return This function returns nothing because it is meant to run an app as a
#' side effect.
run_app_as_bg_job <- function(appDir = ".", job_name, host, port) {
run_app_as_bg_job <- function(appDir = ".", job_name, host, port) { # nolint
job_script <- create_tmp_job_script(
appDir = appDir,
port = port,
Expand All @@ -66,7 +66,7 @@ run_app_as_bg_job <- function(appDir = ".", job_name, host, port) {
#' application from the specified directory with the specified port and host.
#' @inheritParams shiny::runApp
#' @return A string containing the path of a temporary job script
create_tmp_job_script <- function(appDir, port, host) {
create_tmp_job_script <- function(appDir, port, host) { # nolint
script_file <- tempfile(fileext = ".R")

line <-
Expand Down
25 changes: 7 additions & 18 deletions R/api_perform_request.R
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,8 @@ gptstudio_request_perform <- function(skeleton, ...) {
}

#' @export
gptstudio_request_perform.gptstudio_request_openai <- function(skeleton, shinySession = NULL, ...) {
gptstudio_request_perform.gptstudio_request_openai <- function(skeleton, ...,
shiny_session = NULL) {
# Translate request

skeleton$history <- chat_history_append(
Expand Down Expand Up @@ -53,26 +54,13 @@ gptstudio_request_perform.gptstudio_request_openai <- function(skeleton, shinySe
response <- NULL

if (isTRUE(skeleton$stream)) {
if (is.null(shinySession)) stop("Stream requires a shiny session object")
if (is.null(shiny_session)) stop("Stream requires a shiny session object")

stream_handler <- OpenaiStreamParser$new(
session = shinySession,
session = shiny_session,
user_prompt = skeleton$prompt
)

# This should work exactly the same as stream_chat_completion
# but it uses curl::curl_connection(partial=FALSE), which makes it
# somehow different. `partial` has no documentation and can't be be changed

# request %>%
# req_perform_stream(
# buffer_kb = 32,
# callback = function(x) {
# rawToChar(x) %>% stream_handler$handle_streamed_element()
# TRUE
# }
# )

stream_chat_completion(
messages = skeleton$history,
element_callback = stream_handler$parse_sse,
Expand Down Expand Up @@ -185,7 +173,8 @@ gptstudio_request_perform.gptstudio_request_azure_openai <- function(skeleton, .
}

#' @export
gptstudio_request_perform.gptstudio_request_ollama <- function(skeleton, shinySession = NULL, ...) {
gptstudio_request_perform.gptstudio_request_ollama <- function(skeleton, ...,
shiny_session = NULL) {
# Translate request

skeleton$history <- chat_history_append(
Expand All @@ -203,7 +192,7 @@ gptstudio_request_perform.gptstudio_request_ollama <- function(skeleton, shinySe
model = skeleton$model,
messages = skeleton$history,
stream = skeleton$stream,
shinySession = shinySession,
shiny_session = shiny_session,
user_prompt = skeleton$prompt
)

Expand Down
8 changes: 2 additions & 6 deletions R/api_skeletons.R
Original file line number Diff line number Diff line change
Expand Up @@ -192,12 +192,8 @@ new_gptstudio_request_skeleton_perplexity <- function(
}

# Cohere Skeleton Creation Function
new_gptstudio_request_skeleton_cohere <- function(
model = "command",
prompt = "What is R?",
history = NULL,
stream = FALSE # forcing false until streaming implemented for cohere
) {
new_gptstudio_request_skeleton_cohere <- function(model = "command", prompt = "What is R?",
history = NULL, stream = FALSE) {
new_gpstudio_request_skeleton(
url = "https://api.cohere.ai/v1/chat",
api_key = Sys.getenv("COHERE_API_KEY"),
Expand Down
2 changes: 1 addition & 1 deletion R/app_chat_style.R
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,7 @@ render_docs_message_content <- function(x) {
#'
#' @return A modified textAreaInput
text_area_input_wrapper <-
function(inputId,
function(inputId, # nolint
label,
value = "",
width = NULL,
Expand Down
2 changes: 0 additions & 2 deletions R/app_config.R
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,6 @@ save_user_config <- function(code_style,
}

set_user_options <- function(config) {
op <- options()

op_gptstudio <- list(
gptstudio.code_style = config$code_style,
gptstudio.skill = config$skill,
Expand Down
3 changes: 2 additions & 1 deletion R/create_prompt.R
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ chat_create_system_prompt <-
} else {
""
}

# nolint start
about_style <- if (!is.null(style)) {
switch(style,
"no preference" = "",
Expand All @@ -97,6 +97,7 @@ chat_create_system_prompt <-
} else {
""
}
# nolint end

in_source_instructions <-
if (in_source) {
Expand Down
4 changes: 4 additions & 0 deletions R/gptstudio-package.R
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,7 @@
#' @importFrom glue glue
## gptstudio namespace: end
NULL

dummy <- function() {
SSEparser::SSEparser
}
17 changes: 10 additions & 7 deletions R/gptstudio-sitrep.R
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
#' Check API Connection
#'
#' This generic function checks the API connection for a specified service by dispatching to related methods.
#' This generic function checks the API connection for a specified service
#' by dispatching to related methods.
#'
#' @param service The name of the API service for which the connection is being checked.
#' @param api_key The API key used for authentication.
Expand Down Expand Up @@ -148,10 +149,12 @@ gptstudio_sitrep <- function(verbose = TRUE) {
if (file.exists(user_config)) {
cli::cli_inform("Using user configuration file at {.file {user_config}}")
} else {
cli::cli_text("No user configuration file found at {.file {user_config}}.
Using default configuration.
Change configuration settings in the chat app.
Launch the chat app with addins or {.run [gptstudio_chat()](gptstudio::gptstudio_chat())}.")
cli::cli_text(
"No user configuration file found at {.file {user_config}}.
Using default configuration.
Change configuration settings in the chat app.
Launch the chat app with addins or {.run [gptstudio_chat()](gptstudio::gptstudio_chat())}."
)
}
cli::cli_h2("Current Settings")
cli::cli_bullets(c(
Expand Down Expand Up @@ -204,9 +207,9 @@ gptstudio_sitrep <- function(verbose = TRUE) {
cli::cli_h3("Check Ollama for Local API connection")
ollama_is_available(verbose = TRUE)
cli::cli_h2("Getting help")
cli::cli_inform("See the {.href [gptstudio homepage](https://michelnivard.github.io/gptstudio/)} for getting started guides and package documentation. File an issue or contribute to the package at the {.href [GitHub repo](https://github.com/MichelNivard/gptstudio)}.")
cli::cli_inform("See the {.href [gptstudio homepage](https://michelnivard.github.io/gptstudio/)} for getting started guides and package documentation. File an issue or contribute to the package at the {.href [GitHub repo](https://github.com/MichelNivard/gptstudio)}.") # nolint
} else {
cli::cli_text("Run {.run [gptstudio_sitrep(verbose = TRUE)](gptstudio::gptstudio_sitrep(verbose = TRUE))} to check API connections.")
cli::cli_text("Run {.run [gptstudio_sitrep(verbose = TRUE)](gptstudio::gptstudio_sitrep(verbose = TRUE))} to check API connections.") # nolint
}
cli::cli_rule(left = "End of gptstudio configuration")
}
Expand Down
6 changes: 4 additions & 2 deletions R/mod_app.R
Original file line number Diff line number Diff line change
Expand Up @@ -145,11 +145,13 @@ html_dependencies <- function() {
#'
#' @return A Translator from `shiny.i18n::Translator`
create_translator <- function(language = getOption("gptstudio.language")) {
translator <- shiny.i18n::Translator$new(translation_json_path = system.file("translations/translation.json", package = "gptstudio"))
translator <- shiny.i18n::Translator$new(
translation_json_path = system.file("translations/translation.json", package = "gptstudio")
)
supported_languages <- translator$get_languages()

if (!language %in% supported_languages) {
cli::cli_abort("Language {.val {language}} is not supported. Must be one of {.val {supported_languages}}")
cli::cli_abort("Language {.val {language}} is not supported. Must be one of {.val {supported_languages}}") # nolint
}

translator$set_translation_language(language)
Expand Down
5 changes: 1 addition & 4 deletions R/mod_chat.R
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@ mod_chat_ui <- function(id, translator = create_translator()) {
welcomeMessageOutput(ns("welcome")),
uiOutput(ns("history")),
streamingMessageOutput(ns("streaming")),
# uiOutput(ns("streaming"))
),
div(
class = "mt-auto",
Expand Down Expand Up @@ -73,8 +72,6 @@ mod_chat_server <- function(id,
moduleServer(id, function(input, output, session) {
# Session data ----

ns <- session$ns

rv <- reactiveValues()
rv$reset_welcome_message <- 0L
rv$reset_streaming_message <- 0L
Expand Down Expand Up @@ -127,7 +124,7 @@ mod_chat_server <- function(id,

response <- gptstudio_request_perform(
skeleton = skeleton,
shinySession = session
shiny_session = session
) %>%
gptstudio_response_process()

Expand Down
8 changes: 6 additions & 2 deletions R/mod_history.R
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
mod_history_ui <- function(id) {
ns <- NS(id)
conversation_history <- read_conversation_history()

btn_new_chat <- actionButton(
inputId = ns("new_chat"),
Expand Down Expand Up @@ -96,7 +95,12 @@ mod_history_server <- function(id, settings) {
file.remove(conversation_history_file)
removeModal(session)

showNotification("Deleted all conversations", type = "warning", duration = 3, session = session)
showNotification(
ui = "Deleted all conversations",
type = "warning",
duration = 3,
session = session
)
rv$reload_conversation_history <- rv$reload_conversation_history + 1L
}) %>%
bindEvent(input$confirm_delete_all)
Expand Down
12 changes: 6 additions & 6 deletions R/mod_settings.R
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,6 @@ mod_settings_ui <- function(id, translator = create_translator()) {
selectInput(
inputId = ns("skill"),
label = "Programming Skill", # TODO: update translator
# label = translator$t("Programming Skill"),
choices = c("beginner", "intermediate", "advanced", "genius"),
selected = getOption("gptstudio.skill"),
width = "100%"
Expand Down Expand Up @@ -132,10 +131,6 @@ mod_settings_server <- function(id) {
rv$modify_session_settings <- 0L
rv$create_new_chat <- 0L

api_services <- utils::methods("gptstudio_request_perform") %>%
stringr::str_remove(pattern = "gptstudio_request_perform.gptstudio_request_") %>%
purrr::discard(~ .x == "gptstudio_request_perform.default")

observe({
msg <- glue::glue("Fetching models for {input$service} service...")
showNotification(ui = msg, type = "message", duration = 3, session = session)
Expand Down Expand Up @@ -170,7 +165,12 @@ mod_settings_server <- function(id) {
selected = if (default_model %in% models) default_model else models[1]
)
} else {
showNotification(ui = "No models available", duration = 3, type = "error", session = session)
showNotification(
ui = "No models available",
duration = 3,
type = "error",
session = session
)
cli::cli_alert_danger("No models available")

updateSelectInput(
Expand Down
Loading

0 comments on commit 8c5e583

Please sign in to comment.