From 12868f58622c25f8252959911e469432af54cd15 Mon Sep 17 00:00:00 2001
From: samuel Calderon
Date: Fri, 22 Mar 2024 15:47:46 -0500
Subject: [PATCH 1/2] get_available_models() is now an S3 generic

---
 R/mod_settings.R |   4 +-
 R/models.R       | 101 +++++++++++++++++++++++++++++------------------
 2 files changed, 66 insertions(+), 39 deletions(-)

diff --git a/R/mod_settings.R b/R/mod_settings.R
index e233106d..edea7f81 100644
--- a/R/mod_settings.R
+++ b/R/mod_settings.R
@@ -152,7 +152,9 @@ mod_settings_server <- function(id) {
       showNotification(ui = msg, type = "message",duration = 3, session = session)
 
       models <- tryCatch({
-        get_available_models(input$service)
+        input$service %>%
+          new_service() %>%
+          get_available_models()
       }, error = function(e) {
         showNotification(
           ui = cli::ansi_strip(e$message),
diff --git a/R/models.R b/R/models.R
index 9d88e362..e2148f84 100644
--- a/R/models.R
+++ b/R/models.R
@@ -8,43 +8,68 @@
 #' @export
 #'
 #' @examples
-#' get_available_endpoints()
+#' get_available_models(new_service("huggingface"))
 get_available_models <- function(service) {
-  if (service == "openai") {
-    models <-
-      request_base("models") %>%
-      httr2::req_perform() %>%
-      httr2::resp_body_json() %>%
-      purrr::pluck("data") %>%
-      purrr::map_chr("id")
-
-    models <- models %>%
-      stringr::str_subset("^gpt") %>%
-      stringr::str_subset("instruct", negate = TRUE) %>%
-      stringr::str_subset("vision", negate = TRUE) %>%
-      sort()
-
-    idx <- which(models == "gpt-3.5-turbo")
-    models <- c(models[idx], models[-idx])
-    return(models)
-  } else if (service == "huggingface") {
-    c("gpt2", "tiiuae/falcon-7b-instruct", "bigcode/starcoderplus")
-  } else if (service == "anthropic") {
-    c("claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-2.1", "claude-instant-1.2")
-  } else if (service == "azure_openai") {
-    "Using ENV variables"
-  } else if (service == "perplexity") {
-    c("sonar-small-chat", "sonar-small-online", "sonar-medium-chat",
-      "sonar-medium-online", "codellama-70b-instruct", "mistral-7b-instruct",
-      "mixtral-8x7b-instruct")
-  } else if (service == "ollama") {
-    if (!ollama_is_available()) stop("Couldn't find ollama in your system")
-    ollama_list() %>%
-      purrr::pluck("models") %>%
-      purrr::map_chr("name")
-  } else if (service == "cohere") {
-    c("command", "command-light", "command-nightly", "command-light-nightly")
-  } else if (service == "google") {
-    get_available_models_google()
-  }
+  UseMethod("get_available_models")
 }
+
+new_service <- function(service_name = character()) {
+  stopifnot(rlang::is_scalar_character(service_name))
+  class(service_name) <- c(service_name, "gptstudio_service")
+
+  service_name
+}
+
+get_available_models.openai <- function(service) {
+  models <-
+    request_base("models") %>%
+    httr2::req_perform() %>%
+    httr2::resp_body_json() %>%
+    purrr::pluck("data") %>%
+    purrr::map_chr("id")
+
+  models <- models %>%
+    stringr::str_subset("^gpt") %>%
+    stringr::str_subset("instruct", negate = TRUE) %>%
+    stringr::str_subset("vision", negate = TRUE) %>%
+    sort()
+
+  idx <- which(models == "gpt-3.5-turbo")
+  models <- c(models[idx], models[-idx])
+  return(models)
+}
+
+get_available_models.huggingface <- function(service) {
+  c("gpt2", "tiiuae/falcon-7b-instruct", "bigcode/starcoderplus")
+}
+
+get_available_models.anthropic <- function(service) {
+  c("claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-2.1", "claude-instant-1.2")
+}
+
+get_available_models.azure_openai <- function(service) {
+  "Using ENV variables"
+}
+
+get_available_models.perplexity <- function(service) {
+  c("sonar-small-chat", "sonar-small-online", "sonar-medium-chat",
+    "sonar-medium-online", "codellama-70b-instruct", "mistral-7b-instruct",
+    "mixtral-8x7b-instruct")
+}
+
+get_available_models.ollama <- function(service) {
+  if (!ollama_is_available()) stop("Couldn't find ollama in your system")
+  ollama_list() %>%
+    purrr::pluck("models") %>%
+    purrr::map_chr("name")
+}
+
+get_available_models.cohere <- function(service) {
+  c("command", "command-light",
+    "command-nightly", "command-light-nightly")
+}
+
+get_available_models.google <- function(service) {
+  get_available_models_google()
+}
+

From a3533f5e99c484285812511cb5157f2273b22a7e Mon Sep 17 00:00:00 2001
From: samuel Calderon
Date: Fri, 29 Mar 2024 19:22:39 -0500
Subject: [PATCH 2/2] log model fetching in console

---
 R/mod_settings.R | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/R/mod_settings.R b/R/mod_settings.R
index d0f28f22..bab5226a 100644
--- a/R/mod_settings.R
+++ b/R/mod_settings.R
@@ -139,7 +139,7 @@ mod_settings_server <- function(id) {
     observe({
       msg <- glue::glue("Fetching models for {input$service} service...")
       showNotification(ui = msg, type = "message", duration = 3, session = session)
-
+      cli::cli_alert_info(msg)
       models <- tryCatch(
         {
           get_available_models(input$service)
@@ -151,12 +151,15 @@ mod_settings_server <- function(id) {
             type = "error",
             session = session
           )
+
+          cli::cli_alert_danger(e$message)
           return(NULL)
         }
       )
 
       if (length(models) > 0) {
         showNotification(ui = "Got models!", duration = 3, type = "message", session = session)
+        cli::cli_alert_success("Got models!")
 
         default_model <- getOption("gptstudio.model")
 
@@ -168,6 +171,7 @@ mod_settings_server <- function(id) {
        )
       } else {
         showNotification(ui = "No models available", duration = 3, type = "error", session = session)
+        cli::cli_alert_danger("No models available")
 
         updateSelectInput(
           session = session,