The question is sent to the chatbot server, and the answer is returned. The default chatbot server is ollama running locally, and the default model is codestral:latest. It was also tested with mistral:7b-instruct-v0.2-q6_K for an even smaller model.
ai_ask(
question,
context = NULL,
max_tokens = getOption("SciViews.chatbot.max_tokens",
Sys.getenv("SCIVIEWS_CHATBOT_MAX_TOKENS", 1000L)),
lang = getOption("data.io_lang", "en"),
url = getOption("SciViews.chatbot.url", Sys.getenv("SCIVIEWS_CHATBOT_URL",
"http://localhost:11434/api/chat")),
model = getOption("SciViews.chatbot.model", Sys.getenv("SCIVIEWS_CHATBOT_MODEL",
"codestral:latest")),
api_key = Sys.getenv("CONNECT_API_KEY", ""),
verbose = FALSE
)
ai_explain_term(term, lang = getOption("data.io_lang", "en"), ...)
ai_explain_function(
fun,
package = NULL,
lang = getOption("data.io_lang", "en"),
...
)
ai_explain_code(code, lang = getOption("data.io_lang", "en"), ...)
ai_explain_error(
code = NULL,
error = NULL,
lang = getOption("data.io_lang", "en"),
...
)
A character string with the question to ask.
An R object used as context (usually a data frame). This is not used yet, but it should be implemented in the future.
The maximum number of tokens to return in the answer. By default, it is 1000.
The language to use for the answer. Default is "en". You can also use "fr" for instance.
The URL of the chatbot server. Default is http://localhost:11434/api/chat.
The LLM (large language model) to use. Default is codestral. Make sure you comply with its license (see https://mistral.ai/news/mistral-ai-non-production-license-mnpl/), or switch to another model that better suits your requirements.
The API key to use for connecting to the chatbot server (optional, see your server administrator).
Should more information be printed? FALSE by default.
The term to describe.
Further arguments passed to ai_ask().
The R function to explain.
The R package that provides the function.
A small chunk of R code to explain.
The error message that R returns.
The answer is returned invisibly. The function is used for its side-effect of displaying the chatbot help page with the question, answer and examples.
if (FALSE) { # \dontrun{
# Basic questions
ai_ask("Who are you?")
ai_ask("What is a chatbot?")
ai_ask("Qui es-tu ?")
ai_ask("Qu'est-ce que R ?")
ai_ask("Qu'est-ce que RStudio ?")
ai_ask("What is GitHub?")
ai_ask("Qu'est-ce que le R Markdown ?")
ai_ask("What is data science?")
# Inappropriate questions
ai_ask("Qu'est ce qu'un Acanthurus sp ?")
ai_ask("Raconte-moi une bonne blague.")
ai_ask("Va te faire voir !")
# Now, more complex questions
ai_ask("Comment filtrer un data frame en R?")
ai_ask("Write R code to filter a data frame.")
ai_ask("Que fait AIC()? Donne un exemple.")
ai_ask("Qu'est ce que l'hétéroscédasticite et comment la détecter dans une ANOVA à un facteur ?")
ai_ask("How to determine which model is better using an ANOVA for nested linear models?")
# Explain terms
ai_explain_term("True positive")
ai_explain_term("percentile", lang = "fr")
ai_explain_term("git push")
ai_explain_term("Quarto", lang = "fr")
ai_explain_term("boite à moustaches") # Language mismatch
ai_explain_term("boites à moustaches parallèles", lang = "fr")
# Explain R functions
ai_explain_function("mean")
ai_explain_function("fmean", lang = "fr")
ai_explain_function("collapse::fmean", lang = "fr")
ai_explain_function("glm", package = "stats", lang = "fr")
ai_explain_function("replace_na", "tidyr", lang = "fr") # collapse::replace_na() used instead!
try(ai_explain_function("nonexistingfunction")) # Error
try(ai_explain_function("apropos", package = "stats")) # Wrong package
try(ai_explain_function("apropos", package = "unknownpkg")) # Unknown package
# Explain R code
ai_explain_code("y <- c(1, 5, 7, NA, -Inf, 8)")
ai_explain_code(r"-[
mtcars |>
filter(cyl == 4) |>
summarise(mean_hp = mean(hp), median_disp = median(disp))]-")
ai_explain_code(r"-[
mtcars %>.%
sfilter(., cyl == 4) %>.%
ssummarise(., mean_hp = fmean(hp), median_disp = fmedian(disp))
]-")
ai_explain_code(r"-[
chart(data = trees, Volume ~ Girth) +
geom_point() +
geom_smooth()
]-")
ai_explain_error(error = "longer object length is not a multiple of shorter object length")
ai_explain_error(error = "Error: object 'mydata' not found")
ai_explain_error(error = "Error in lenght(1:10) : could not find function \"lenght\"", lang = "fr")
ai_explain_error(code = r"-[y <- c(1, 5, 7, NA, -Inf, 8, )]-", lang = "fr")
ai_explain_error(code = r"-[trees %>.% filter(Girth > 10))]-", lang = "fr")
ai_explain_error(code = r"-[
urchin <- read("urchin", package = "data.io")
]-", error = r"-[
Error in read("urchin", package = "data.io") :
dataset 'urchin' not found in package 'data.io'
]-", lang = "fr")
} # }