Add logs in an R Plumber API - r

I'm creating some APIs with R and Plumber. I configure entrypoint.R like this:
library(plumber)
library(logger)

# Create log directory
log_dir <- "/api/logs"
if (!fs::dir_exists(log_dir)) fs::dir_create(log_dir)
log_appender(appender_tee(tempfile("plumber_", log_dir, ".log")))

convert_empty <- function(string) {
  if (string == "") {
    "-"
  } else {
    string
  }
}

# Initiate
# pr <- plumber::plumb(here::here('plumber.R')) # local test
pr <- plumber::plumb(rev(commandArgs())[1]) # via Docker
args <- list(host = '0.0.0.0', port = 8000)
if (packageVersion('plumber') >= '1.0.0') { pr$setDocs(TRUE) } else { args$swagger <- TRUE }

# Create log hook
pr$registerHooks(
  list(
    preroute = function() {
      # Start timer for log info
      tictoc::tic()
    },
    postroute = function(req, res) {
      end <- tictoc::toc(quiet = TRUE)
      # Log details about the request and the response
      # log_info('{convert_empty(req$REMOTE_ADDR)} "{convert_empty(req$HTTP_USER_AGENT)}" {convert_empty(req$HTTP_HOST)} {convert_empty(req$REQUEST_METHOD)} {convert_empty(req$PATH_INFO)} {convert_empty(res$status)} {round(end$toc - end$tic, digits = getOption("digits", 5))}')
      log_info('{convert_empty(req$REMOTE_ADDR)}')
    }
  )
)

# Run entrypoint
do.call(pr$run, args)
I can't get the full logs, because this line
log_info('{convert_empty(req$REMOTE_ADDR)} "{convert_empty(req$HTTP_USER_AGENT)}" {convert_empty(req$HTTP_HOST)} {convert_empty(req$REQUEST_METHOD)} {convert_empty(req$PATH_INFO)} {convert_empty(res$status)} {round(end$toc - end$tic, digits = getOption("digits", 5))}')
raises an error and the application stops working.
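My suspicion (not verified) is that one of the request fields is NULL for some requests, e.g. req$HTTP_USER_AGENT when a client sends no User-Agent header; NULL == "" evaluates to logical(0), so the if() inside convert_empty() fails with "argument is of length zero" and kills the hook. A NULL-safe variant I could try:

convert_empty <- function(string) {
  # absent headers arrive as NULL; NULL == "" returns logical(0), which
  # makes if() error out, so handle that case before the comparison
  if (is.null(string) || length(string) == 0 || string == "") {
    "-"
  } else {
    as.character(string)
  }
}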
I also want to add custom logs in the API to track the flow for debugging. For example, in this endpoint:
#* Ping
#* @get /ping
function() {
  rtn <- jsonlite::unbox(data.frame(Status = TRUE))
  return(rtn)
}
I want to add my own logs so I can check which functions are being called and what is happening in them.

I found a library called log4r that allows me to add logs from my functions and save them to a file. Some examples here.
Install the package
install.packages("log4r")
Use the package
library(log4r)

logger <- logger()
info(logger, "Located nearest gas station.")
#> INFO [2019-09-04 16:31:04] Located nearest gas station.
warn(logger, "Ez-Gas sensor network is not available.")
#> WARN [2019-09-04 16:31:04] Ez-Gas sensor network is not available.
debug(logger, "Debug messages are suppressed by default.")
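To use log4r inside the API itself, one approach is to create a single file-backed logger at the top of plumber.R and call it from each endpoint. A minimal sketch (the log path, logger name, and levels are my placeholders):

library(log4r)

# one shared logger, writing to both the console and a file
api_logger <- logger(
  threshold = "DEBUG",
  appenders = list(console_appender(), file_appender("/api/logs/api.log"))
)

#* Ping
#* @get /ping
function() {
  info(api_logger, "GET /ping called")              # trace which endpoint runs
  rtn <- jsonlite::unbox(data.frame(Status = TRUE))
  debug(api_logger, "ping returning Status = TRUE") # debug-level detail
  return(rtn)
}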

Related

R - mocking API requests with `gh` package

I am trying to mock the output of a gh API request:
httptest2::with_mock_dir("gh", {
  test_that("api works", {
    gh::gh("GET /repos/r-lib/gh")
  })
})
I am trying to set up testing for custom functions that routinely make API calls to GitHub and I am using gh to make these requests. I am following this tutorial as guidance: https://books.ropensci.org/http-testing/
However, no directory is created when this function is run. Is there any way to capture the output of gh::gh and store it as a mock API return, so that I can run my tests without needing GitHub authentication or even an internet connection?
httptest2 is specifically designed to test httr2 requests:
This package helps with writing tests for packages that use httr2
Unfortunately, gh uses httr:
Imports:
    cli (>= 3.0.1),
    gitcreds,
    httr (>= 1.2),
    ini,
    jsonlite
This means that you can't directly use httptest2 with gh.
However, using the gh source code, you can extract the parameters of the GET request that gh sends to httr:
gh_get <- function(endpoint, ..., per_page = NULL, .token = NULL, .destfile = NULL,
                   .overwrite = FALSE, .api_url = NULL, .method = "GET",
                   .limit = NULL, .accept = "application/vnd.github.v3+json",
                   .send_headers = NULL, .progress = TRUE, .params = list()) {
  params <- c(list(...), .params)
  params <- gh:::drop_named_nulls(params)
  if (is.null(per_page)) {
    if (!is.null(.limit)) {
      per_page <- max(min(.limit, 100), 1)
    }
  }
  if (!is.null(per_page)) {
    params <- c(params, list(per_page = per_page))
  }
  req <- gh:::gh_build_request(
    endpoint = endpoint, params = params,
    token = .token, destfile = .destfile,
    overwrite = .overwrite, accept = .accept,
    send_headers = .send_headers,
    api_url = .api_url, method = .method
  )
  req
}
req <- gh_get("GET /repos/r-lib/gh")
req
#$method
#[1] "GET"
#$url
#[1] "https://api.github.com/repos/r-lib/gh"
#$headers
#                      User-Agent                           Accept
#    "https://github.com/r-lib/gh" "application/vnd.github.v3+json"
#$query
#NULL
#$body
#NULL
#$dest
#<request>
#Output: write_memory
With the example you provided, this lets you use httr2 to send the same request:
library(httr2)
resp_httr2 <- request(base_url = req$url) %>%
  req_perform() %>%
  resp_body_json()
If you are mainly interested in the JSON content, the results are the same; only the attributes differ:
resp_gh <- gh::gh("GET /repos/r-lib/gh")
all.equal(resp_gh, resp_httr2, check.attributes = FALSE)
#[1] TRUE
If you want to use httptest2, switching to httr2 would work:
with_mock_dir("gh", {
test_that("api works", {
resp <- request(base_url=req$url) %>%
req_perform() %>%
resp_body_json()
expect_equal(resp$full_name,"r-lib/gh")})
})
#Test passed 🎉
#[1] TRUE
Offline testing now works, because the gh/api.github.com directory was created by httptest2.
Maybe you can take inspiration from tests/testthat/test-mock-repos.R:
res <- gh(
  TMPL("/repos/{owner}/{repo}"),
  owner = "gh-testing",
  repo = test_repo,
  .token = tt()
)

expect_equal(res$name, test_repo)
expect_equal(res$description, "Test repo for gh")
expect_equal(res$homepage, "https://github.com/r-lib/gh")
expect_false(res$private)
expect_false(res$has_issues)
expect_false(res$has_wiki)
A GET method would not create any directory.
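Since gh is built on httr rather than httr2, another route worth trying is the original httptest package, which offers the same record-and-replay workflow for httr-based clients via with_mock_dir(). A minimal sketch, untested against gh itself (gh would need to route its requests through httr's standard request path for the recording to work):

library(httptest)
library(testthat)

# the first run records real responses into gh/ (network + auth needed);
# subsequent runs replay them offline
httptest::with_mock_dir("gh", {
  test_that("api works", {
    resp <- gh::gh("GET /repos/r-lib/gh")
    expect_equal(resp$full_name, "r-lib/gh")
  })
})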

Can future processes free themselves?

I have a script/framework that calls a function with the future package. The function does not return anything, but instead starts an API call, something like this:
library(future)
future::plan("multisession")

future_get_function = function(msg) {
  link <- "http://127.0.0.1:7774/echo?"
  message <- list(msg = msg)
  httr::GET(link, query = message)
  Sys.sleep(20)
}

future::future({
  future_get_function(msg)
}, globals = list(msg = msg))
library(plumber)

#* Echo back the input
#* @param msg The message to echo
#* @get /echo
function(msg = "") {
  print("received something")
  list(msg = paste0("The message is: '", msg, "'"))
}
Now, from what I've seen, I need to free up the processes that are done by calling value on them afterwards, correct? Otherwise, if I have more calls than workers/tasks available, they are stuck?
So is there a function that I can insert at the end of future_get_function, or somewhere in the future call (promises would also be fine), that frees the process automatically once it's done, without me having to call value or similar?
Thanks in advance.
Future processes definitely free themselves after completion.
You could use then to follow up on promise completion, and promise_all to check when all promises have finished:
library(future)
library(promises)

plan(multisession)
tic <- Sys.time()

future_get_function = function(msg, wait) {
  # link <- "http://127.0.0.1:7774/echo?"
  # message <- list(msg = msg)
  # httr::GET(link, query = message)
  print(paste(msg, ": launched after", round(difftime(Sys.time(), tic), 1), 'seconds'))
  Sys.sleep(wait)
  invisible(msg)
}

f1 <- future(future_get_function('msg1', 5))
f2 <- future(future_get_function('msg2', 10))

f_after <- function(res) {
  print(paste(res, "sent after", round(difftime(Sys.time(), tic), 1), 'seconds'))
}
f_failed <- function(res) {
  print("future failed")
}
f_end <- function(res) {
  print(paste("All messages sent after", round(difftime(Sys.time(), tic), 1), 'seconds'))
}

promises::then(f1, onFulfilled = f_after, onRejected = f_failed)
promises::then(f2, onFulfilled = f_after, onRejected = f_failed)

f_wait_all <- promises::promise_all(f1, f2)
promises::then(f_wait_all, onFulfilled = f_end, onRejected = f_failed)
[1] "msg1 : launched after 0 seconds"
[1] "msg1 sent after 5.1 seconds"
[1] "msg2 : launched after 0.2 seconds"
[1] "msg2 sent after 10.2 seconds"
[1] "All messages sent after 10.2 seconds"

How to implement async calls in R HTTPUV startServer?

The R httpuv startServer function should support async processing in the call portion of the app parameter, but I'm not able to get it to work. Does anyone know how to do this? The example below won't work, but it shows the idea of what I'm trying to do: run each request (or a specific page) async, so one page can load while another request is processing.
startServer(
  host,
  port,
  app = list(
    call = function(req) {
      req <- list(
        "REQUEST_METHOD" = req$REQUEST_METHOD,
        "SCRIPT_NAME" = req$SCRIPT_NAME,
        "PATH_INFO" = req$PATH_INFO,
        "QUERY_STRING" = req$QUERY_STRING,
        "SERVER_NAME" = req$SERVER_NAME,
        "SERVER_PORT" = req$SERVER_PORT,
        "HEADERS" = req$HEADERS,
        "rook.input" = req[["rook.input"]]$read_lines()
      )
      future_promise({
        if (req$PATH_INFO %in% valid_dynamic_paths) {
          x <- eval(dynamic[[req$PATH_INFO]][req$REQUEST_METHOD])
          list(
            status = x[["status"]],
            headers = x[["headers"]],
            body = x[["body"]]
          )
        } else {
          list(
            status = 404,
            headers = list(
              'Content-Type' = 'text/html'
            ),
            body = "404. Page not found."
          )
        }
      })
    },
    staticPaths = static
  )
)
I was able to get something similar to work. The code below shows the gist of it:
# fork a process for each new request
future::plan(future::multicore)

httpuv::runServer("0.0.0.0", 8080, list(
  call = function(req) {
    # `as.promise` is necessary, because `httpuv` is using `is.promise`
    # under the hood to act differently. Unfortunately `is.promise` returns
    # `FALSE` for a `future`.
    promises::as.promise(
      future::future({
        Sys.sleep(5)
        # Respond with HTTP 200 OK
        list(
          status = 200,
          body = "Slept for 5 seconds",
          headers = list(
            # Content-Type is important, otherwise you will run
            # into a "not compatible with STRSXP" error.
            "content-type" = "text/plain"
          )
        )
      })
    )
  }
))
Calling the server with two requests at (nearly) the same time will show that you wait only 5 seconds for both requests, not 5 seconds for one and 10 for the other.
time curl -s localhost:8080 > /dev/null &
time curl -s localhost:8080 > /dev/null
# After 5 seconds you should see output similar to the following:
# real 0m5.089s
# user 0m0.011s
# sys 0m0.010s
# real 0m5.112s
# user 0m0.020s
# sys 0m0.024s
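One caveat: future::multicore forks the R process, which is not supported on Windows (and is discouraged inside RStudio). The server code can stay exactly the same; only the plan needs to change:

# forks are unavailable on Windows; use background R sessions instead
future::plan(future::multisession, workers = 4)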

Unable to start Julia connection on port 1023: all connections are in use

I am trying to run a Julia function from R using the XRJulia package. Below is my code snippet.
## start
library(XRJulia)
prevInterface <- XR::getInterface()
if (is.null(prevInterface)) {
  ev <- RJulia(.makeNew = TRUE)
} else {
  ev <- RJulia(.makeNew = FALSE)
}
juliaAddToPath(directory = '/home/.julia/lib/v0.6/', package = NULL, evaluator = ev)

runjl <- juliaEval('function sum(a, b)
  c = a + b;
  return c
end
')
runjl_function <- JuliaFunction(runjl)
sum_result <- runjl_function(1, 5)

XR::rmInterface(XR::getInterface())
## end
This code works fine. But sometimes, when I run the above code multiple times, I get this error:
Unable to start Julia connection on port 1023: all connections are in use.
How do I close all Julia connections, and what is the systematic way to do it? Please suggest.
You have the function ServerQuit() in RJuliaConnect.R:
https://github.com/johnmchambers/XRJulia/blob/master/R/RJuliaConnect.R
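A minimal sketch of a clean shutdown, assuming (per RJuliaConnect.R) that the evaluator object exposes ServerQuit() as a method:

ev <- XR::getInterface()
if (!is.null(ev)) {
  ev$ServerQuit()      # ask the Julia server process to quit
  XR::rmInterface(ev)  # then drop the interface on the R side
}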

Reconnect if connection fails using R

I have written a small function, but somehow it does not work as expected.
I have a connection to a server, and sometimes the server is down so I cannot connect. The script runs in batch, so it has to be automated.
The script should run conn <- function(..) successfully (meaning no error message); if not, it should re-check and restart again in approximately 1 minute, looping until a connection is established, for something like 12 hours (approx.). The successful connection should be assigned to the conn object, so the object must hold the established connection (something like <Connection established, # 20180522 20:43:41 CET>).
The function which does not work is here:
connect <- function(c) {
  message(paste("remaining run", c))
  withRestarts(
    err <- tryCatch({
      conn <- server.connect(settings...)
    }, error = function(e) {
      invokeRestart("reconnect")
    }),
    reconnect = function() {
      message("re-connecting")
      stopifnot(c > 0)
      for (i in 1:10) { Sys.sleep(6); cat(i) }
      connect(c - 1)
    }
  )
}

connect(1000) # with an approx. 1 min sleep, this runs for over 12 hours
So the question is: what is wrong, and how do I rewrite the function so that it runs as expected? Thanks.
EDIT
It seems that the function should be:
connect <- function(c) {
  message(paste("remaining run", c))
  withRestarts(
    err <- tryCatch({
      server.connect(settings...)
    }, error = function(e) {
      invokeRestart("reconnect")
    }),
    reconnect = function() {
      message("re-connecting")
      stopifnot(c > 0)
      for (i in 1:10) { Sys.sleep(6); cat(i) }
    }
  )
}

conn <- connect(1000)
EDIT 2
Here is a comment on the above function after testing it:
I tested the EDIT function by simulating the connection: I first ran it without an internet connection (the function now retries, sleeping 6 seconds for each of 10 iterations), and while it was running I connected to the internet, expecting the next iteration to pick up and connect to the server if it was available. What happens is that the function never picks up the later possibility to connect.
If you only want to loop over the connection establishment, this will work:
# simulate an unstable connection
server.connect <- function(...) {
  if (round(as.numeric(Sys.time())) %% 10 != 0)  # about 90 % of connection attempts fail
    stop("Connection error")
  return(TRUE)  # success
}

connect <- function(attempts = 1, sleep.seconds = 6) {
  for (i in 1:attempts) {
    res <- try(server.connect("my connection string"), silent = TRUE)
    if (!("try-error" %in% class(res))) {
      print("Connected...")
      return(res)
    }
    print(paste0("Attempt #", i, " failed"))
    Sys.sleep(sleep.seconds)
  }
  stop("Maximum number of connection attempts exceeded")
}

con <- connect(attempts = 10, sleep = 1)
Example execution log:
[1] "Attempt #1 failed"
[1] "Attempt #2 failed"
[1] "Attempt #3 failed"
[1] "Attempt #4 failed"
[1] "Attempt #5 failed"
[1] "Attempt #6 failed"
[1] "Connected..."
