Trouble pulling option data with IBrokers in R

I have been pulling option data from Interactive Brokers for the past two months. Since the new year, whenever I go to pull option data, I get this message: "Error in `*tmp*`[[id]] : subscript out of bounds".
Here is the code.
tws = twsConnect(port = 7498)
isConnected(tws)
snapShot <- function (twsCon, eWrapper, timestamp, file, playback = 1, ...)
{
  if (missing(eWrapper))
    eWrapper <- eWrapper()
  names(eWrapper$.Data$data) <- eWrapper$.Data$symbols
  con <- twsCon[[1]]
  if (inherits(twsCon, "twsPlayback")) {
    sys.time <- NULL
    while (TRUE) {
      if (!is.null(timestamp)) {
        last.time <- sys.time
        sys.time <- as.POSIXct(strptime(paste(readBin(con, character(), 2),
                                              collapse = " "), timestamp))
        if (!is.null(last.time)) {
          Sys.sleep((sys.time - last.time) * playback)
        }
        curMsg <- .Internal(readBin(con, "character", 1L, NA_integer_, TRUE, FALSE))
        if (length(curMsg) < 1)
          next
        processMsg(curMsg, con, eWrapper, format(sys.time, timestamp), file, ...)
      }
      else {
        curMsg <- readBin(con, character(), 1)
        if (length(curMsg) < 1)
          next
        processMsg(curMsg, con, eWrapper, timestamp, file, ...)
        if (curMsg == .twsIncomingMSG$REAL_TIME_BARS)
          Sys.sleep(5 * playback)
      }
    }
  }
  else {
    while (TRUE) {
      socketSelect(list(con), FALSE, NULL)
      curMsg <- .Internal(readBin(con, "character", 1L, NA_integer_, TRUE, FALSE))
      if (!is.null(timestamp)) {
        processMsg(curMsg, con, eWrapper, format(Sys.time(), timestamp), file, ...)
      }
      else {
        processMsg(curMsg, con, eWrapper, timestamp, file, ...)
      }
      if (!any(sapply(eWrapper$.Data$data, is.na)))
        return(do.call(rbind, lapply(eWrapper$.Data$data, as.data.frame)))
    }
  }
}
opt_put = twsOption("", expiry = "20190125", symbol = "AAPL", right = 'P',
                    strike = "148")
reqMktData(tws, Contract = opt_put, eventWrapper = eWrapper.data(1),
           CALLBACK = snapShot)
# Error in `*tmp*`[[id]] : subscript out of bounds
The API team at Interactive Brokers could not help and had no experience with R. Thanks in advance. I would also like to mention that I have no problem pulling equity data.
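One way to narrow this down is to confirm that TWS can still resolve the option contract before requesting market data; if the expiry/strike/right combination no longer maps to a valid contract, the wrapper has nothing sensible to index into data[[id]]. A minimal check, assuming reqContractDetails() returns an empty list when IB cannot resolve the contract:
# Hypothetical diagnostic, using the tws connection and opt_put contract defined above:
# verify that the option contract resolves before requesting a snapshot of its data.
details <- reqContractDetails(tws, opt_put)
if (length(details) == 0) {
  stop("TWS could not resolve this option contract; check expiry/strike/right")
}
# Only request the snapshot once the contract is known to resolve.
reqMktData(tws, Contract = opt_put, eventWrapper = eWrapper.data(1),
           CALLBACK = snapShot)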

Related

R Script to Download Data from Inconsistent Website

I'm newish to R (and programming in general) and am automating myself out of a job ;)
I have written a script that (1) takes a CSV file of "API numbers," (2) finds and downloads an HTML table for each API number, and (3) saves the info as a CSV table. It works; it's just not pretty. One of the problems is that the website I'm downloading the data from sometimes gives a 500 Internal Server Error. To address the website's sporadic availability, I have built a chain of really ugly if statements that delay the script for increasing amounts of time. It's overkill, but I don't want the download to fail when I leave it running overnight.
I'm looking for feedback on the workaround download delay. Is there a better way to do this? Is there a way to tell R to keep trying the download until it's successful?
This script will download data and save each API number as a separate CSV. The example list of API numbers has 60 entries. You can find it here: https://www.dropbox.com/s/fwvcxun8hr0xy4n/API%20List.csv?dl=0
Thanks in advance!
######################### User-Defined Parameters ##########################################
### Specify where the API list is and where to download temp data
welllist = ".../API List.csv" # each API will have a separate CSV in this directory as well
tempdata = ".../tempdata.txt"
######################### Get API List and Parse API ##########################################
wells = read.csv(file = welllist, header = 1, sep = ",")
colnum = 1
rownum = nrow(wells)
API = data.frame(1:rownum,"A","B","C",stringsAsFactors = F)
colnames(API) = c("number", "type","county","sequence")
for (i in 1:rownum) {
  current = toString(wells[i, colnum])
  dashloc = as.data.frame(gregexpr(pattern = "-", text = current))
  type = substr(x = current, start = 0, stop = dashloc[1, 1] - 1)
  if (type != "05") {print(paste("WARNING! API DOES NOT BEGIN WITH 05", "- WELL", i, wells[i, 2]))}
  county = substr(x = current, start = dashloc[1, 1] + 1, stop = dashloc[2, 1] - 1)
  sequence = substr(x = current, start = dashloc[2, 1] + 1, stop = nchar(current))
  API$type[i] = type
  API$county[i] = county
  API$sequence[i] = sequence
}
######################### Download the Data ##########################################
end = nrow(API)
for (i in 1:end) {
  county = API$county[i]
  sequence = API$sequence[i]
  dataurl = paste("http://cogcc.state.co.us/production/?&apiCounty=", county, "&apiSequence=", sequence, sep = "")
  ### ***** U-G-L-Y Retry Data Download if Server Error or if File Size is Too Small ***** ###
  err <- try(download.file(url = dataurl, destfile = tempdata, quiet = F, mode = "wb"))
  if (class(err) == "try-error" || file.size(tempdata) < 300000) {
    Sys.sleep(2)
    err <- try(download.file(url = dataurl, destfile = tempdata, quiet = F, mode = "wb"))
  }
  if (class(err) == "try-error" || file.size(tempdata) < 300000) {
    Sys.sleep(4)
    err <- try(download.file(url = dataurl, destfile = tempdata, quiet = F, mode = "wb"))
  }
  if (class(err) == "try-error" || file.size(tempdata) < 300000) {
    Sys.sleep(8)
    err <- try(download.file(url = dataurl, destfile = tempdata, quiet = F, mode = "wb"))
  }
  if (class(err) == "try-error" || file.size(tempdata) < 300000) {
    Sys.sleep(16)
    err <- try(download.file(url = dataurl, destfile = tempdata, quiet = F, mode = "wb"))
  }
  if (class(err) == "try-error" || file.size(tempdata) < 300000) {
    Sys.sleep(32)
    err <- try(download.file(url = dataurl, destfile = tempdata, quiet = F, mode = "wb"))
  }
  if (class(err) == "try-error" || file.size(tempdata) < 300000) {
    Sys.sleep(64)
    err <- try(download.file(url = dataurl, destfile = tempdata, quiet = F, mode = "wb"))
  }
  if (class(err) == "try-error" || file.size(tempdata) < 300000) {
    Sys.sleep(128)
    err <- try(download.file(url = dataurl, destfile = tempdata, quiet = F, mode = "wb"))
  }
  if (class(err) == "try-error" || file.size(tempdata) < 300000) {
    Sys.sleep(256)
    err <- try(download.file(url = dataurl, destfile = tempdata, quiet = F, mode = "wb"))
  }
  if (class(err) == "try-error" || file.size(tempdata) < 300000) {
    Sys.sleep(512)
    err <- try(download.file(url = dataurl, destfile = tempdata, quiet = F, mode = "wb"))
  }
  if (class(err) == "try-error" || file.size(tempdata) < 300000) {
    Sys.sleep(1024)
    err <- try(download.file(url = dataurl, destfile = tempdata, quiet = F, mode = "wb"))
  }
  if (class(err) == "try-error" || file.size(tempdata) < 300000) {
    write.csv(x = paste("Error downloading", sequence, "at", Sys.time()), file = paste(dirname(welllist), "errorlog.txt", sep = "/"))
    next
  }
  ### Save the CSV ###
  write.csv(x = tempdata, file = paste(dirname(welllist), "/", sequence, "_production.csv", sep = ""))
}
Periodically, the website breaks and gives: HTTP status was '500 Internal Server Error'
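One way to avoid repeating the if blocks is to wrap download.file() in a small helper that retries with exponential backoff and gives up after a fixed number of attempts. This is only a sketch; the helper name and its arguments (min_size, max_tries, first_wait) are made up for illustration.
# Hypothetical helper: retry download.file() with exponential backoff.
download_with_retry <- function(url, destfile, min_size = 300000,
                                max_tries = 10, first_wait = 2) {
  wait <- first_wait
  for (attempt in seq_len(max_tries)) {
    err <- try(download.file(url = url, destfile = destfile,
                             quiet = FALSE, mode = "wb"),
               silent = TRUE)
    ok <- !inherits(err, "try-error") &&
      file.exists(destfile) && file.size(destfile) >= min_size
    if (ok) return(TRUE)   # download looks complete
    Sys.sleep(wait)        # back off before the next attempt
    wait <- wait * 2       # 2, 4, 8, 16, ... seconds
  }
  FALSE                    # give up after max_tries attempts
}
# Usage inside the loop, replacing the chain of if blocks:
# if (!download_with_retry(dataurl, tempdata)) {
#   write.csv(paste("Error downloading", sequence, "at", Sys.time()),
#             file = paste(dirname(welllist), "errorlog.txt", sep = "/"))
#   next
# }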

Error in textConnection(): all connections are in use

I have read most of the posts concerning this type of error, but none of them applies to my case. I am new to R and working on a school assignment based on Nolan and Lang's book Data Science Case Studies in R. I am working on using statistics to identify spam; the URL for the code can be found here. It requires working with files from http://spamassassin.apache.org/old/publiccorpus/ (which are pretty big).
The problem I am facing is the following (I am only posting the chunks of code where I have encountered the issue):
sampleSplit = lapply(sampleEmail, splitMessage)
processHeader = function(header)
{
  # modify the first line to create a key:value pair
  header[1] = sub("^From", "Top-From:", header[1])
  headerMat = read.dcf(textConnection(header), all = TRUE)
  headerVec = unlist(headerMat)
  dupKeys = sapply(headerMat, function(x) length(unlist(x)))
  names(headerVec) = rep(colnames(headerMat), dupKeys)
  return(headerVec)
}
headerList = lapply(sampleSplit,
                    function(msg) {
                      processHeader(msg$header)} )
contentTypes = sapply(headerList, function(header)
                        header["Content-Type"])
names(contentTypes) = NULL
contentTypes
hasAttach = grep("^ *multi", tolower(contentTypes))
hasAttach
boundaries = getBoundary(contentTypes[ hasAttach ])
boundaries
boundary = boundaries[9]
body = sampleSplit[[15]]$body
bString = paste("--", boundary, sep = "")
bStringLocs = which(bString == body)
bStringLocs
eString = paste("--", boundary, "--", sep = "")
eStringLoc = which(eString == body)
eStringLoc
diff(c(bStringLocs[-1], eStringLoc))
### This code has mistakes in it - and we fix them later!
processAttach = function(body, contentType){
  boundary = getBoundary(contentType)
  bString = paste("--", boundary, "$", sep = "")
  bStringLocs = grep(bString, body)
  eString = paste("--", boundary, "--$", sep = "")
  eStringLoc = grep(eString, body)
  n = length(body)
  if (length(eStringLoc) == 0) eStringLoc = n + 1
  if (length(bStringLocs) == 1) attachLocs = NULL
  else attachLocs = c(bStringLocs[-1], eStringLoc)
  msg = body[ (bStringLocs[1] + 1) : min(n, (bStringLocs[2] - 1), na.rm = TRUE)]
  if ( eStringLoc < n )
    msg = c(msg, body[ (eStringLoc + 1) : n ])
  if ( !is.null(attachLocs) ) {
    attachLens = diff(attachLocs, lag = 1)
    attachTypes = mapply(function(begL, endL) {
      contentTypeLoc = grep("[Cc]ontent-[Tt]ype", body[ (begL + 1) : (endL - 1)])
      contentType = body[ begL + contentTypeLoc]
      contentType = gsub('"', "", contentType )
      MIMEType = sub(" *Content-Type: *([^;]*);?.*", "\\1", contentType)
      return(MIMEType)
    }, attachLocs[-length(attachLocs)], attachLocs[-1])
  }
  if (is.null(attachLocs)) return(list(body = msg, attachInfo = NULL) )
  else return(list(body = msg,
                   attachDF = data.frame(aLen = attachLens,
                                         aType = attachTypes,
                                         stringsAsFactors = FALSE)))
}
bodyList = lapply(sampleSplit, function(msg) msg$body)
attList = mapply(processAttach, bodyList[hasAttach],
contentTypes[hasAttach],
SIMPLIFY = FALSE)
lens = sapply(attList, function(processedA)
processedA$attachDF$aLen)
head(lens)
attList[[2]]$attachDF
body = bodyList[hasAttach][[2]]
length(body)
body[35:45]
processAttach = function(body, contentType){
  n = length(body)
  boundary = getBoundary(contentType)
  bString = paste("--", boundary, sep = "")
  bStringLocs = which(bString == body)
  eString = paste("--", boundary, "--", sep = "")
  eStringLoc = which(eString == body)
  if (length(eStringLoc) == 0) eStringLoc = n
  if (length(bStringLocs) <= 1) {
    attachLocs = NULL
    msgLastLine = n
    if (length(bStringLocs) == 0) bStringLocs = 0
  } else {
    attachLocs = c(bStringLocs[ -1 ], eStringLoc)
    msgLastLine = bStringLocs[2] - 1
  }
  msg = body[ (bStringLocs[1] + 1) : msgLastLine]
  if ( eStringLoc < n )
    msg = c(msg, body[ (eStringLoc + 1) : n ])
  if ( !is.null(attachLocs) ) {
    attachLens = diff(attachLocs, lag = 1)
    attachTypes = mapply(function(begL, endL) {
      CTloc = grep("^[Cc]ontent-[Tt]ype", body[ (begL + 1) : (endL - 1)])
      if ( length(CTloc) == 0 ) {
        MIMEType = NA
      } else {
        CTval = body[ begL + CTloc[1] ]
        CTval = gsub('"', "", CTval )
        MIMEType = sub(" *[Cc]ontent-[Tt]ype: *([^;]*);?.*", "\\1", CTval)
      }
      return(MIMEType)
    }, attachLocs[-length(attachLocs)], attachLocs[-1])
  }
  if (is.null(attachLocs)) return(list(body = msg, attachDF = NULL) )
  return(list(body = msg,
              attachDF = data.frame(aLen = attachLens,
                                    aType = unlist(attachTypes),
                                    stringsAsFactors = FALSE)))
}
readEmail = function(dirName) {
  # retrieve the names of files in directory
  fileNames = list.files(dirName, full.names = TRUE)
  # drop files that are not email
  notEmail = grep("cmds$", fileNames)
  if ( length(notEmail) > 0) fileNames = fileNames[ - notEmail ]
  # read all files in the directory
  lapply(fileNames, readLines, encoding = "latin1")
}
processAllEmail = function(dirName, isSpam = FALSE)
{
  # read all files in the directory
  messages = readEmail(dirName)
  fileNames = names(messages)
  n = length(messages)
  # split header from body
  eSplit = lapply(messages, splitMessage)
  rm(messages)
  # process header as named character vector
  headerList = lapply(eSplit, function(msg) processHeader(msg$header))
  # extract content-type key
  contentTypes = sapply(headerList, function(header) header["Content-Type"])
  # extract the body
  bodyList = lapply(eSplit, function(msg) msg$body)
  rm(eSplit)
  # which email have attachments
  hasAttach = grep("^ *multi", tolower(contentTypes))
  # get summary stats for attachments and the shorter body
  attList = mapply(processAttach, bodyList[hasAttach],
                   contentTypes[hasAttach], SIMPLIFY = FALSE)
  bodyList[hasAttach] = lapply(attList, function(attEl) attEl$body)
  attachInfo = vector("list", length = n )
  attachInfo[ hasAttach ] = lapply(attList, function(attEl) attEl$attachDF)
  # prepare return structure
  emailList = mapply(function(header, body, attach, isSpam) {
                       list(isSpam = isSpam, header = header,
                            body = body, attach = attach)
                     },
                     headerList, bodyList, attachInfo,
                     rep(isSpam, n), SIMPLIFY = FALSE )
  names(emailList) = fileNames
  invisible(emailList)
}
Everything runs fine right up to:
emailStruct = mapply(processAllEmail, fullDirNames,
                     isSpam = rep( c(FALSE, TRUE), 3:2))
emailStruct = unlist(emailStruct, recursive = FALSE)
sampleStruct = emailStruct[ indx ]
save(emailStruct, file = "emailXX.rda")
I get the error Error in textConnection(header) : all connections are in use, so "emailStruct" is never created, and I need it later on. I seriously don't know how to get past this so that I can continue with the rest of the code, which depends on some of these variables.
When you run textConnection() you are opening a text connection, but you are never closing it. Try closing it explicitly after you read from it:
read.dcf(tc<-textConnection(header), all = TRUE)
close(tc)
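For completeness, here is a sketch of how that fix could be folded back into processHeader. Using on.exit() is an extra precaution (not in the original answer) so the connection is closed even if read.dcf() throws an error, which keeps repeated calls from exhausting R's pool of connections.
processHeader = function(header)
{
  # modify the first line to create a key:value pair
  header[1] = sub("^From", "Top-From:", header[1])
  # open the connection and make sure it is closed again, even on error
  tc = textConnection(header)
  on.exit(close(tc))
  headerMat = read.dcf(tc, all = TRUE)
  headerVec = unlist(headerMat)
  dupKeys = sapply(headerMat, function(x) length(unlist(x)))
  names(headerVec) = rep(colnames(headerMat), dupKeys)
  return(headerVec)
}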

Oracle R Enterprise: Error ORE object has no unique key

I need to write a function that calls the Oracle R Enterprise ore.corr function and outputs the result as a data.frame.
Here is my R code:
f_sts_corelation =
  function(dat, target_col = "", corr_type = "spearman", group_by = "")
  {
    v_target_col = gsub("\n", "", target_col, fixed = TRUE);
    v_target_col_list = "";
    library("gdata");
    for (s_name in strsplit(v_target_col, ",")[[1]])
    {
      n_pos = regexpr(".", s_name, fixed = TRUE);
      if (n_pos > 0)
      {
        s_name = substring(s_name, n_pos + 1);
      }
      s_name = gsub("\"", "", s_name, fixed = TRUE);
      if (is.numeric(dat[, trim(s_name)]))
      {
        if (nchar(v_target_col_list) == 0)
        {
          v_target_col_list = trim(s_name)
        }
        else
        {
          v_target_col_list = paste(v_target_col_list, ",", trim(s_name))
        }
      }
    }
    ore.data = ore.push(dat)
    v_id = c()
    v_group = c()
    v_row = c()
    v_col = c()
    v_statistic = c()
    v_pvalue = c()
    v_df = c()
    #group_by = ""
    s_group_by = trim(gsub("\n", "", group_by, fixed = TRUE));
    if (nchar(s_group_by) > 0)
    {
      n_pos = regexpr(".", s_group_by, fixed = TRUE);
      if (n_pos > 0)
      {
        s_group_by = substring(s_group_by, n_pos + 1);
      }
      s_group_by = trim(gsub("\"", "", s_group_by, fixed = TRUE));
      ore.corr.res = ore.corr(ore.data, var = v_target_col_list, group.by = s_group_by)
      for (i in 1:length(ore.corr.res))
      {
        if (i == 1)
        {
          v_group = rep(names(ore.corr.res[i]), length(ore.corr.res[[i]]$ROW))
          v_row = as.vector(ore.corr.res[[i]]$ROW)
          v_col = as.vector(ore.corr.res[[i]]$COL)
          v_statistic = as.vector(ore.corr.res[[i]][, 3])
          v_pvalue = as.vector(ore.corr.res[[i]][, 4])
          v_df = as.vector(ore.corr.res[[i]][, 5])
        }
        else
        {
          v_group = c(v_group, rep(names(ore.corr.res[i]), length(ore.corr.res[[i]]$ROW)))
          v_row = c(v_row, as.vector(ore.corr.res[[i]]$ROW))
          v_col = c(v_col, as.vector(ore.corr.res[[i]]$COL))
          v_statistic = c(v_statistic, as.vector(ore.corr.res[[i]][, 3]))
          v_pvalue = c(v_pvalue, as.vector(ore.corr.res[[i]][, 4]))
          v_df = c(v_df, as.vector(ore.corr.res[[i]][, 5]))
        }
      }
    }
    else if (nchar(s_group_by) == 0)
    {
      ore.corr.res = ore.corr(ore.data, var = v_target_col_list)
      v_group = rep(" ", length(ore.corr.res$ROW))
      v_row = as.vector(ore.corr.res$ROW)
      v_col = as.vector(ore.corr.res$COL)
      v_statistic = as.vector(ore.corr.res[, 3])
      v_pvalue = as.vector(ore.corr.res[, 4])
      v_df = as.vector(ore.corr.res[, 5])
    }
    df_res = data.frame(Group_by = v_group,
                        Row = v_row,
                        Col = v_col,
                        Statistic = v_statistic,
                        P_Value = v_pvalue,
                        DF = v_df)
  }
After that, I run the function with the following script:
dat = iris;
corr_type="spearman";
V_target_col= '"IRIS_N$10002"."Sepal.Length",
"IRIS_N$10002"."Sepal.Width",
"IRIS_N$10002"."Petal.Width",
"IRIS_N$10002"."Petal.Length"';
group_by =
'
"IRIS_N$10002"."Species"
'
df_result = f_sts_corelation(dat, target_col = V_target_col, group_by = group_by)
But the following error happens:
Error: ORE object has no unique key
I have run each R command inside my function step by step, and I am sure the error comes from the last command:
df_res = data.frame(Group_by = v_group,
                    Row = v_row,
                    Col = v_col,
                    Statistic = v_statistic,
                    P_Value = v_pvalue,
                    DF = v_df)
I don't know how to avoid this error.
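One thing that might be worth trying (a guess, since this cannot be verified without an ORE instance): ore.corr() returns a database-backed ore.frame, and building a local data.frame from its columns is the point where ORE wants a unique key to order the rows. Pulling the result into local R memory with ore.pull() first means the later indexing happens on an ordinary data.frame. A sketch of the no-group branch under that assumption:
# Sketch only: pull the ore.frame returned by ore.corr() into a local
# data.frame before indexing it.
local.res = ore.pull(ore.corr.res)
v_group = rep(" ", length(local.res$ROW))
v_row = as.vector(local.res$ROW)
v_col = as.vector(local.res$COL)
v_statistic = as.vector(local.res[, 3])
v_pvalue = as.vector(local.res[, 4])
v_df = as.vector(local.res[, 5])
df_res = data.frame(Group_by = v_group, Row = v_row, Col = v_col,
                    Statistic = v_statistic, P_Value = v_pvalue, DF = v_df)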

R Wait until system executable is finished

I am trying to run a series of input files (located in my C:/GenSoftware/Colony/datFiles/ directory) through an executable Colony2.exe that is located in my C:/GenSoftware/Colony/ directory. I attempt to rename file 1, copy it to the same directory as the executable, run Colony2.exe using the run.colony function (pasted at the bottom) in the package rcolony, delete the file, and proceed to file 2.
However, the code attempts to continue before the executable is finished. How can I get my loop to wait until Colony2.exe is finished before proceeding to the next line of code and re-running the loop? run.colony invokes the system command (pasted at the bottom).
Here is my code thus far...
rm(list = ls())
setwd("C:/GenSoftware/Colony/")
getwd()
datFiles <- list.files("datFiles")
library(rcolony)
for (d in 1:length(datFiles))
{
  setwd("C:/GenSoftware/Colony/datFiles/")
  file.rename(datFiles[d], "Colony2.DAT")
  file.copy(from = "C:/GenSoftware/Colony/datFiles/Colony2.DAT", to = "C:/GenSoftware/Colony/")
  datPath <- "C:/GenSoftware/Colony/Colony2.DAT"
  setwd("C:/GenSoftware/Colony/")
  run.colony(colonyexecpath = "Colony2.exe", datPath, wait = TRUE, monitor = FALSE)
  unlink(x = "C:/GenSoftware/Colony/Colony2.DAT", recursive = FALSE, force = TRUE)
  setwd("C:/GenSoftware/Colony/datFiles/")
  file.rename("Colony2.DAT", datFiles[d])
}
######## END OF MY CODE, START OF run.colony CODE
run.colony
function (colonyexecpath = "prompt", datfilepath = "prompt",
          wait = FALSE, monitor = TRUE)
{
  if (colonyexecpath == "prompt") {
    cat("Please click to select your Colony2 executable (probably called Colony2.exe or Colony2).\n\n")
    flush.console()
    colonyexecpath <- file.choose()
  }
  if (datfilepath == "prompt") {
    cat("Please click to select your DAT file.\n\n")
    flush.console()
    datfilepath <- file.choose()
  }
  datadir <- sub("([A-Z a-z0-9:/\\]+[/\\]+)([A-Z.a-z0-9]+)", "\\1", datfilepath)
  filename <- sub("([A-Z a-z0-9:/\\]+[/\\]+)([A-Z.a-z0-9]+)", "\\2", datfilepath)
  colonyexec <- sub("([A-Z a-z0-9:/\\]+[/\\]+)([A-Z.a-z0-9]+)", "\\2", colonyexecpath)
  current.wd <- getwd()
  x <- readLines(paste(datadir, filename, sep = ""), n = 2)
  outputfilename <- substring(x[2], 1, 20)
  outputfilename <- sub("^[\t\n\f\r ]*", "", outputfilename)
  outputfilename <- sub("[\t\n\f\r ]*$", "", outputfilename)
  outputfilename
  if (file.exists(paste(datadir, outputfilename, ".MidResult", sep = ""))) {
    stop("\nThere are output files already in the directory. \nColony has already run. \nTry deleting (or moving) these files and starting again.\n")
  }
  setwd(datadir)
  if (monitor == TRUE & wait == TRUE) {
    stop("If you want to monitor the output, you must set wait as FALSE. Otherwise you cannot run other functions in the same R console.")
  }
  cat("Be aware: this may take several minutes, hours, or even weeks to run, depending on the settings used.\n")
  platform <- .Platform
  if (platform$OS.type == "unix") {
    if (file.exists("Colony2") == FALSE) {
      system(paste("cp", colonyexecpath, datadir, sep = " "))
    }
    if (filename != "Colony2.DAT") {
      system(paste("mv", paste(datadir, filename, sep = ""),
                   paste(datadir, "Colony2.DAT", sep = ""), sep = " "))
    }
    if (filename != "Colony2.DAT") {
      system(paste("cp", paste(datadir, "Colony2.DAT", sep = ""),
                   paste(datadir, filename, sep = ""), sep = " "))
    }
    cat("#! /bin/sh\necho Running Colony2\nexport G95_MEM_SEGMENTS=0\n./Colony2",
        file = paste(datadir, "Colony2.sh", sep = ""), append = FALSE)
    if (monitor == TRUE) {
      system("sh Colony2.sh | tee temp.txt", wait = wait)
    }
    else {
      system("sh Colony2.sh", wait = wait)
    }
    system(paste("rm", colonyexec))
    if (file.exists("Colony2.sh")) {
      system(paste("rm Colony2.sh"))
    }
    else {
    }
    if (filename != "Colony2.DAT") {
      system("rm Colony2.DAT")
    }
  }
  else {
    if (platform$OS.type == "windows") {
      shell(paste("copy", colonyexecpath, datadir, sep = " "))
      if (filename != "Colony2.DAT") {
        shell(paste("rename", paste(datadir, filename, sep = ""),
                    paste(datadir, "Colony2.DAT", sep = ""), sep = " "))
      }
      shell.exec("Colony2.exe")
      if (filename != "Colony2.DAT") {
        shell(paste("rename", paste(datadir, "Colony2.DAT", sep = ""),
                    paste(datadir, filename, sep = ""), sep = " "))
      }
      shell("del Colony2.exe")
    }
    else {
      stop(paste("This function is not correctly configured to run on",
                 platform$OS.type, "systems."))
    }
  }
  setwd(current.wd)
}
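Looking at the Windows branch of run.colony above, the executable is launched with shell.exec("Colony2.exe"), which returns immediately rather than waiting for the program to exit, so wait = TRUE never reaches the actual launch. One workaround (a sketch, not part of rcolony) is to keep the file copying and renaming from the loop above but launch the executable yourself with a blocking call:
# Sketch: call the executable directly and block until it exits.
setwd("C:/GenSoftware/Colony/")
status <- system2("Colony2.exe", wait = TRUE)  # returns only when Colony2.exe exits
if (status != 0) warning("Colony2.exe exited with status ", status)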

IBrokers reqMktData, how to add timeout to the callback function?

I have been using a modified snapShot function from the great IBrokers package to get "Last" prices from IB, and it has been working well for liquid stocks.
The call I make is, e.g.:
reqMktData(tws, twsSTK("AAPL"), eventWrapper = eWrapper.data.Last(1), CALLBACK = snapShot)
The problem arises when trying to retrieve very illiquid stocks or options. I would therefore need to add a timeout to the snapShot function. How and where could the timeout be added?
The code with the snapShot function:
library(IBrokers)
tws <- twsConnect()
eWrapper.data.Last <- function(n) {
  eW <- eWrapper(NULL)  # use basic template
  eW$assign.Data("data", rep(list(structure(.xts(matrix(rep(NA_real_, 2), nc = 2), 0),
                                            .Dimnames = list(NULL, c("LastSize", "Last")))), n))
  eW$tickPrice <- function(curMsg, msg, timestamp, file, ...)
  {
    tickType = msg[3]
    msg <- as.numeric(msg)
    id <- msg[2] #as.numeric(msg[2])
    data <- eW$get.Data("data") #[[1]] # list position of symbol (by id == msg[2])
    attr(data[[id]], "index") <- as.numeric(Sys.time())
    nr.data <- NROW(data[[id]])
    if(tickType == .twsTickType$LAST) {
      data[[id]][nr.data, 2] <- msg[4]
    }
    eW$assign.Data("data", data)
    c(curMsg, msg)
  }
  eW$tickSize <- function(curMsg, msg, timestamp, file, ...)
  {
    data <- eW$get.Data("data")
    tickType = msg[3]
    msg <- as.numeric(msg)
    id <- as.numeric(msg[2])
    attr(data[[id]], "index") <- as.numeric(Sys.time())
    nr.data <- NROW(data[[id]])
    if(tickType == .twsTickType$LAST_SIZE) {
      data[[id]][nr.data, 1] <- msg[4]
    }
    eW$assign.Data("data", data)
    c(curMsg, msg)
  }
  return(eW)
}
snapShot <- function (twsCon, eWrapper, timestamp, file, playback = 1, ...)
{
  if (missing(eWrapper))
    eWrapper <- eWrapper()
  names(eWrapper$.Data$data) <- eWrapper$.Data$symbols
  con <- twsCon[[1]]
  if (inherits(twsCon, "twsPlayback")) {
    sys.time <- NULL
    while (TRUE) {
      if (!is.null(timestamp)) {
        last.time <- sys.time
        sys.time <- as.POSIXct(strptime(paste(readBin(con, character(), 2),
                                              collapse = " "), timestamp))
        if (!is.null(last.time)) {
          Sys.sleep((sys.time - last.time) * playback)
        }
        curMsg <- .Internal(readBin(con, "character", 1L, NA_integer_, TRUE, FALSE))
        if (length(curMsg) < 1)
          next
        processMsg(curMsg, con, eWrapper, format(sys.time, timestamp), file, ...)
      }
      else {
        curMsg <- readBin(con, character(), 1)
        if (length(curMsg) < 1)
          next
        processMsg(curMsg, con, eWrapper, timestamp, file, ...)
        if (curMsg == .twsIncomingMSG$REAL_TIME_BARS)
          Sys.sleep(5 * playback)
      }
    }
  }
  else {
    while (TRUE) {
      socketSelect(list(con), FALSE, NULL)
      curMsg <- .Internal(readBin(con, "character", 1L, NA_integer_, TRUE, FALSE))
      if (!is.null(timestamp)) {
        processMsg(curMsg, con, eWrapper, format(Sys.time(), timestamp), file, ...)
      }
      else {
        processMsg(curMsg, con, eWrapper, timestamp, file, ...)
      }
      if (!any(sapply(eWrapper$.Data$data, is.na)))
        return(do.call(rbind, lapply(eWrapper$.Data$data, as.data.frame)))
    }
  }
}
You can use evalWithTimeout from the R.utils package. I didn't test it, but I'm pretty sure wrapping evalWithTimeout around the while loop would achieve what you're after.
library(R.utils)
evalWithTimeout(
  while (TRUE) {
    socketSelect(list(con), FALSE, NULL)
    curMsg <- .Internal(readBin(con, "character", 1L, NA_integer_, TRUE, FALSE))
    if (!is.null(timestamp)) {
      processMsg(curMsg, con, eWrapper, format(Sys.time(), timestamp), file, ...)
    }
    else {
      processMsg(curMsg, con, eWrapper, timestamp, file, ...)
    }
    if (!any(sapply(eWrapper$.Data$data, is.na)))
      return(do.call(rbind, lapply(eWrapper$.Data$data, as.data.frame)))
  }, timeout = 5, onTimeout = "warning")
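If evalWithTimeout (renamed withTimeout in newer versions of R.utils) does not manage to interrupt the blocking socket read, an alternative is to build the timeout into the loop itself: give socketSelect() a finite timeout and track the elapsed time, returning whatever data has arrived when time runs out. Below is a sketch of the live (non-playback) branch of snapShot modified along those lines; the timeout argument and the snapShotTimeout name are additions, not part of the original function.
# Sketch: snapShot with an explicit timeout (in seconds) on the live branch.
snapShotTimeout <- function(twsCon, eWrapper, timestamp, file,
                            playback = 1, timeout = 5, ...)
{
  if (missing(eWrapper))
    eWrapper <- eWrapper()
  names(eWrapper$.Data$data) <- eWrapper$.Data$symbols
  con <- twsCon[[1]]
  start <- Sys.time()
  while (TRUE) {
    # give up once 'timeout' seconds have passed, returning whatever arrived
    if (as.numeric(difftime(Sys.time(), start, units = "secs")) > timeout)
      return(do.call(rbind, lapply(eWrapper$.Data$data, as.data.frame)))
    # wait at most one second for the socket so the elapsed-time check runs
    if (!socketSelect(list(con), FALSE, 1))
      next
    curMsg <- readBin(con, character(), 1L)
    if (length(curMsg) < 1)
      next
    if (!is.null(timestamp)) {
      processMsg(curMsg, con, eWrapper, format(Sys.time(), timestamp), file, ...)
    } else {
      processMsg(curMsg, con, eWrapper, timestamp, file, ...)
    }
    if (!any(sapply(eWrapper$.Data$data, is.na)))
      return(do.call(rbind, lapply(eWrapper$.Data$data, as.data.frame)))
  }
}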
