R time interval

I have a time array:
time_array
[1] 08:05:02 08:46:08 09:13:54 09:51:21 10:07:31 11:34:12
[7] 11:45:28 12:18:14 12:26:05 12:58:35 13:11:09 14:14:29
[13] 15:28:47 15:56:47 16:22:06 16:40:15 18:15:00 19:01:39
[19] 20:36:22 21:16:27 21:26:11 21:43:06 23:51:47 00:18:35
[25] 01:02:37 02:08:14 02:52:09 04:02:01 04:37:46 05:41:58
[31] 05:59:22 07:22:07 08:32:47 09:13:23 09:39:01 10:19:35
[37] 11:53:18 12:05:53 12:18:42 12:33:04 13:16:19 13:37:34
[43] 13:54:14 14:31:39 14:44:46 15:26:23 16:03:25 17:21:44
[49] 18:00:24 19:10:50 19:44:01 20:55:16 21:09:06 22:02:01
[55] 22:53:00 23:19:43 23:59:01 01:39:20 02:28:35 02:41:08
[61] 02:57:42 03:03:45 03:10:13 04:57:44 06:00:59 07:51:50
[67] 08:10:19 08:23:24 08:32:52 09:37:47 10:43:16 11:17:07
I need to run some checks on these values. For example:
if(time_array[i] > "08:05:00" & time_array[i] < "08:15:00"){
  cond[i] <- 1
} else {
  cond[i] <- 0
}
How can I do this?

We can do something like this in base R:
# Convert to POSIXct
times <- as.POSIXct(time_array, format = "%H:%M:%S")
# Store in data.frame and flag entries
df <- data.frame(
  time = time_array,
  flag = as.numeric(
    times > as.POSIXct("08:05:00", format = "%H:%M:%S") &
    times < as.POSIXct("08:15:00", format = "%H:%M:%S")))
head(df)
# time flag
#1 08:05:02 1
#2 08:46:08 0
#3 09:13:54 0
#4 09:51:21 0
#5 10:07:31 0
#6 11:34:12 0
Sample data
time_array <- c(
'08:05:02', '08:46:08', '09:13:54', '09:51:21', '10:07:31', '11:34:12',
'11:45:28', '12:18:14', '12:26:05', '12:58:35', '13:11:09', '14:14:29',
'15:28:47', '15:56:47', '16:22:06', '16:40:15', '18:15:00', '19:01:39',
'20:36:22', '21:16:27', '21:26:11', '21:43:06', '23:51:47', '00:18:35',
'01:02:37', '02:08:14', '02:52:09', '04:02:01', '04:37:46', '05:41:58',
'05:59:22', '07:22:07', '08:32:47', '09:13:23', '09:39:01', '10:19:35',
'11:53:18', '12:05:53', '12:18:42', '12:33:04', '13:16:19', '13:37:34',
'13:54:14', '14:31:39', '14:44:46', '15:26:23', '16:03:25', '17:21:44',
'18:00:24', '19:10:50', '19:44:01', '20:55:16', '21:09:06', '22:02:01',
'22:53:00', '23:19:43', '23:59:01', '01:39:20', '02:28:35', '02:41:08',
'02:57:42', '03:03:45', '03:10:13', '04:57:44', '06:00:59', '07:51:50',
'08:10:19', '08:23:24', '08:32:52', '09:37:47', '10:43:16', '11:17:07')
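Alternatively, because the times are zero-padded "HH:MM:SS" strings, they already sort lexicographically in time order, so a minimal sketch of the same check can skip the POSIXct conversion entirely:
# string comparison works here because the times are zero-padded "HH:MM:SS"
cond <- as.numeric(time_array > "08:05:00" & time_array < "08:15:00")
head(cond)
#> [1] 1 0 0 0 0 0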

Related

Create a vector of years and weeks with ISO-8601 format in R

I would like to create a vector from one date to another, displayed as YYYYWW, which increments by week. It is important that the weeks follow the ISO-8601 standard; here is a reference for ISO-8601 week numbering: https://www.epochconverter.com/weeks/2021
To my knowledge, the neatest way to generate weeks in this format is with lubridate's isoyear() and isoweek(). For example:
from_date <- paste0(lubridate::isoyear("2021-01-01"), lubridate::isoweek("2021-01-01"))
from_date
#> [1] "202053"
to_date <- paste0(lubridate::isoyear(Sys.Date()), lubridate::isoweek(Sys.Date()))
to_date
#> [1] "20222"
How can I create an array between from_date and to_date with an increment of one week keeping the ISO-8601 standard?
I have tried something like
YearWeek <- seq(from_date, to_date, by = "weeks")
but seq takes date classes in this instance which forces me out of the ISO-8601 standard.
For reference I would like my final result to look like this:
# Vector of YearWeek from 2021-01-01 to today's date, incremented by week
YearWeek
[1] 202053 202101 202102 202103 202104 202105 202106 202107 202108 202109 202110 202111 202112 202113 202114
[16] 202115 202116 202117 202118 202119 202120 202121 202122 202123 202124 202125 202126 202127 202128 202129
[31] 202130 202131 202132 202133 202134 202135 202136 202137 202138 202139 202140 202141 202142 202143 202144
[46] 202145 202146 202147 202148 202149 202150 202151 202152 202201
Please find below one possible solution to what you are looking for:
Reprex
Code
library(lubridate)
# Data
vec_date <- seq(date("2020-01-01"), date("2022-01-12"), by = "weeks")
# Convert dates into ISO year and ISO week
vec_date_iso <- paste0(lubridate::isoyear(vec_date), lubridate::isoweek(vec_date))
Output
vec_date_iso
#> [1] "20201" "20202" "20203" "20204" "20205" "20206" "20207" "20208"
#> [9] "20209" "202010" "202011" "202012" "202013" "202014" "202015" "202016"
#> [17] "202017" "202018" "202019" "202020" "202021" "202022" "202023" "202024"
#> [25] "202025" "202026" "202027" "202028" "202029" "202030" "202031" "202032"
#> [33] "202033" "202034" "202035" "202036" "202037" "202038" "202039" "202040"
#> [41] "202041" "202042" "202043" "202044" "202045" "202046" "202047" "202048"
#> [49] "202049" "202050" "202051" "202052" "202053" "20211" "20212" "20213"
#> [57] "20214" "20215" "20216" "20217" "20218" "20219" "202110" "202111"
#> [65] "202112" "202113" "202114" "202115" "202116" "202117" "202118" "202119"
#> [73] "202120" "202121" "202122" "202123" "202124" "202125" "202126" "202127"
#> [81] "202128" "202129" "202130" "202131" "202132" "202133" "202134" "202135"
#> [89] "202136" "202137" "202138" "202139" "202140" "202141" "202142" "202143"
#> [97] "202144" "202145" "202146" "202147" "202148" "202149" "202150" "202151"
#> [105] "202152" "20221" "20222"
Created on 2022-01-12 by the reprex package (v2.0.1)
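Note that isoweek() returns an unpadded integer, so early weeks come out as e.g. "20211" rather than the zero-padded "202101" shown in the question. A small sketch (assuming sprintf() padding is acceptable) that also starts at 2021-01-01:
library(lubridate)
vec_date <- seq(as.Date("2021-01-01"), Sys.Date(), by = "weeks")
# pad the ISO week to two digits so that week 1 of 2021 becomes "202101"
YearWeek <- sprintf("%d%02d", isoyear(vec_date), isoweek(vec_date))
head(YearWeek, 4)
#> [1] "202053" "202101" "202102" "202103"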

Calculate a rolling percent change in R

I am trying to calculate a 20-day rolling percent change in R based off of a stock's closing price. Below is a sample of the most recent 100 days of closing price data. df$Close[1] is the most recent day, df$Close[2] is the previous day, and so on.
df$Close
[1] 342.94 346.22 346.18 335.24 330.45 334.20 325.45 333.79 334.90 341.66 333.74 334.49 329.75 329.82 330.56 322.81 317.87 306.84
[19] 310.39 310.60 324.46 338.03 333.12 341.06 337.25 341.01 345.30 338.69 340.77 342.96 347.56 340.89 327.74 327.64 335.37 338.62
[37] 341.13 335.85 331.62 328.08 329.98 323.57 316.92 312.22 315.81 328.69 324.61 341.88 340.78 339.99 335.34 324.76 328.53 324.54
[55] 323.77 325.45 330.05 329.22 333.64 332.96 326.23 343.01 339.39 339.61 340.65 353.58 352.96 345.96 343.21 357.48 355.70 364.72
[73] 373.06 373.92 376.53 376.51 378.69 378.00 377.57 382.18 376.26 375.28 382.05 379.38 380.66 372.63 364.38 368.39 365.51 363.35
[91] 359.37 355.12 355.45 358.45 366.56 363.18 362.65 359.96 361.13 361.61
Previously, I had used the following code to calculate the percent change:
PercChange(df, Var = 'Close', type = 'percent', NewVar = 'OneMonthChange', slideBy = 20)
which gave me the following output:
df$OneMonthChange
[1] 5.695617e-02 2.422862e-02 3.920509e-02 -1.706445e-02 -2.016308e-02 -1.997009e-02 -5.748624e-02 -1.446751e-02 -1.722569e-02
[10] -3.790530e-03 -3.976292e-02 -1.877438e-02 6.132910e-03 6.653644e-03 -1.434237e-02 -4.668950e-02 -6.818515e-02 -8.637785e-02
[19] -6.401906e-02 -5.327969e-02 -1.672829e-02 4.468894e-02 5.111700e-02 9.237076e-02 6.788892e-02 3.748213e-02 6.373802e-02
[28] -9.330759e-03 -2.934445e-05 8.735551e-03 3.644063e-02 4.966745e-02 -2.404651e-03 9.551981e-03 3.582790e-02 4.046705e-02
[37] 3.357067e-02 2.013851e-02 -6.054430e-03 -1.465642e-02 1.149496e-02 -5.667473e-02 -6.620702e-02 -8.065134e-02 -7.291942e-02
[46] -7.039425e-02 -8.032072e-02 -1.179327e-02 -7.080213e-03 -4.892581e-02 -5.723925e-02 -1.095635e-01 -1.193642e-01 -1.320603e-01
[55] -1.401216e-01 -1.356139e-01 -1.284428e-01 -1.290476e-01 -1.163493e-01 -1.287875e-01 -1.329666e-01 -8.598913e-02 -1.116608e-01
[64] -1.048289e-01 -1.051069e-01 -5.112310e-02 -3.134091e-02 -6.088656e-02 -6.101064e-02 -1.615522e-02 -1.021232e-02 2.703312e-02
[73] 4.954283e-02 4.315804e-02 2.719882e-02 3.670356e-02 4.422997e-02 5.011668e-02 4.552377e-02 5.688449e-02 3.507469e-02
[82] 3.391465e-02 6.444333e-02 8.011616e-02 8.157409e-02 4.583216e-02 1.691226e-02 -1.310009e-02 -6.253229e-03 -2.445900e-02
[91] -2.817816e-02 1.119052e-02 2.662970e-02 4.914242e-02 8.787654e-02 6.454450e-02 5.280729e-02 3.546875e-02 2.567525e-02
[100] 2.392683e-02
The PercChange function has now been deprecated and I need to find a new function to replace it. Essentially, I need a function that calculates the percent change of df$Close[1:20] (this would be the close of day 1 minus the close of day 20, divided by the close of day 20), then rolls to [2:21] for the next row, then [3:22], [4:23], and so on.
Thanks in advance!
A tidyverse approach
library(tidyr)
library(dplyr)
df %>%
  mutate(OneMonthChange = (Close - lead(Close, 20)) / lead(Close, 20),
         OneMonthChange = replace_na(OneMonthChange, 0))
Close OneMonthChange
1 342.94 5.695617e-02
2 346.22 2.422862e-02
3 346.18 3.920509e-02
4 335.24 -1.706445e-02
5 330.45 -2.016308e-02
6 334.20 -1.997009e-02
etc...
Here is a simple Base R solution:
PercChange <- function(x, slideBy){
  -diff(x, slideBy) / tail(x, -slideBy)
}
PercChange(df$Close, slideBy = 20)
[1] 5.695617e-02 2.422862e-02 3.920509e-02 -1.706445e-02
[5] -2.016308e-02 -1.997009e-02 -5.748624e-02 -1.446751e-02
[9] -1.722569e-02 -3.790530e-03 -3.976292e-02 -1.877438e-02
If you want a data frame back, then modify this into:
PercChange <- function(data, Var, NewVar, slideBy){
  x <- data[[Var]]
  data[NewVar] <- c(-diff(x, slideBy) / tail(x, -slideBy), numeric(slideBy))
  data
}
PercChange(df, Var = 'Close', NewVar = 'OneMonthChange', slideBy = 20)
data:
df <- structure(list(Close = c(342.94, 346.22, 346.18, 335.24, 330.45,
334.2, 325.45, 333.79, 334.9, 341.66, 333.74, 334.49, 329.75,
329.82, 330.56, 322.81, 317.87, 306.84, 310.39, 310.6, 324.46,
338.03, 333.12, 341.06, 337.25, 341.01, 345.3, 338.69, 340.77,
342.96, 347.56, 340.89, 327.74, 327.64, 335.37, 338.62, 341.13,
335.85, 331.62, 328.08, 329.98, 323.57, 316.92, 312.22, 315.81,
328.69, 324.61, 341.88, 340.78, 339.99, 335.34, 324.76, 328.53,
324.54, 323.77, 325.45, 330.05, 329.22, 333.64, 332.96, 326.23,
343.01, 339.39, 339.61, 340.65, 353.58, 352.96, 345.96, 343.21,
357.48, 355.7, 364.72, 373.06, 373.92, 376.53, 376.51, 378.69,
378, 377.57, 382.18, 376.26, 375.28, 382.05, 379.38, 380.66,
372.63, 364.38, 368.39, 365.51, 363.35, 359.37, 355.12, 355.45,
358.45, 366.56, 363.18, 362.65, 359.96, 361.13, 361.61)), class = "data.frame", row.names = c(NA,
-100L))
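If data.table is an option, an equivalent sketch with shift() (data.table itself is an assumption on my part; the logic simply mirrors the lead()-based answer above):
library(data.table)
DT <- as.data.table(df)
# percent change relative to the value 20 rows ahead
DT[, OneMonthChange := (Close - shift(Close, 20, type = "lead")) / shift(Close, 20, type = "lead")]
DT[is.na(OneMonthChange), OneMonthChange := 0]  # mirrors the replace_na() step
head(DT$OneMonthChange)  # first value matches 5.695617e-02 above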

Generate a sequence of numbers with 000000 as start value

I am writing a function that takes a start and end day in the format dhhmmss (day-hour-minute-second) and calculates the length of the palindrome numbers between the start and end dhhmmss.
By definition, the start hhmmss is 000000 and the end hhmmss is 235959.
My function has to take only the start d and end d and calculate the length of the palindrome numbers between these two.
Here's how I did it
Reverse.numberAsString <- function(x){  # Reverse using string manipulation
  x.out <- as.character(x)              # convert the number to a character string
  x.out <- unlist(strsplit(x.out, ''))  # break the string up into a vector
  x.out <- rev(x.out)                   # reverse it
  x.out <- paste(x.out, collapse='')    # join it back together
  x.out <- as.numeric(x.out)            # turn it back into a number
  return(x.out)
}
is.Palindrome <- function(x){
  x == sapply(x, Reverse.numberAsString)
}
palindrom_fun <- function(n1, n2){
  if (n1 > n2) {
    print('n1 cannot be > n2')
  } else {
    n1.mod <- as.numeric(paste(c(n1, "000000"), collapse = ""))
    n2.mod <- as.numeric(paste(c(n2, "235959"), collapse = ""))
    x <- seq(from = n1.mod, to = n2.mod, by = 1)
    palindrome_number <- x[is.Palindrome(x)]
    length.palindrom <- length(palindrome_number)
    return(length.palindrom)
  }
}
palindrom_fun(1, 2)
# 1236
However, the above function will not work if n1 = 0 and n2 = 1 because of the lines
n1.mod <- as.numeric(paste(c(n1, "000000"), collapse = ""))
n2.mod <- as.numeric(paste(c(n2, "235959"), collapse = ""))
since the leading zeros are dropped when the strings are converted to numeric, so R cannot create the sequence of 7-digit numbers from 0000000 to 1235959. How can I get my function to work for this case?
You can compare the head of each string with its reversed tail using : indexing (since head and tail are comparatively slow). For the desired sequence you can use sprintf to generate the leading zeros.
isPalindrome <- Vectorize(function(x) {
  s <- el(strsplit(as.character(x), ""))
  ll <- length(s)
  l2 <- pmax(floor(ll / 2), 1)
  # out <- all(head(s, l2) == rev(tail(s, l2)))  ## slower
  out <- all(s[1:l2] == s[ll:(ll - l2 + 1)])
  return(out)
})
## Test
x <- c("0000000", "1123456", "1231321", "0000", "1234", "11", "12", "1")
isPalindrome(x)
# 0000000 1123456 1231321 0000 1234 11 12 1
# TRUE FALSE TRUE TRUE FALSE TRUE FALSE TRUE
In the following palindromFun function I'll add the actual palindromes as an attribute so that they are returned by the function. (To switch off this behavior, just comment out the line with the ## mark.)
palindromFun <- function(n1, n2) {
  if (n1 > n2) {
    print('n1 cannot be > n2')
  } else {
    tm <- sprintf("%06d", 0:235959)
    dy <- n1:n2
    r <- paste0(rep(dy, each = length(tm)), tm)
    pd <- isPalindrome(r)
    out <- sum(pd)
    out <- `attr<-`(out, "palindroms", r[pd])  ## mark
    return(out)
  }
}
Result 1
r1 <- palindromFun(n1=0, n2=1)
r1
# [1] 472
# attr(,"palindroms")
# [1] "0000000" "0001000" "0002000" "0003000" "0004000" "0005000" "0006000"
# [8] "0007000" "0008000" "0009000" "0010100" "0011100" "0012100" "0013100"
# [15] "0014100" "0015100" "0016100" "0017100" "0018100" "0019100" "0020200"
# [22] "0021200" "0022200" "0023200" "0024200" "0025200" "0026200" "0027200"
# [29] "0028200" "0029200" "0030300" "0031300" "0032300" "0033300" "0034300"
# [36] "0035300" "0036300" "0037300" "0038300" "0039300" "0040400" "0041400"
# [43] "0042400" "0043400" "0044400" "0045400" "0046400" "0047400" "0048400"
# [50] "0049400" "0050500" "0051500" "0052500" "0053500" "0054500" "0055500"
# [57] "0056500" "0057500" "0058500" "0059500" "0060600" "0061600" "0062600"
# [64] "0063600" "0064600" "0065600" "0066600" "0067600" "0068600" "0069600"
# [71] "0070700" "0071700" "0072700" "0073700" "0074700" "0075700" "0076700"
# [78] "0077700" "0078700" "0079700" "0080800" "0081800" "0082800" "0083800"
# [85] "0084800" "0085800" "0086800" "0087800" "0088800" "0089800" "0090900"
# [92] "0091900" "0092900" "0093900" "0094900" "0095900" "0096900" "0097900"
# [99] "0098900" "0099900" "0100010" "0101010" "0102010" "0103010" "0104010"
# [106] "0105010" "0106010" "0107010" "0108010" "0109010" "0110110" "0111110"
# [113] "0112110" "0113110" "0114110" "0115110" "0116110" "0117110" "0118110"
# [120] "0119110" "0120210" "0121210" "0122210" "0123210" "0124210" "0125210"
# [127] "0126210" "0127210" "0128210" "0129210" "0130310" "0131310" "0132310"
# [134] "0133310" "0134310" "0135310" "0136310" "0137310" "0138310" "0139310"
# [141] "0140410" "0141410" "0142410" "0143410" "0144410" "0145410" "0146410"
# [148] "0147410" "0148410" "0149410" "0150510" "0151510" "0152510" "0153510"
# [155] "0154510" "0155510" "0156510" "0157510" "0158510" "0159510" "0160610"
# [162] "0161610" "0162610" "0163610" "0164610" "0165610" "0166610" "0167610"
# [169] "0168610" "0169610" "0170710" "0171710" "0172710" "0173710" "0174710"
# [176] "0175710" "0176710" "0177710" "0178710" "0179710" "0180810" "0181810"
# [183] "0182810" "0183810" "0184810" "0185810" "0186810" "0187810" "0188810"
# [190] "0189810" "0190910" "0191910" "0192910" "0193910" "0194910" "0195910"
# [197] "0196910" "0197910" "0198910" "0199910" "0200020" "0201020" "0202020"
# [204] "0203020" "0204020" "0205020" "0206020" "0207020" "0208020" "0209020"
# [211] "0210120" "0211120" "0212120" "0213120" "0214120" "0215120" "0216120"
# [218] "0217120" "0218120" "0219120" "0220220" "0221220" "0222220" "0223220"
# [225] "0224220" "0225220" "0226220" "0227220" "0228220" "0229220" "0230320"
# [232] "0231320" "0232320" "0233320" "0234320" "0235320" "1000001" "1001001"
# [239] "1002001" "1003001" "1004001" "1005001" "1006001" "1007001" "1008001"
# [246] "1009001" "1010101" "1011101" "1012101" "1013101" "1014101" "1015101"
# [253] "1016101" "1017101" "1018101" "1019101" "1020201" "1021201" "1022201"
# [260] "1023201" "1024201" "1025201" "1026201" "1027201" "1028201" "1029201"
# [267] "1030301" "1031301" "1032301" "1033301" "1034301" "1035301" "1036301"
# [274] "1037301" "1038301" "1039301" "1040401" "1041401" "1042401" "1043401"
# [281] "1044401" "1045401" "1046401" "1047401" "1048401" "1049401" "1050501"
# [288] "1051501" "1052501" "1053501" "1054501" "1055501" "1056501" "1057501"
# [295] "1058501" "1059501" "1060601" "1061601" "1062601" "1063601" "1064601"
# [302] "1065601" "1066601" "1067601" "1068601" "1069601" "1070701" "1071701"
# [309] "1072701" "1073701" "1074701" "1075701" "1076701" "1077701" "1078701"
# [316] "1079701" "1080801" "1081801" "1082801" "1083801" "1084801" "1085801"
# [323] "1086801" "1087801" "1088801" "1089801" "1090901" "1091901" "1092901"
# [330] "1093901" "1094901" "1095901" "1096901" "1097901" "1098901" "1099901"
# [337] "1100011" "1101011" "1102011" "1103011" "1104011" "1105011" "1106011"
# [344] "1107011" "1108011" "1109011" "1110111" "1111111" "1112111" "1113111"
# [351] "1114111" "1115111" "1116111" "1117111" "1118111" "1119111" "1120211"
# [358] "1121211" "1122211" "1123211" "1124211" "1125211" "1126211" "1127211"
# [365] "1128211" "1129211" "1130311" "1131311" "1132311" "1133311" "1134311"
# [372] "1135311" "1136311" "1137311" "1138311" "1139311" "1140411" "1141411"
# [379] "1142411" "1143411" "1144411" "1145411" "1146411" "1147411" "1148411"
# [386] "1149411" "1150511" "1151511" "1152511" "1153511" "1154511" "1155511"
# [393] "1156511" "1157511" "1158511" "1159511" "1160611" "1161611" "1162611"
# [400] "1163611" "1164611" "1165611" "1166611" "1167611" "1168611" "1169611"
# [407] "1170711" "1171711" "1172711" "1173711" "1174711" "1175711" "1176711"
# [414] "1177711" "1178711" "1179711" "1180811" "1181811" "1182811" "1183811"
# [421] "1184811" "1185811" "1186811" "1187811" "1188811" "1189811" "1190911"
# [428] "1191911" "1192911" "1193911" "1194911" "1195911" "1196911" "1197911"
# [435] "1198911" "1199911" "1200021" "1201021" "1202021" "1203021" "1204021"
# [442] "1205021" "1206021" "1207021" "1208021" "1209021" "1210121" "1211121"
# [449] "1212121" "1213121" "1214121" "1215121" "1216121" "1217121" "1218121"
# [456] "1219121" "1220221" "1221221" "1222221" "1223221" "1224221" "1225221"
# [463] "1226221" "1227221" "1228221" "1229221" "1230321" "1231321" "1232321"
# [470] "1233321" "1234321" "1235321"
Result 2
r2 <- palindromFun(n1=0, n2=2)
r2
# [1] 708
# attr(,"palindroms")
# [1] "0000000" "0001000" "0002000" "0003000" "0004000" "0005000" "0006000"
# [8] "0007000" "0008000" "0009000" "0010100" "0011100" "0012100" "0013100"
# [15] "0014100" "0015100" "0016100" "0017100" "0018100" "0019100" "0020200"
# [22] "0021200" "0022200" "0023200" "0024200" "0025200" "0026200" "0027200"
# [29] "0028200" "0029200" "0030300" "0031300" "0032300" "0033300" "0034300"
# [36] "0035300" "0036300" "0037300" "0038300" "0039300" "0040400" "0041400"
# [43] "0042400" "0043400" "0044400" "0045400" "0046400" "0047400" "0048400"
# [50] "0049400" "0050500" "0051500" "0052500" "0053500" "0054500" "0055500"
# [57] "0056500" "0057500" "0058500" "0059500" "0060600" "0061600" "0062600"
# [64] "0063600" "0064600" "0065600" "0066600" "0067600" "0068600" "0069600"
# [71] "0070700" "0071700" "0072700" "0073700" "0074700" "0075700" "0076700"
# [78] "0077700" "0078700" "0079700" "0080800" "0081800" "0082800" "0083800"
# [85] "0084800" "0085800" "0086800" "0087800" "0088800" "0089800" "0090900"
# [92] "0091900" "0092900" "0093900" "0094900" "0095900" "0096900" "0097900"
# [99] "0098900" "0099900" "0100010" "0101010" "0102010" "0103010" "0104010"
# [106] "0105010" "0106010" "0107010" "0108010" "0109010" "0110110" "0111110"
# [113] "0112110" "0113110" "0114110" "0115110" "0116110" "0117110" "0118110"
# [120] "0119110" "0120210" "0121210" "0122210" "0123210" "0124210" "0125210"
# [127] "0126210" "0127210" "0128210" "0129210" "0130310" "0131310" "0132310"
# [134] "0133310" "0134310" "0135310" "0136310" "0137310" "0138310" "0139310"
# [141] "0140410" "0141410" "0142410" "0143410" "0144410" "0145410" "0146410"
# [148] "0147410" "0148410" "0149410" "0150510" "0151510" "0152510" "0153510"
# [155] "0154510" "0155510" "0156510" "0157510" "0158510" "0159510" "0160610"
# [162] "0161610" "0162610" "0163610" "0164610" "0165610" "0166610" "0167610"
# [169] "0168610" "0169610" "0170710" "0171710" "0172710" "0173710" "0174710"
# [176] "0175710" "0176710" "0177710" "0178710" "0179710" "0180810" "0181810"
# [183] "0182810" "0183810" "0184810" "0185810" "0186810" "0187810" "0188810"
# [190] "0189810" "0190910" "0191910" "0192910" "0193910" "0194910" "0195910"
# [197] "0196910" "0197910" "0198910" "0199910" "0200020" "0201020" "0202020"
# [204] "0203020" "0204020" "0205020" "0206020" "0207020" "0208020" "0209020"
# [211] "0210120" "0211120" "0212120" "0213120" "0214120" "0215120" "0216120"
# [218] "0217120" "0218120" "0219120" "0220220" "0221220" "0222220" "0223220"
# [225] "0224220" "0225220" "0226220" "0227220" "0228220" "0229220" "0230320"
# [232] "0231320" "0232320" "0233320" "0234320" "0235320" "1000001" "1001001"
# [239] "1002001" "1003001" "1004001" "1005001" "1006001" "1007001" "1008001"
# [246] "1009001" "1010101" "1011101" "1012101" "1013101" "1014101" "1015101"
# [253] "1016101" "1017101" "1018101" "1019101" "1020201" "1021201" "1022201"
# [260] "1023201" "1024201" "1025201" "1026201" "1027201" "1028201" "1029201"
# [267] "1030301" "1031301" "1032301" "1033301" "1034301" "1035301" "1036301"
# [274] "1037301" "1038301" "1039301" "1040401" "1041401" "1042401" "1043401"
# [281] "1044401" "1045401" "1046401" "1047401" "1048401" "1049401" "1050501"
# [288] "1051501" "1052501" "1053501" "1054501" "1055501" "1056501" "1057501"
# [295] "1058501" "1059501" "1060601" "1061601" "1062601" "1063601" "1064601"
# [302] "1065601" "1066601" "1067601" "1068601" "1069601" "1070701" "1071701"
# [309] "1072701" "1073701" "1074701" "1075701" "1076701" "1077701" "1078701"
# [316] "1079701" "1080801" "1081801" "1082801" "1083801" "1084801" "1085801"
# [323] "1086801" "1087801" "1088801" "1089801" "1090901" "1091901" "1092901"
# [330] "1093901" "1094901" "1095901" "1096901" "1097901" "1098901" "1099901"
# [337] "1100011" "1101011" "1102011" "1103011" "1104011" "1105011" "1106011"
# [344] "1107011" "1108011" "1109011" "1110111" "1111111" "1112111" "1113111"
# [351] "1114111" "1115111" "1116111" "1117111" "1118111" "1119111" "1120211"
# [358] "1121211" "1122211" "1123211" "1124211" "1125211" "1126211" "1127211"
# [365] "1128211" "1129211" "1130311" "1131311" "1132311" "1133311" "1134311"
# [372] "1135311" "1136311" "1137311" "1138311" "1139311" "1140411" "1141411"
# [379] "1142411" "1143411" "1144411" "1145411" "1146411" "1147411" "1148411"
# [386] "1149411" "1150511" "1151511" "1152511" "1153511" "1154511" "1155511"
# [393] "1156511" "1157511" "1158511" "1159511" "1160611" "1161611" "1162611"
# [400] "1163611" "1164611" "1165611" "1166611" "1167611" "1168611" "1169611"
# [407] "1170711" "1171711" "1172711" "1173711" "1174711" "1175711" "1176711"
# [414] "1177711" "1178711" "1179711" "1180811" "1181811" "1182811" "1183811"
# [421] "1184811" "1185811" "1186811" "1187811" "1188811" "1189811" "1190911"
# [428] "1191911" "1192911" "1193911" "1194911" "1195911" "1196911" "1197911"
# [435] "1198911" "1199911" "1200021" "1201021" "1202021" "1203021" "1204021"
# [442] "1205021" "1206021" "1207021" "1208021" "1209021" "1210121" "1211121"
# [449] "1212121" "1213121" "1214121" "1215121" "1216121" "1217121" "1218121"
# [456] "1219121" "1220221" "1221221" "1222221" "1223221" "1224221" "1225221"
# [463] "1226221" "1227221" "1228221" "1229221" "1230321" "1231321" "1232321"
# [470] "1233321" "1234321" "1235321" "2000002" "2001002" "2002002" "2003002"
# [477] "2004002" "2005002" "2006002" "2007002" "2008002" "2009002" "2010102"
# [484] "2011102" "2012102" "2013102" "2014102" "2015102" "2016102" "2017102"
# [491] "2018102" "2019102" "2020202" "2021202" "2022202" "2023202" "2024202"
# [498] "2025202" "2026202" "2027202" "2028202" "2029202" "2030302" "2031302"
# [505] "2032302" "2033302" "2034302" "2035302" "2036302" "2037302" "2038302"
# [512] "2039302" "2040402" "2041402" "2042402" "2043402" "2044402" "2045402"
# [519] "2046402" "2047402" "2048402" "2049402" "2050502" "2051502" "2052502"
# [526] "2053502" "2054502" "2055502" "2056502" "2057502" "2058502" "2059502"
# [533] "2060602" "2061602" "2062602" "2063602" "2064602" "2065602" "2066602"
# [540] "2067602" "2068602" "2069602" "2070702" "2071702" "2072702" "2073702"
# [547] "2074702" "2075702" "2076702" "2077702" "2078702" "2079702" "2080802"
# [554] "2081802" "2082802" "2083802" "2084802" "2085802" "2086802" "2087802"
# [561] "2088802" "2089802" "2090902" "2091902" "2092902" "2093902" "2094902"
# [568] "2095902" "2096902" "2097902" "2098902" "2099902" "2100012" "2101012"
# [575] "2102012" "2103012" "2104012" "2105012" "2106012" "2107012" "2108012"
# [582] "2109012" "2110112" "2111112" "2112112" "2113112" "2114112" "2115112"
# [589] "2116112" "2117112" "2118112" "2119112" "2120212" "2121212" "2122212"
# [596] "2123212" "2124212" "2125212" "2126212" "2127212" "2128212" "2129212"
# [603] "2130312" "2131312" "2132312" "2133312" "2134312" "2135312" "2136312"
# [610] "2137312" "2138312" "2139312" "2140412" "2141412" "2142412" "2143412"
# [617] "2144412" "2145412" "2146412" "2147412" "2148412" "2149412" "2150512"
# [624] "2151512" "2152512" "2153512" "2154512" "2155512" "2156512" "2157512"
# [631] "2158512" "2159512" "2160612" "2161612" "2162612" "2163612" "2164612"
# [638] "2165612" "2166612" "2167612" "2168612" "2169612" "2170712" "2171712"
# [645] "2172712" "2173712" "2174712" "2175712" "2176712" "2177712" "2178712"
# [652] "2179712" "2180812" "2181812" "2182812" "2183812" "2184812" "2185812"
# [659] "2186812" "2187812" "2188812" "2189812" "2190912" "2191912" "2192912"
# [666] "2193912" "2194912" "2195912" "2196912" "2197912" "2198912" "2199912"
# [673] "2200022" "2201022" "2202022" "2203022" "2204022" "2205022" "2206022"
# [680] "2207022" "2208022" "2209022" "2210122" "2211122" "2212122" "2213122"
# [687] "2214122" "2215122" "2216122" "2217122" "2218122" "2219122" "2220222"
# [694] "2221222" "2222222" "2223222" "2224222" "2225222" "2226222" "2227222"
# [701] "2228222" "2229222" "2230322" "2231322" "2232322" "2233322" "2234322"
# [708] "2235322"
My number of palindromes seems to be different from yours, though; most likely because sprintf("%06d", 0:235959) also produces strings such as "009000", whose minute field exceeds 59 and which are therefore not valid times.
Here is a quick method to create the desired sequence using R's built-in time and date functions.
# create the time sequence for every second of one day
dateseq <- seq(as.POSIXct("2020-08-15"), as.POSIXct("2020-08-16"), by = "1 sec")
# remove the last element (midnight of the next day)
dateseq <- dateseq[-86401]
# format to the desired HHMMSS strings
answer <- format(dateseq, "%H%M%S")
tail(answer)
#[1] "235954" "235955" "235956" "235957" "235958" "235959"
Here's one way to approach the entire problem in a functional style, using only base R. That is, break each problem down to a single task and build up the functionality you need:
# Converts strings in the format "1234556" to date-times
as_time <- function(chr) {
  chr[nchar(chr) == 7] <- paste0("0", chr[nchar(chr) == 7])
  strptime(chr, "%d%H%M%S")
}
# Converts date-times to strings in the format "1234556"
as_chr <- function(t) {
  paste0(as.numeric(substr(t, 9, 10)), strftime(t, "%H%M%S"))
}
# Gets a sequence of valid strings between two strings in the format "1234556"
seq_times <- function(t1, t2) {
  as_chr(seq(as_time(t1), as_time(t2), by = "1 sec"))
}
# Reverses each string in a character vector
rev_string <- function(s) {
  sapply(s, function(x) intToUtf8(rev(utf8ToInt(x))), USE.NAMES = FALSE)
}
# Returns only those strings between t1 and t2 that are palindromes
get_palindromes <- function(t1, t2) {
  str <- seq_times(t1, t2)
  str[str == rev_string(str)]
}
So now we can do:
get_palindromes("1000000", "2000000")
#> [1] "1000001" "1001001" "1002001" "1003001" "1004001" "1005001" "1010101"
#> [8] "1011101" "1012101" "1013101" "1014101" "1015101" "1020201" "1021201"
#> [15] "1022201" "1023201" "1024201" "1025201" "1030301" "1031301" "1032301"
#> [22] "1033301" "1034301" "1035301" "1040401" "1041401" "1042401" "1043401"
#> [29] "1044401" "1045401" "1050501" "1051501" "1052501" "1053501" "1054501"
#> [36] "1055501" "1060601" "1061601" "1062601" "1063601" "1064601" "1065601"
#> [43] "1070701" "1071701" "1072701" "1073701" "1074701" "1075701" "1080801"
#> [50] "1081801" "1082801" "1083801" "1084801" "1085801" "1090901" "1091901"
#> [57] "1092901" "1093901" "1094901" "1095901" "1100011" "1101011" "1102011"
#> [64] "1103011" "1104011" "1105011" "1110111" "1111111" "1112111" "1113111"
#> [71] "1114111" "1115111" "1120211" "1121211" "1122211" "1123211" "1124211"
#> [78] "1125211" "1130311" "1131311" "1132311" "1133311" "1134311" "1135311"
#> [85] "1140411" "1141411" "1142411" "1143411" "1144411" "1145411" "1150511"
#> [92] "1151511" "1152511" "1153511" "1154511" "1155511" "1160611" "1161611"
#> [99] "1162611" "1163611" "1164611" "1165611" "1170711" "1171711" "1172711"
#> [106] "1173711" "1174711" "1175711" "1180811" "1181811" "1182811" "1183811"
#> [113] "1184811" "1185811" "1190911" "1191911" "1192911" "1193911" "1194911"
#> [120] "1195911" "1200021" "1201021" "1202021" "1203021" "1204021" "1205021"
#> [127] "1210121" "1211121" "1212121" "1213121" "1214121" "1215121" "1220221"
#> [134] "1221221" "1222221" "1223221" "1224221" "1225221" "1230321" "1231321"
#> [141] "1232321" "1233321" "1234321" "1235321"
and
get_palindromes("2235000", "3060000")
#> [1] "2235322" "3000003" "3001003" "3002003" "3003003" "3004003" "3005003"
#> [8] "3010103" "3011103" "3012103" "3013103" "3014103" "3015103" "3020203"
#> [15] "3021203" "3022203" "3023203" "3024203" "3025203" "3030303" "3031303"
#> [22] "3032303" "3033303" "3034303" "3035303" "3040403" "3041403" "3042403"
#> [29] "3043403" "3044403" "3045403" "3050503" "3051503" "3052503" "3053503"
#> [36] "3054503" "3055503"
What do you mean by the length? If you mean the count, then I think we can use simple math to see how many possibilities there are.
Let us say n1 = 1 and n2 = 2. Out of the 7 places available (dhhmmss), you have only 2 choices for the 1st and the 7th place (the day digit, which the last digit must mirror). For the remaining 6 places, we need to think only about the first 3 of them (the 2nd, 3rd and 4th), as the rest will be the same as those (by the palindrome logic).
Now the 2nd place can have only 3 choices (0, 1 or 2, since the hour runs from 00 to 23; consider just the tens place). Let us store the value at the 2nd place in a variable h. Next, the 3rd place can have 10, 10 and 4 choices for h = {0, 1, 2} respectively. Following that, the 4th place (the minute tens place, ranging from 0 to 5) can have only 6 choices.
Hence, the total number of choices is 2 * (10 + 10 + 4) * 6 = 288.
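A brute-force check of this count (a quick sketch that enumerates only valid times, so it should agree with the 288 above):
count <- 0
for (d in 1:2) for (h in 0:23) for (m in 0:59) for (s in 0:59) {
  x <- sprintf("%d%02d%02d%02d", d, h, m, s)
  if (x == paste(rev(strsplit(x, "")[[1]]), collapse = "")) count <- count + 1
}
count
#> [1] 288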
You can use rep() to create the various time elements (days, hours, etc.) and then expand.grid() to get every combination of those elements. stri_reverse() from stringi can be used to compare each string with its reverse and thus establish whether it is a palindrome.
find_palindrome <- function(day_start, day_end){
  day <- rep(day_start:day_end)
  hour <- rep(0:23)
  min_sec <- rep(0:59)
  # expand.grid() finds every combination of inputs.
  # min_sec is used twice within expand.grid(), once for minutes and once for seconds.
  # The "%02d" within sprintf() preserves a 2-digit width (e.g. '01' instead of '1').
  df <- expand.grid(day, sprintf("%02d", hour), sprintf("%02d", min_sec), sprintf("%02d", min_sec))
  df <- as.data.frame(df)
  # create a column concatenating the values
  df$compare1 <- paste(df[,1], df[,2], df[,3], df[,4], sep = "")
  # reverse the string in another column
  df$compare2 <- stringi::stri_reverse(df$compare1)
  # compare the two columns to find your palindromes
  palindromes <- df$compare1[df$compare1 == df$compare2]
  return(palindromes)
}
Then run the function:
#example using day 0 to day 2
find_palindrome(0,2)
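Note that expand.grid() varies its first argument fastest, so the returned vector is not in chronological order; if that matters, sort() fixes it:
# the result is not ordered chronologically; sort it if needed
head(sort(find_palindrome(0, 2)), 5)
#> [1] "0000000" "0001000" "0002000" "0003000" "0004000"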

Split a sequence of numbers into groups of 10 digits using R

I would like for R to read in the first 10,000 digits of Pi and group every 10 digits together
e.g., I want R to read in a sequence
pi <- 3.14159265358979323846264338327950288419716939937510582097...
and would like R to give me a table where each row contains 10 digits:
3141592653
5897932384
6264338327
...
I am new to R and really don't know where to start so any help would be much appreciated!
Thank you in advance
https://rextester.com/OQRM27791
p <- strsplit("314159265358979323846264338327950288419716939937510582097", "")
digits <- p[[1]]
split(digits, ceiling(seq_along(digits) / 10))
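If single 10-digit strings are wanted rather than lists of individual characters, each group can be pasted back together, for example:
# collapse each group of 10 characters into one 10-digit string
groups <- split(digits, ceiling(seq_along(digits) / 10))
head(sapply(groups, paste, collapse = ""), 3)
#>            1            2            3
#> "3141592653" "5897932384" "6264338327"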
Here's one way to do it. It's fully reproducible, so just cut and paste it into your R console. The vector result is the first 10,000 digits of pi, split into 1000 strings of 10 digits.
For this many digits, I have used an online source for the precalculated value of pi. This is read in using readChar and the decimal point is stripped out with gsub. The resulting string is split into individual characters and put in a 1000 * 10 matrix (filled row-wise). The rows are then pasted into strings, giving the result. I have displayed only the first 100 entries of result for clarity of presentation.
pi_url <- "https://www.pi2e.ch/blog/wp-content/uploads/2017/03/pi_dec_1m.txt"
pi_char <- gsub("\\.", "", readChar(pi_url, 1e4 + 1))
pi_mat <- matrix(strsplit(pi_char, "")[[1]], byrow = TRUE, ncol = 10)
result <- apply(pi_mat, 1, paste0, collapse = "")
head(result, 100)
#> [1] "3141592653" "5897932384" "6264338327" "9502884197" "1693993751"
#> [6] "0582097494" "4592307816" "4062862089" "9862803482" "5342117067"
#> [11] "9821480865" "1328230664" "7093844609" "5505822317" "2535940812"
#> [16] "8481117450" "2841027019" "3852110555" "9644622948" "9549303819"
#> [21] "6442881097" "5665933446" "1284756482" "3378678316" "5271201909"
#> [26] "1456485669" "2346034861" "0454326648" "2133936072" "6024914127"
#> [31] "3724587006" "6063155881" "7488152092" "0962829254" "0917153643"
#> [36] "6789259036" "0011330530" "5488204665" "2138414695" "1941511609"
#> [41] "4330572703" "6575959195" "3092186117" "3819326117" "9310511854"
#> [46] "8074462379" "9627495673" "5188575272" "4891227938" "1830119491"
#> [51] "2983367336" "2440656643" "0860213949" "4639522473" "7190702179"
#> [56] "8609437027" "7053921717" "6293176752" "3846748184" "6766940513"
#> [61] "2000568127" "1452635608" "2778577134" "2757789609" "1736371787"
#> [66] "2146844090" "1224953430" "1465495853" "7105079227" "9689258923"
#> [71] "5420199561" "1212902196" "0864034418" "1598136297" "7477130996"
#> [76] "0518707211" "3499999983" "7297804995" "1059731732" "8160963185"
#> [81] "9502445945" "5346908302" "6425223082" "5334468503" "5261931188"
#> [86] "1710100031" "3783875288" "6587533208" "3814206171" "7766914730"
#> [91] "3598253490" "4287554687" "3115956286" "3882353787" "5937519577"
#> [96] "8185778053" "2171226806" "6130019278" "7661119590" "9216420198"
Created on 2020-07-23 by the reprex package (v0.3.0)
We can use str_extract_all from stringr:
pi <- readLines("https://www.pi2e.ch/blog/wp-content/uploads/2017/03/pi_dec_1m.txt")
library(stringr)
t <- unlist(str_extract_all(sub("\\.","", pi), "\\d{10}"))
t[1:100]
[1] "3141592653" "5897932384" "6264338327" "9502884197" "1693993751" "0582097494" "4592307816" "4062862089"
[9] "9862803482" "5342117067" "9821480865" "1328230664" "7093844609" "5505822317" "2535940812" "8481117450"
[17] "2841027019" "3852110555" "9644622948" "9549303819" "6442881097" "5665933446" "1284756482" "3378678316"
[25] "5271201909" "1456485669" "2346034861" "0454326648" "2133936072" "6024914127" "3724587006" "6063155881"
[33] "7488152092" "0962829254" "0917153643" "6789259036" "0011330530" "5488204665" "2138414695" "1941511609"
[41] "4330572703" "6575959195" "3092186117" "3819326117" "9310511854" "8074462379" "9627495673" "5188575272"
[49] "4891227938" "1830119491" "2983367336" "2440656643" "0860213949" "4639522473" "7190702179" "8609437027"
[57] "7053921717" "6293176752" "3846748184" "6766940513" "2000568127" "1452635608" "2778577134" "2757789609"
[65] "1736371787" "2146844090" "1224953430" "1465495853" "7105079227" "9689258923" "5420199561" "1212902196"
[73] "0864034418" "1598136297" "7477130996" "0518707211" "3499999983" "7297804995" "1059731732" "8160963185"
[81] "9502445945" "5346908302" "6425223082" "5334468503" "5261931188" "1710100031" "3783875288" "6587533208"
[89] "3814206171" "7766914730" "3598253490" "4287554687" "3115956286" "3882353787" "5937519577" "8185778053"
[97] "2171226806" "6130019278" "7661119590" "9216420198"

R: Convert time series xyz to raster

I have a time series dt (time, lat, value) composed of 204 rows and 19 columns.
I would like to plot my time series as a raster plot using the raster and oceanmap packages.
I am trying to convert the data frame to a raster, but I am running into some problems.
Here is my code:
library(raster)
dt <- t(data.table::data.table(tb)) #tb is the previous matrix with values
rownames(dt) <- tm.mtx #my time as Dates
colnames(dt) <- cdt #lat coordinates
dt[is.nan(dt)] <- NA
dt.melt <- reshape2::melt(dt, id=c(row.names(dt)))
spg <- dt.melt
colnames(spg) <- c("x","y","z")
coordinates(spg) <- ~ x + y
It returns this error:
Error in .local(obj, ...) :
cannot derive coordinates from non-numeric matrix
I understood that I have to convert the dates to numeric, so:
rownames(dt) <- tm #vector of time in seconds from "2002-08-15"
dt.melt <- reshape2::melt(dt, id=c(row.names(dt)))
spg <- dt.melt
colnames(spg) <- c("x","y","z")
coordinates(spg) <- ~ x + y
gridded(spg) = T
returning:
suggested tolerance minimum: 0.107308
Error in points2grid(points, tolerance, round) :
dimension 1 : coordinate intervals are not constant
From here I do not know what to do.
I attach some partial data below:
head(dt)
-46.4375070233063 -46.4791737000479 -46.5208403767895 -46.5625070535311
2002-08-15 0.1736002 0.1750216 0.1732153 0.1774634
2002-09-15 0.1544965 0.1651486 0.1691601 0.1751704
2002-10-15 0.1882919 0.1835454 0.1881429 0.1941372
2002-11-15 0.2535837 0.2623392 0.2630876 0.2709922
2002-12-15 0.2187145 0.2208886 0.2157844 0.2170794
2003-01-15 0.1413760 0.1334794 0.1383164 0.1370846
-46.6041737302728 -46.6458404070144 -46.687507083756 -46.7291737604976
2002-08-15 0.1773711 0.1833218 0.1922676 0.1941034
2002-09-15 0.1771452 0.1796460 0.1761335 0.1702302
2002-10-15 0.1968857 0.2063893 0.2040608 0.1979190
2002-11-15 0.2707081 0.2532419 0.2730597 0.2807892
2002-12-15 0.2154748 0.2144032 0.2160844 0.2267726
2003-01-15 0.1385788 0.1351013 0.1375451 0.1544153
-46.7708404372393 -46.8125071139809 -46.8541737907225 -46.8958404674641
2002-08-15 0.1936037 0.1870466 0.1829850 0.1970123
2002-09-15 0.1650944 0.1764924 0.1801598 0.1772189
2002-10-15 0.1889605 0.1884651 0.1911619 0.1967313
2002-11-15 0.2862864 0.3205168 0.3466025 0.3267133
2002-12-15 0.2262225 0.2184253 0.2562738 0.2705393
2003-01-15 0.2592504 0.3460798 0.4375293 0.5184022
-46.9375071442058 -46.9791738209474 -47.020840497689 -47.0625071744306
2002-08-15 0.2062590 0.2063133 0.2125093 0.2129866
2002-09-15 0.1775195 0.1759667 0.1642771 0.1685364
2002-10-15 0.2002673 0.1997982 0.1997716 0.1917371
2002-11-15 0.2311774 0.3478559 0.3199840 0.3296006
2002-12-15 0.3308617 0.3285339 0.5459298 0.6657989
2003-01-15 0.6338662 0.6308048 0.5857707 0.4804939
-47.1041738511723 -47.1458405279139 -47.1875072046555
2002-08-15 0.2157603 0.2188090 0.2243740
2002-09-15 0.1742391 0.1754706 0.1729936
2002-10-15 0.2043249 0.2011236 0.2345921
2002-11-15 0.3316522 0.3430276 0.3336774
2002-12-15 0.6513002 0.5746747 0.4276562
2003-01-15 0.4010864 0.4328661 0.4627818
tm
[1] 1 2678401 5270401 7948801 10540801 13219201 15897601 18316801
[9] 20995201 23587201 26265601 28857601 31536001 34214401 36806401 39484801
[17] 42076801 44755201 47433601 49939201 52617601 55209601 57888001 60480001
[25] 63158401 65836801 68428801 71107201 73699201 76377601 79056001 81475201
[33] 84153601 86745601 89424001 92016001 94694401 97372801 99964801 102643201
[41] 105235201 107913601 110592001 113011201 115689601 118281601 120960001 123552001
[49] 126230401 128908801 131500801 134179201 136771201 139449601 142128001 144547201
[57] 147225601 149817601 152496001 155088001 157766401 160444801 163036801 165715201
[65] 168307201 170985601 173664001 176169601 178848001 181440001 184118401 186710401
[73] 189388801 192067201 194659201 197337601 199929601 202608001 205286401 207705601
[81] 210384001 212976000 215654401 218246401 220924801 223603201 226195201 228873601
[89] 231465601 234144001 236822401 239241601 241920001 244512001 247190401 249782401
[97] 252460801 255139201 257731201 260409601 263001601 265680001 268358401 270777601
[105] 273456001 276048001 278726401 281318401 283996801 286675201 289267201 291945601
[113] 294537601 297216001 299894401 302400001 305078401 307670401 310348801 312940801
[121] 315619201 318297601 320889601 323568001 326160001 328838401 331516801 333936001
[129] 336614401 339206401 341884801 344476801 347155201 349833601 352425601 355104001
[137] 357696001 360374401 363052801 365472001 368150401 370742401 373420801 376012801
[145] 378691201 381369601 383961601 386640001 389232001 391910401 394588801 397008001
[153] 399686401 402278401 404956801 407548801 410227201 412905601 415497601 418176001
[161] 420768001 423446400 426124800 428630400 431308800 433900800 436579200 439171200
[169] 441849600 444528000 447120000 449798400 452390400 455068400 457747200 460166400
[177] 462844800 465436800 468115200 470707200 473385600 476064000 478656000 481334400
[185] 483926400 486604800 489283200 491702400 494380800 496972800 499651200 502243200
[193] 504921600 507600000 510192000 512870400 515462400 518140800 520819200 523238400
[201] 525916800 528508800 531187200 533779200
data
http://www.filedropper.com/df
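No answer is recorded here, but one possible direction (a sketch only, not a verified solution): the points2grid() error likely comes from the irregular spacing of the monthly time stamps, so it may be easier to build the raster directly from the value matrix and set its extent afterwards, instead of going through melt() and coordinates():
library(raster)
# dt is assumed to be the 204 x 19 matrix shown above (rows = dates, columns = latitude)
r <- raster(dt)                                  # raster() accepts a plain matrix
extent(r) <- c(range(as.numeric(colnames(dt))),  # x: latitude
               range(tm))                        # y: time in seconds since 2002-08-15
plot(r)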
