I am trying to use R to replicate the following Node.js WebSocket script. I can establish a connection with the server, but I never receive back any messages with the data I expect. For context, I am using FinancialModelingPrep's WebSocket service to stream real-time stock prices. Here is the Node.js script I am trying to replicate:
const WebSocket = require('ws');
const ws = new WebSocket('wss://websockets.financialmodelingprep.com', null, null, null, {rejectUnauthorized: false});
const login = {
'event':'login',
'data': {
'apiKey': "<apiKey>",
}
}
const subscribe = {
'event':'subscribe',
'data': {
'ticker': "aapl",
}
}
ws.on('open', function open() {
ws.send(JSON.stringify(login));
ws.send(JSON.stringify(subscribe));
});
ws.on('message', function(data, flags) {
console.log(data)
});
Here is my attempt to replicate it in R using the 'websocket' library:
login <- list(
'event' = 'login',
'data' = list(
'apiKey' = "<apiKey>"
)
)
subscribe <- list(
'event' = 'subscribe',
'data' = list(
'ticker' = "btcusd"
)
)
unsubscribe <- list(
'event' = 'unsubscribe',
'data' = list(
'ticker' = "btcusd"
)
)
ws <- websocket::WebSocket$new(
url = 'wss://crypto.financialmodelingprep.com',
protocols = character(0),
headers = NULL,
autoConnect = F,
accessLogChannels = c("all"),
maxMessageSize = 32 * 1024 * 1024
)
ws$onOpen(function(event) {
cat("Connection opened\n")
ws$send(toJSON(login))
cat("Login in then pause...\n")
Sys.sleep(1)
ws$send(toJSON(subscribe))
cat("Subscribe then pause...\n")
Sys.sleep(1)
cat("Unsubscribing...\n")
ws$send(toJSON(unsubscribe))
})
ws$onMessage(function(event) {
cat("Client got msg: ", fromJSON(event$data), "\n")
})
ws$onClose(function(event) {
cat("Client disconnected with code ", event$code,
" and reason ", event$reason, "\n", sep = "")
})
ws$onError(function(event) {
cat("Client failed to connect: ", event$message, "\n")
})
ws$connect()
When I run the R script, I see the "Connection opened" message as well as the logging-in, subscribing and unsubscribing messages; however, the onMessage handler never seems to be triggered. Below is the output I receive:
Connection opened
Login in then pause...
[2022-11-19 12:05:47] [frame_header] Dispatching write containing 1 message(s) containing 6 header bytes and 74 payload bytes
[2022-11-19 12:05:47] [frame_header] Header Bytes:
[0] (6) 81 CA DF B7 07 F8
[2022-11-19 12:05:47] [frame_payload] Payload Bytes:
[0] (74) [1] ��b���s���%���n���+ڻ�s���|ھ�n���%�0���4��aλ�aͻ�4��2���a���4���%���
Subscribe then pause...
[2022-11-19 12:05:48] [frame_header] Dispatching write containing 1 message(s) containing 6 header bytes and 52 payload bytes
[2022-11-19 12:05:48] [frame_header] Header Bytes:
[0] (6) 81 B4 05 00 D3 E9
[2022-11-19 12:05:48] [frame_payload] Payload Bytes:
[0] (52) [1] ~"��`n��?[�pb��wi��']��aa��':��qi��`r��^"��fu��']��
Unsubscribing...
[2022-11-19 12:05:49] [frame_header] Dispatching write containing 1 message(s) containing 6 header bytes and 54 payload bytes
[2022-11-19 12:05:49] [frame_header] Header Bytes:
[0] (6) 81 B6 E1 AF 95 38
[2022-11-19 12:05:49] [frame_payload] Payload Bytes:
[0] (54) [1] ���N������M���Z���Q�ʷe͍�Y�η���Q���JÕ����M�˷e��
[2022-11-19 12:06:49] [error] handle_read_frame error: websocketpp.transport:7 (End of File)
[2022-11-19 12:06:49] [disconnect] Disconnect close local:[1006,End of File] remote:[1006]
Client disconnected with code 1006 and reason
Lastly, here is the output I am supposed to receive (according to FinancialModelingPrep's documentation):
wss://crypto.financialmodelingprep.com
{
"s": "btcusd",
"t": 16487238632060000000,
"e": "binance",
"type": "Q",
"bs": 0.00689248,
"bp": 47244.8,
"as": 1.72784126,
"ap": 47244.9
}
I have a feeling I'm doing something wrong. I am out of ideas on how to achieve the above output using R, and unfortunately I am not experienced enough to do it via Node.js instead. Any help would be appreciated.
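One difference worth checking is how the two scripts serialise the messages. With jsonlite, toJSON() boxes scalars into length-one arrays unless auto_unbox = TRUE, so the login/subscribe frames sent from R are not the same JSON that JSON.stringify() produces in Node.js. A quick comparison (assuming the toJSON()/fromJSON() calls above come from jsonlite):

jsonlite::toJSON(login)
#> {"event":["login"],"data":{"apiKey":["<apiKey>"]}}    # scalars boxed as arrays
jsonlite::toJSON(login, auto_unbox = TRUE)
#> {"event":"login","data":{"apiKey":"<apiKey>"}}        # matches JSON.stringify() in Node.js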
I'm trying to write a Telegram bot with a payment-processing function. These methods are implemented in the Telegram Bot API, but unfortunately they are not implemented in the R package telegram.bot.
My code:
library(telegram.bot)
library(glue)
library(httr)
bot_token <- "your_bot_token"
provider_token <- "your:TEST:token"
updater <- Updater(token = bot_token)
currency <- "your_currency_code"
## Start
start <- function(bot, update) {
# create keyboard
RKM <- ReplyKeyboardMarkup(
keyboard = list(
list(
KeyboardButton(text = "donate")
)
),
resize_keyboard = TRUE,
one_time_keyboard = TRUE
)
# send keyboard
bot$sendMessage(update$message$chat_id,
text = 'Command',
reply_markup = RKM)
}
## Send invoice
send_invoice <- function(bot, update) {
chat_id <- update$from_chat_id()
title <- "Title"
desc <- "Detail"
payload <- "specialItem-001"
prices <- '[{"label": "Payment", "amount": 24900}]'
  invoice <- glue("https://api.telegram.org/bot{bot_token}/sendInvoice?chat_id={chat_id}&title={title}&description={desc}&payload={payload}&provider_token={provider_token}&currency={currency}&prices={prices}")
httr::POST(
url = invoice
)
}
## Accept pre checkout query
pre_checkout <- function(bot, update) {
chat_id <- update$pre_checkout_query$from$id
invoice_id <- update$pre_checkout_query$id
accept_invoice <- glue("https://api.telegram.org/bot{bot_token}/answerPreCheckoutQuery?pre_checkout_query_id={invoice_id}&ok=TRUE")
httr::POST(
url = accept_invoice
)
}
## View payment info
success_pay <- function(bot, update) {
str(update)
}
## Message filter
MessageFilters$invoice <- BaseFilter(function(message) {
message$text == "donate"
}
)
## Send RKM
h_start <- CommandHandler('start', start)
## Invoice handler
invoice_hendler <- MessageHandler(send_invoice, filters = MessageFilters$invoice)
## Pre checkout handler
check_update <- function(update) {
TRUE
}
handle_update <- function(update, dispatcher) {
self$callback(dispatcher$bot, update)
}
pre_checkout_handler <- Handler(pre_checkout,
check_update = check_update,
handle_update = handle_update,
handlername = "FooHandler")
## Successful payment handler
payment_handler <- MessageHandler(success_pay, filters = MessageFilters$successful_payment)
## add handlers to dispatcher
updater <- updater +
h_start +
invoice_hendler +
pre_checkout_handler +
payment_handler
## Start polling
updater$start_polling(verbose = TRUE, clean = TRUE)
So I had to write a custom handler. The payment goes through, but the bot crashes when it processes the Update containing the successful-payment message. Error:
handler$check_update(update)) { : argument is of length zero
I think the problem is with my handler configuration (check_update and handle_update). How can I fix it?
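For reference, the error itself is generic R behaviour rather than anything specific to telegram.bot: if() aborts with "argument is of length zero" whenever its condition evaluates to NULL or a zero-length vector, which is what a text-based filter returns when it is applied to a message that has no text field (such as a successful-payment update). A minimal sketch of the mechanism (names are illustrative, not telegram.bot API):

# Comparing a missing field against a string yields logical(0),
# and `if (logical(0))` stops with "argument is of length zero".
is_donate <- function(message) message$text == "donate"

msg <- list(successful_payment = list(total_amount = 24900))  # no $text field
res <- is_donate(msg)   # NULL == "donate"  ->  logical(0)
length(res)             # 0
# if (res) "yes"        # Error: argument is of length zero

# Guarding with isTRUE() avoids the crash:
if (isTRUE(res)) "donate message" else "not a donate message"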
I have been trying to download all the YouTube comments on popular videos using python requests, but it has been throwing up the following error after about a quarter of the total comments:
{'error': {'code': 400, 'message': "The API server failed to successfully process the request. While this can be a transient error, it usually indicates that the request's input is invalid. Check the structure of the commentThread resource in the request body to ensure that it is valid.", 'errors': [{'message': "The API server failed to successfully process the request. While this can be a transient error, it usually indicates that the request's input is invalid. Check the structure of the commentThread resource in the request body to ensure that it is valid.", 'domain': 'youtube.commentThread', 'reason': 'processingFailure', 'location': 'body', 'locationType': 'other'}]}}
I found this thread detailing the same issue, and it seems that it is not possible to download all the comments on popular videos.
This is my code:
import argparse
import urllib
import requests
import json
import time
from requests.exceptions import ChunkedEncodingError  # used in the retry blocks below
start_time = time.time()
class YouTubeApi():
YOUTUBE_COMMENTS_URL = 'https://www.googleapis.com/youtube/v3/commentThreads'
comment_counter = 0
with open("API_keys.txt", "r") as f:
key_list = f.readlines()
        key_list = [key.strip('\n') for key in key_list]
def format_comments(self, results, likes_required):
comments_list = []
try:
for item in results["items"]:
comment = item["snippet"]["topLevelComment"]
likes = comment["snippet"]["likeCount"]
if likes < likes_required:
continue
author = comment["snippet"]["authorDisplayName"]
text = comment["snippet"]["textDisplay"]
str = "Comment by {}:\n \"{}\"\n\n".format(author, text)
str = str.encode('ascii', 'replace').decode()
comments_list.append(str)
self.comment_counter += 1
print("Comments downloaded:", self.comment_counter, end="\r")
except(KeyError):
print(results)
return comments_list
def get_video_comments(self, video_id, likes_required):
with open("API_keys.txt", "r") as f:
key_list = f.readlines()
            key_list = [key.strip('\n') for key in key_list]
if self.comment_counter <= 900000:
key = self.key_list[0]
elif self.comment_counter <= 1800000:
key = self.key_list[1]
elif self.comment_counter <= 2700000:
key = self.key_list[2]
elif self.comment_counter <= 3600000:
key = self.key_list[3]
elif self.comment_counter <= 4500000:
key = self.key_list[4]
params = {
'part': 'snippet,replies',
'maxResults': 100,
'videoId': video_id,
'textFormat': 'plainText',
'key': key
}
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36'
}
try:
#data = self.openURL(self.YOUTUBE_COMMENTS_URL, params)
comments_data = requests.get(self.YOUTUBE_COMMENTS_URL, params=params, headers=headers)
except ChunkedEncodingError:
tries = 5
print("Chunked Error. Retrying...")
for n in range(tries):
try:
x = 0
x += 1
print("Trying", x, "times")
response = session.post("https://www.youtube.com/comment_service_ajax", params=params, data=data, headers=headers)
comments_data = json.loads(response.text)
except ChunkedEncodingError as c:
print(c)
results = comments_data.json()
nextPageToken = results.get("nextPageToken")
        comments_list = []
        comments_list += self.format_comments(results, likes_required)
while nextPageToken:
params.update({'pageToken': nextPageToken})
try:
comments_data = requests.get(self.YOUTUBE_COMMENTS_URL, params=params, headers=headers)
except ChunkedEncodingError as c:
tries = 5
print("Chunked Error. Retrying...")
for n in range(tries):
try:
x = 0
x += 1
print("Trying", x, "times")
response = session.post("https://www.youtube.com/comment_service_ajax", params=params, data=data, headers=headers)
comments_data = json.loads(response.text)
except ChunkedEncodingError as c:
print(c)
results = comments_data.json()
nextPageToken = results.get("nextPageToken")
            comments_list += self.format_comments(results, likes_required)
        return comments_list
def get_video_id_list(self, filename):
try:
with open(filename, 'r') as file:
URL_list = file.readlines()
except FileNotFoundError:
exit("File \"" + filename + "\" not found")
list = []
for url in URL_list:
if url == "\n": # ignore empty lines
continue
if url[-1] == '\n': # delete '\n' at the end of line
url = url[:-1]
            if url.find('=') != -1:  # get the id after '='
id = url[url.find('=') + 1:]
list.append(id)
else:
print("Wrong URL")
return list
def main():
yt = YouTubeApi()
parser = argparse.ArgumentParser(add_help=False, description=("Download youtube comments from many videos into txt file"))
required = parser.add_argument_group("required arguments")
optional = parser.add_argument_group("optional arguments")
    required.add_argument("--key", '-k', help="Your API key, get one here: https://console.developers.google.com/apis/credentials")
optional.add_argument("--likes", '-l', help="The amount of likes a comment needs to be saved", type=int)
optional.add_argument("--input", '-i', help="URL list file name")
optional.add_argument("--output", '-o', help="Output file name")
optional.add_argument("--help", '-h', help="Help", action='help')
args = parser.parse_args()
# --------------------------------------------------------------------- #
likes = 0
if args.likes:
likes = args.likes
input_file = "URL_list.txt"
if args.input:
input_file = args.input
output_file = "Comments.txt"
if args.output:
output_file = args.output
list = yt.get_video_id_list(input_file)
if not list:
exit("No URLs in input file")
try:
vid_counter = 0
with open(output_file, "a") as f:
for video_id in list:
vid_counter += 1
print("Downloading comments for video ", vid_counter, ", id: ", video_id, sep='')
comments = yt.get_video_comments(video_id, likes)
if comments:
for comment in comments:
f.write(comment)
print('\nDone!')
except KeyboardInterrupt:
exit("User Aborted the Operation")
# --------------------------------------------------------------------- #
if __name__ == '__main__':
main()
The next best method would be to randomly sample them. Does anyone know if this is possible with the API V3?
Even if the API returns a processingFailure error, you could still catch that (or any other API error, for that matter) in order to terminate your pagination loop gracefully. That way your script will still return the top-level comments it fetched from the API before the first API error occurred.
The error response provided by the YouTube Data API is (usually) of the following form:
{
"error": {
"errors": [
{
"domain": <string>,
"reason": <string>,
"message": <string>,
"locationType": <string>,
"location": <string>
}
],
"code": <integer>,
"message": <string>
}
}
Hence, you could define the following function:
def is_error_response(response):
error = response.get('error')
if error is None:
return False
print("API Error: "
f"code={error['code']} "
f"domain={error['errors'][0]['domain']} "
f"reason={error['errors'][0]['reason']} "
f"message={error['errors'][0]['message']!r}")
return True
which you would invoke after each statement of the form results = comments_data.json(). At the first occurrence of that statement, you would have:
results = comments_data.json()
if is_error_response(results):
return []
nextPageToken = results.get("nextPageToken")
For the second instance of that statement:
results = comments_data.json()
if is_error_response(results):
return comments_list
nextPageToken = results.get("nextPageToken")
Note that is_error_response above prints an error message to stdout when its argument is an API error response; this is so that the user of your script is informed about the API call failure.
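Put together, the paginated part of get_video_comments would then look roughly like this (a sketch assembled from the snippets above and the original code, not a drop-in replacement):

results = comments_data.json()
if is_error_response(results):
    return []
nextPageToken = results.get("nextPageToken")
comments_list = self.format_comments(results, likes_required)

while nextPageToken:
    params.update({'pageToken': nextPageToken})
    comments_data = requests.get(self.YOUTUBE_COMMENTS_URL, params=params, headers=headers)
    results = comments_data.json()
    if is_error_response(results):
        return comments_list  # keep everything fetched before the error
    nextPageToken = results.get("nextPageToken")
    comments_list += self.format_comments(results, likes_required)

return comments_list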
I am trying to use an AWS API in R. I am using R so that I can add it to an existing Shiny app (which is a web application built in R).
This API: https://docs.aws.amazon.com/connect/latest/APIReference/API_GetCurrentMetricData.html
Using signature version 4:
https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html
I have added the 'Authorization' details to the header as per the AWS documentation. The exact same header works perfectly in Python, but strangely not in R, where it returns an 'InvalidSignatureException' error.
My guess is that I have got something wrong with the arguments to the POST function. I've tried quite a few different arrangements of the POST arguments, but nothing seems to help.
This is the Python code, which works:
import requests
url = "https://connect.eu-central-1.amazonaws.com/metrics/current/XXXXXXX"
payload = "{\r\n \"InstanceId\" : \"XXXXXXX\",\r\n \"Filters\" : {\r\n \"Queues\" : [\r\n \"XXXXXXX\",\r\n \"arn:aws:connect:eu-central-1:XXXXXXX:instance/XXXXXXX/queue/XXXXXXX\"\r\n ]\r\n },\r\n \"CurrentMetrics\" : [\r\n {\r\n \"Name\" : \"AGENTS_ONLINE\",\r\n \"Unit\" : \"COUNT\"\r\n },\r\n {\r\n \"Name\" : \"AGENTS_AVAILABLE\",\r\n \"Unit\" : \"COUNT\"\r\n },\r\n {\r\n \"Name\" : \"OLDEST_CONTACT_AGE\",\r\n \"Unit\" : \"SECONDS\"\r\n },\r\n {\r\n \"Name\": \"AGENTS_ERROR\",\r\n \"Unit\": \"COUNT\"\r\n }\r\n ]\r\n}\r\n "
headers = {
'Content-Type': "application/json",
'X-Amz-Content-Sha256': "XXXXXXX",
'Host': "connect.eu-central-1.amazonaws.com",
'X-Amz-Date': "20190724T162517Z",
'Authorization': "AWS4-HMAC-SHA256 Credential=XXXXXXX/20190724/eu-central-1/connect/aws4_request, SignedHeaders=content-type;host;x-amz-content-sha256;x-amz-date, Signature=XXXXXXX",
}
response = requests.request("POST", url, data=payload, headers=headers)
print(response.text)
My attempt to write it in R (returns x-amzn-ErrorType: InvalidSignatureException):
library(httr)
library(jsonlite)
request_body_json <-
'{
"InstanceId" : "xxxxxxxxxx",
"Filters" : {
"Queues" : [
"xxxxxxxxxx",
"arn:aws:connect:eu-central-1:xxxxxxxxxx:instance/xxxxxxxxxx/queue/xxxxxxxxxx"
]
},
"CurrentMetrics" : [
{
"Name" : "AGENTS_ONLINE",
"Unit" : "COUNT"
},
{
"Name" : "AGENTS_AVAILABLE",
"Unit" : "COUNT"
},
{
"Name" : "OLDEST_CONTACT_AGE",
"Unit" : "SECONDS"
},
{
"Name": "AGENTS_ERROR",
"Unit": "COUNT"
}
]
}'
hdrs <- list(
'Content-Type'= "application/json",
'X-Amz-Content-Sha256'= "XXXXXXX",
'Host'= "connect.eu-central-1.amazonaws.com",
'X-Amz-Date'= "20190724T162517Z",
'Authorization'= "AWS4-HMAC-SHA256 Credential=XXXXXXX/20190724/eu-central-1/connect/aws4_request, SignedHeaders=content-type;host;x-amz-content-sha256;x-amz-date, Signature=XXXXXXX"
)
a <- POST(
url = "https://connect.eu-central-1.amazonaws.com/metrics/current/xxxxxxxxxx"
,do.call(add_headers,hdrs)
,verbose(info = TRUE)
,body = request_body_json
,encode = "json"
)
Fixed: the Signature V4 Authorization header has to be computed fresh for each request (it signs the exact date, headers, and payload), so I added the signing steps with the aws.signature package instead of reusing a hard-coded signature.
# Libraries used
library(httr)
library(jsonlite)
library(aws.signature)
#gather variables
host <- "connect.eu-central-1.amazonaws.com"
sha <- "xxxx"
amzdate <- format(Sys.time(), "%Y%m%dT%H%M%SZ",tz = "UTC")
payload <-
'{
"InstanceId" : "xxxx-xxxx-xxxx-xxxx-xxxx",
"Filters" : {
"Queues" : [
"xxxx-xxxx-xxxx-xxxx"
]
},
"CurrentMetrics" : [
{
"Name" : "OLDEST_CONTACT_AGE",
"Unit" : "SECONDS"
},
{
"Name": "CONTACTS_IN_QUEUE",
"Unit": "COUNT"
}
]
}'
content_type <-"application/json"
url_api <-"https://connect.eu-central-1.amazonaws.com/metrics/current/xxxx-xxxx-xxxx-xxxx"
region <- "eu-central-1"
service <- "connect"
verb <- "POST"
action <- "/metrics/current/xxxx-xxxx-xxxx-xxx-xxxx"
key <- "xxxx"
secret <- "xxxx"
#headers so far (can't add signature yet)
hdrs <- list('Content-Type' = content_type,
Host = host,
'x-amz-content-sha256' = sha,
'x-amz-date' = amzdate)
#get v4 signature
sig <- signature_v4_auth(datetime = amzdate,
region = region,
service = service,
verb = verb,
action = action,
query_args = list(),
canonical_headers = hdrs,
request_body = payload,
key = key,
secret = secret,
session_token = NULL,
query = FALSE,
algorithm = "AWS4-HMAC-SHA256")
#add signature header to header
auth <- sig$SignatureHeader
a <- httr::VERB(verb = "POST"
,url = url_api
,encode = 'json'
,content_type_json()
,body = payload
,httr::add_headers(
'Content-Type' = content_type,
Authorization = auth,
Host = host,
`X-Amz-Content-Sha256` = sha,
`X-Amz-Date` = amzdate)
)
results <- content(a,"parsed")
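One detail worth making explicit: the x-amz-content-sha256 value that gets signed and sent should not stay a literal placeholder; it should be the hex-encoded SHA-256 of the exact payload string being posted. A small sketch of computing it in R (the digest package is an assumption here, it is not used in the code above):

library(digest)
# hex SHA-256 of the exact JSON payload that will be sent
sha <- digest(payload, algo = "sha256", serialize = FALSE)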
I am writing a Let's Encrypt ACME client in Elixir (as an escript). The basics seem to be almost done, except that whenever I submit the registration I get an error back saying the key is too large. I have included the full source code as well as the output from IO.inspect.
Code:
defmodule NginxDockerCerts do
  @base "https://acme-staging.api.letsencrypt.org"
def main(_args) do
    { :ok, %{ body: body } } = HTTPoison.get(@base <> "/directory")
endpoints = Poison.decode!(body)
response = send_request(
endpoints["new-reg"],
      %{ resource: "new-reg", contact: ["mailto:some@dude.com"] } )
IO.inspect(response)
end
defp send_request(url, payload) do
[private_pem] = File.read!("acme_key/private.key") |> :public_key.pem_decode
private_key = :public_key.pem_entry_decode(private_pem)
IO.inspect(private_key)
header = generate_header(private_key)
request = %{
payload: b64(payload),
header: header,
protected: Map.merge(header, %{ nonce: nonce() }) |> b64
}
signature = "#{request[:payload]}.#{request[:protected]}"
|> :public_key.sign(:sha256, private_key)
|> b64
request = Map.put(request, :signature, signature)
IO.inspect Poison.encode!(request)
HTTPoison.post(url, Poison.encode!(request))
end
def nonce do
    { :ok, %{ headers: le_headers } } = HTTPoison.head(@base <> "/directory")
%{ "Replay-Nonce" => ret } = Enum.into(le_headers, %{})
ret
end
# From the erlang docs:
# Key = public_key:pem_entry_decode(RSAEntry, "abcd1234").
# 'RSAPrivateKey'{version = 'two-prime',
# modulus = 1112355156729921663373...2737107,
# publicExponent = 65537,
# privateExponent = 58064406231183...2239766033,
# prime1 = 11034766614656598484098...7326883017,
# prime2 = 10080459293561036618240...77738643771,
# exponent1 = 77928819327425934607...22152984217,
# exponent2 = 36287623121853605733...20588523793,
# coefficient = 924840412626098444...41820968343,
# otherPrimeInfos = asn1_NOVALUE}
defp generate_header(tuple) do
modulus = elem(tuple, 2) |> Integer.to_string |> b64
exponent = elem(tuple, 3) |> Integer.to_string |> b64
%{
alg: "RS256",
jwk: %{ e: exponent, kty: "RSA", n: modulus }
}
end
defp b64(map) when is_map(map) do
map |> Poison.encode! |> b64
end
defp b64(str) when is_binary(str) do
Base.url_encode64(str, padding: false)
end
end
IO.inspect(private_key):
{:RSAPrivateKey, :"two-prime",
26481417694003873198106692499951294632119767655743036419434387596516366789190701823748669795645320777784137996798720069366288226752787622864701474496474449030506709232606243234272615772772466937031758607350665927756585455359455675325206612466347661342624412526201816026642296038870194629664917766048486662184822018742110266572261560976195129025393833146676682160704214817952205520453818572377683628606127111066471430372867661099887369197771195456883038219184710367619732291239260771178775267372942936652670907351820071078279876378927060202383261106965701867067167942374432028957977689700100770593457365162852102994011,
65537,
17914363694762679375590280482557038734074338145129736174308002717419978357245064091366667308389692794651262006012958394118656461740464764327121166238936702897104154200338706284869255844574321706109571964796859390986591027409882468918977602937203771390592387954119924829351148128283413627511087311865963595647490630788471521560987974810414561435248783963374268078499217692971351713183762668005601250077516937260913883929834502151959055904643039470485089018275006363270281504396441774935380614604745759736941304083977023940167676210942357786129557579261694171244151039301867300063202512362347353203092947886172660735393,
178446771914665404540695905367251634656603255428643793397732858767097609718259368256180025741118899743098924191892492358642536804962239144370164933768905841306132489144152437556760519068703335707439676521498046102518700677420206003309978859591990308609070226460982935297571405985650480509898548679141561783409,
148399533428755367732844203564110326749419930547476575029427037695010307782491560655098073907520278149390027076214510851700665327903331461330257316301355625227675837876554040085530270141360998128496771695290961076657330075330485591211033119775343431677271581646470921917040927840058534430663755569847957098379,
173496648605071408699339341045453818586204782529834351152938645769270680587115197307101867039967578203618114203933621017140604584820619131790019979779680353116338191796637764448772807948772568899131006122851732603054596464051422956877924576259853677682245842193987696327661510566545050393063851309736807837969,
104457312306054806733354226690510299270297554298879135185963633915322955098861989175280059316535971303378417368095146254784372068320316518037537578135981756067227887713555921436525884491068010508044075485229218997318755721273030971941897994237703401844823768755563271113340785238716148920776808645385905807487,
167919839448914714974178630755108847455535988232500360818424531493548836346962617553207612824957810666270161293456074716336654588650394703385074422991192683242596666041121521405849960615014764544853974028554205453198599086267359147206158727144128096959265897638538959277458154234233457521650291895863656575525,
:asn1_NOVALUE}
IO.inspect Poison.encode!(request):
"{\"signature\":\"bzWgP-iSDaFG1tl0nbm4jfWEeQbwpfPEGqKdyslskweSpZCMXdgP7NPCAYlbX8W5qExPA9wGtI0uR2tfRA-ALE6prvRxAwzh3BfwPNscwgXzTNUy79KLkW78eFMLrUvU8shtkwxXlmmPCxGd2PAMM5vXUHM_ovVhTHKqojdy5ECzpw6u03k2wJZzLIn68o-G4ZuOnThl2HzDky29CCYSZnkDk3uE2L5rt1fyqG2II6DhKYKlXB8fittUSJk4xDr7ufu4kKOlMqXB2JyfR6kpEVigqUsjSzAWCzdP3WXgk-M8A2ElqGuwzXgXiXiOMD04SIOCSxfe1Qu6KFeQBeEgPw\",\"protected\":\"eyJub25jZSI6IkVfUVZLVjlNVklRT29odjRnbG51VDEyem1NNE5sb0tBRUZzUFl6WWJuUXMiLCJqd2siOnsibiI6Ik1qWTBPREUwTVRjMk9UUXdNRE00TnpNeE9UZ3hNRFkyT1RJME9UazVOVEV5T1RRMk16SXhNVGszTmpjMk5UVTNORE13TXpZME1UazBNelF6T0RjMU9UWTFNVFl6TmpZM09Ea3hPVEEzTURFNE1qTTNORGcyTmprM09UVTJORFV6TWpBM056YzNPRFF4TXpjNU9UWTNPVGczTWpBd05qa3pOall5T0RneU1qWTNOVEkzT0RjMk1qSTROalEzTURFME56UTBPVFkwTnpRME5Ea3dNekExTURZM01Ea3lNekkyTURZeU5ETXlNelF5TnpJMk1UVTNOekkzTnpJME5qWTVNemN3TXpFM05UZzJNRGN6TlRBMk5qVTVNamMzTlRZMU9EVTBOVFV6TlRrME5UVTJOelV6TWpVeU1EWTJNVEkwTmpZek5EYzJOakV6TkRJMk1qUTBNVEkxTWpZeU1ERTRNVFl3TWpZMk5ESXlPVFl3TXpnNE56QXhPVFEyTWprMk5qUTVNVGMzTmpZd05EZzBPRFkyTmpJeE9EUTRNakl3TVRnM05ESXhNVEF5TmpZMU56SXlOakUxTmpBNU56WXhPVFV4TWprd01qVXpPVE00TXpNeE5EWTJOelkyT0RJeE5qQTNNRFF5TVRRNE1UYzVOVEl5TURVMU1qQTBOVE00TVRnMU56SXpOemMyT0RNMk1qZzJNRFl4TWpjeE1URXdOalkwTnpFME16QXpOekk0TmpjMk5qRXdPVGs0T0Rjek5qa3hPVGMzTnpFeE9UVTBOVFk0T0RNd016Z3lNVGt4T0RRM01UQXpOamMyTVRrM016SXlPVEV5TXpreU5qQTNOekV4TnpnM056VXlOamN6TnpJNU5ESTVNelkyTlRJMk56QTVNRGN6TlRFNE1qQXdOekV3TnpneU56azROell6TnpnNU1qY3dOakF5TURJek9ETXlOakV4TURZNU5qVTNNREU0Tmpjd05qY3hOamM1TkRJek56UTBNekl3TWpnNU5UYzVOemMyT0RrM01EQXhNREEzTnpBMU9UTTBOVGN6TmpVeE5qSTROVEl4TURJNU9UUXdNVEUiLCJrdHkiOiJSU0EiLCJlIjoiTmpVMU16YyJ9LCJhbGciOiJSUzI1NiJ9\",\"payload\":\"eyJyZXNvdXJjZSI6Im5ldy1yZWciLCJjb250YWN0IjpbIm1haWx0bzpzb21lQGR1ZGUuY29tIl19\",\"header\":{\"jwk\":{\"n\":\"MjY0ODE0MTc2OTQwMDM4NzMxOTgxMDY2OTI0OTk5NTEyOTQ2MzIxMTk3Njc2NTU3NDMwMzY0MTk0MzQzODc1OTY1MTYzNjY3ODkxOTA3MDE4MjM3NDg2Njk3OTU2NDUzMjA3Nzc3ODQxMzc5OTY3OTg3MjAwNjkzNjYyODgyMjY3NTI3ODc2MjI4NjQ3MDE0NzQ0OTY0NzQ0NDkwMzA1MDY3MDkyMzI2MDYyNDMyMzQyNzI2MTU3NzI3NzI0NjY5MzcwMzE3NTg2MDczNTA2NjU5Mjc3NTY1ODU0NTUzNTk0NTU2NzUzMjUyMDY2MTI0NjYzNDc2NjEzNDI2MjQ0MTI1MjYyMDE4MTYwMjY2NDIyOTYwMzg4NzAxOTQ2Mjk2NjQ5MTc3NjYwNDg0ODY2NjIxODQ4MjIwMTg3NDIxMTAyNjY1NzIyNjE1NjA5NzYxOTUxMjkwMjUzOTM4MzMxNDY2NzY2ODIxNjA3MDQyMTQ4MTc5NTIyMDU1MjA0NTM4MTg1NzIzNzc2ODM2Mjg2MDYxMjcxMTEwNjY0NzE0MzAzNzI4Njc2NjEwOTk4ODczNjkxOTc3NzExOTU0NTY4ODMwMzgyMTkxODQ3MTAzNjc2MTk3MzIyOTEyMzkyNjA3NzExNzg3NzUyNjczNzI5NDI5MzY2NTI2NzA5MDczNTE4MjAwNzEwNzgyNzk4NzYzNzg5MjcwNjAyMDIzODMyNjExMDY5NjU3MDE4NjcwNjcxNjc5NDIzNzQ0MzIwMjg5NTc5Nzc2ODk3MDAxMDA3NzA1OTM0NTczNjUxNjI4NTIxMDI5OTQwMTE\",\"kty\":\"RSA\",\"e\":\"NjU1Mzc\"},\"alg\":\"RS256\"}}"
IO.inspect(response):
{:ok,
%HTTPoison.Response{body: "{\n \"type\": \"urn:acme:error:malformed\",\n \"detail\": \"Key too large: 4934 \\u003e 4096\",\n \"status\": 400\n}",
headers: [{"Server", "nginx"}, {"Content-Type", "application/problem+json"},
{"Content-Length", "104"},
{"Boulder-Request-Id", "5BbL5f23yYxol0_rHyC7OT88PF5BJle7lY970szFsqg"},
{"Replay-Nonce", "DFPac8kWo7OoL5LSHUI6CZr5nHjZzyeA64eXQdcniVg"},
{"Expires", "Sat, 04 Feb 2017 05:12:57 GMT"},
{"Cache-Control", "max-age=0, no-cache, no-store"}, {"Pragma", "no-cache"},
{"Date", "Sat, 04 Feb 2017 05:12:57 GMT"}, {"Connection", "close"}],
status_code: 400}}
You're converting the integers in RSAPrivateKey to strings using Integer.to_string, which produces an ASCII representation of the decimal digits. Instead, you need to convert each number into a binary containing its raw bytes, with the appropriate total number of bytes.
One way to do this is to specify a size using the <<>> syntax:
iex(1)> n = 0x12345678
305419896
iex(2)> <<n::size(32)>> # or just <<n::32>>
<<18, 52, 86, 120>>
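Applied to generate_header/1 from the question, that could look like the following sketch (using :binary.encode_unsigned/1, which returns the big-endian bytes of an unsigned integer, so no size has to be computed by hand):

defp generate_header(tuple) do
  # elem/2 positions follow the RSAPrivateKey record shown above:
  # element 2 is the modulus (n), element 3 is the public exponent (e)
  modulus  = elem(tuple, 2) |> :binary.encode_unsigned() |> b64()
  exponent = elem(tuple, 3) |> :binary.encode_unsigned() |> b64()

  %{alg: "RS256", jwk: %{e: exponent, kty: "RSA", n: modulus}}
end

For example, :binary.encode_unsigned(65537) gives <<1, 0, 1>>, which base64url-encodes to "AQAB", the usual JWK value for e.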
I built a server-client application in Lua using one ESP8266. Now I want to do it with two ESP8266s: one as the server and the other as the client. Below, the first code gets the RSSI from one AP, and the second code serves these RSSI values from a web server. How can I split these two codes across the two ESP8266s?
i=5
tmr.alarm(1,10000,1, function()
print(wifi.sta.getap(scan_cfg, 1, listap))
if i>1 then
print(i)
i=i-1
else
tmr.stop(1)
    print("Timer Durdu") -- i.e. "timer stopped"
end
end
)
function listap(t)
for bssid,v in pairs(t) do
local ssid = string.match(v, "([^,]+)")
l=string.format("%-10s",ssid)
stringtoarray = {}
index = 1
for value in string.gmatch(v,"%w+") do
stringtoarray [index] = value
index = index + 1
end
print(l)
print(stringtoarray[2])
end
end
scan_cfg = {}
scan_cfg.ssid = "VSP250s"
scan_cfg.bssid = "00:09:df:8e:03:b4"
scan_cfg.channel = 0
scan_cfg.show_hidden = 1
Second code:
srv=net.createServer(net.TCP)
srv:listen(80,function(conn)
conn:on("receive", function(client,request)
local buf = "";
local _, _, method, path, vars = string.find(request, "([A-Z]+) (.+)?(.+) HTTP");
if(method == nil)then
_, _, method, path = string.find(request, "([A-Z]+) (.+) HTTP");
end
local _GET = {}
if (vars ~= nil)then
for k, v in string.gmatch(vars, "(%w+)=(%w+)&*") do
_GET[k] = v
end
end
buf = buf.."<!DOCTYPE html><html><div id='container'><font size='5'>"
buf = buf..'<style>body{width:auto;height:auto;background-color:#ffffff;}'
buf = buf..'.button {font-size: 20px;}</style>'
buf = buf.."<head> <meta http-equiv='refresh' content=3> "
buf = buf.."<p><h1>RSSI meter<br> ESP8266</h1>";
--buf = buf.."<p>Refresh : <button class='button'>ON</button> </p>";
--buf = buf.."<p>Relay Switch : <button class='button'>ON</button> "
--buf = buf.."<button class='button'>OFF</button><br>"
buf = buf..'<B>Voltage :<font color=red>'..string.format('%s',l)..' V</font></b><br>'
buf = buf..'<B>Current :<B><font color=blue>'..string.format('%g',stringtoarray[2])..' A</font></b><br>'
--buf = buf..'<B>Power Consumption :<B><font color=DeepSkyBlue>'..'Not Available'..'</font></b><br><BR>'
-- buf = buf..'<p>Function Button :<B><font color=BlueViolet>'..button_status..'</font></b><br></p>';
buf = buf..'</head>'
buf = buf..'<br><br><details><summary><font color=red>BURAK IPEK</font><p>'
buf = buf..'<summary><p>Vestel Electronics </p></details>'
buf = buf.."</body></font></div></html>"
client:send(buf);
client:close();
collectgarbage();
end)
end)
Put each piece of code into its own Lua file and include both from init.lua by writing:
dofile("client.lua");
dofile("server.lua");
To make things easier, wrap each script in a function.
Good luck.
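A minimal sketch of that layout (file and function names are illustrative; flash the client files to one ESP8266 and the server files to the other):

-- client.lua: wrap the RSSI-scanning code in a function
function start_rssi_scan()
  -- ... the timer + wifi.sta.getap() code from the first snippet goes here ...
end

-- server.lua: wrap the HTTP server code in a function
function start_server()
  -- ... the net.createServer() code from the second snippet goes here ...
end

-- init.lua on the scanning ("client") board:
dofile("client.lua")
start_rssi_scan()

-- init.lua on the web-server board would instead do:
-- dofile("server.lua")
-- start_server()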