Pass POST data to unix domain socket through nginx - nginx

I have a unix domain socket file and it works with the nc command. Now I want to access it via nginx, but it does not work. Am I missing something?
test with nc => it works
$ echo '{ "method" : "getinfo", "params" : [], "id" : "1" }' | nc -U /home/zono/.lightning/lightning-rpc
{ "jsonrpc": "2.0", "id" : "1", "result" :
{
"id": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
}
test via nginx => it does not work
// /etc/nginx/sites-enabled/default
upstream nginx-internal-sock {
    server unix:/home/zono/.lightning/lightning-rpc;
}

server {
    listen 80;
    location / {
        proxy_pass http://nginx-internal-sock;
    }
}
$ curl -H "content-type: application/json" -X POST --data '{ "method" : "getinfo", "params" : [], "id" : "1" }' http://127.0.0.1
2019-03-20T04:25:52.551Z lightningd(30143):jcon fd 32: Invalid token in json input: 'POST / HTTP/1.0??Host: nginx-internal-sock??Connection: close??C'
Update 1
There's been a development; however, I can't get the whole response.
// install nginx-extras
apt-get install nginx-extras
// /etc/nginx/sites-enabled/default
server {
    listen 80;
    location / {
        content_by_lua '
            ngx.req.read_body()
            local body_data = ngx.req.get_body_data()
            local sock = ngx.socket.tcp()
            local ok, err = sock:connect("unix:/home/zono/.lightning/lightning-rpc")
            local bytes = sock:send(body_data)
            local line, err = sock:receive("*a")
            ngx.say(line)
            ok, err = sock:close()
        ';
    }
}
// Response is nil
$ curl -X POST --data '{ "method" : "getinfo", "params" : [], "id" : "1" }' http://127.0.0.1
nil
// /var/log/nginx/error.log
2019/03/20 07:43:39 [error] 4926#4926: *35 lua tcp socket read timed out, client: 127.0.0.1, server: , request: "POST / HTTP/1.1", host: "127.0.0.1"
// When I set sock:receive("*l"), the response is only part of the data.
$ curl -X POST --data '{ "method" : "getinfo", "params" : [], "id" : "1" }' http://127.0.0.1
{ "jsonrpc": "2.0", "id" : "1", "result" :
I'm checking the reference now. http://w3.impa.br/~diego/software/luasocket/tcp.html
'*a': reads from the socket until the connection is closed. No end-of-line translation is performed;
'*l': reads a line of text from the socket. The line is terminated by a LF character (ASCII 10), optionally preceded by a CR character (ASCII 13). The CR and LF characters are not included in the returned line. In fact, all CR characters are ignored by the pattern. This is the default pattern;
number: causes the method to read a specified number of bytes from the socket.
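(Side note: content_by_lua talks to the socket through the ngx_lua cosocket API rather than LuaSocket, but "*a" and "*l" behave the same way here, and cosockets additionally provide receiveuntil. Below is a minimal sketch, not from the original post, of why the first two patterns misbehave against lightning-rpc; the 2-second timeout is an arbitrary value, and inside a content_by_lua '...' string the backslashes in "\n\n" have to be doubled, as in the working config further down.)

-- body_data: the request body, read as in the config above
local sock = ngx.socket.tcp()
sock:settimeout(2000)  -- read timeout in ms (arbitrary value)
local ok, err = sock:connect("unix:/home/zono/.lightning/lightning-rpc")
sock:send(body_data)

-- "*a" reads until the peer closes the connection; lightningd keeps the
-- socket open, so the call blocks until the read timeout and returns nil.
-- local all = sock:receive("*a")

-- "*l" returns a single line, i.e. only the first line of the JSON reply.
-- local first_line = sock:receive("*l")

-- lightningd appears to end each JSON-RPC response with a blank line
-- (which is why reading up to "\n\n" works), so read up to that delimiter.
local reader = sock:receiveuntil("\n\n")
local reply, err, partial = reader()
sock:close()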

I found out the answer.
// install nginx-extras
apt-get install nginx-extras
// /etc/nginx/sites-enabled/default
server {
    listen 80;
    location / {
        content_by_lua '
            ngx.req.read_body()
            local body_data = ngx.req.get_body_data()
            local sock = ngx.socket.tcp()
            local ok, err = sock:connect("unix:/home/zono/.lightning/lightning-rpc")
            local bytes = sock:send(body_data)
            local readline = sock:receiveuntil("\\n\\n")
            local line, err, part = readline()
            if line then
                ngx.say(line)
            end
            ok, err = sock:close()
        ';
    }
}
// curl
$ curl -X POST --data '{ "method" : "getinfo", "params" : [], "id" : "1" }' http://127.0.0.1
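For anything beyond a quick test, a slightly hardened variant of the content_by_lua block may be worth using (a sketch; the 5-second timeout and the 502 status on socket failures are my assumptions). It checks each cosocket call instead of printing nil when something fails:

content_by_lua '
    ngx.req.read_body()
    local body_data = ngx.req.get_body_data()
    if not body_data then
        ngx.status = 400
        ngx.say("empty body")
        return
    end

    local sock = ngx.socket.tcp()
    sock:settimeout(5000)  -- ms; assumed value
    local ok, err = sock:connect("unix:/home/zono/.lightning/lightning-rpc")
    if not ok then
        ngx.status = 502
        ngx.say("connect failed: ", err)
        return
    end

    local bytes, err = sock:send(body_data)
    if not bytes then
        ngx.status = 502
        ngx.say("send failed: ", err)
        return
    end

    local readline = sock:receiveuntil("\\n\\n")
    local line, err, part = readline()
    sock:close()
    if line then
        ngx.say(line)
    else
        ngx.status = 502
        ngx.say("receive failed: ", err)
    end
';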

Related

Python Fastapi. No headers after redirect

I'm doing a test via "test_client", testing the "/me" endpoint, which redirects, depending on the incoming token, to the secured endpoint /users/user_id or /operators/operator_id. But after the redirection the headers become None.
If I do it via Swagger, everything works correctly; if I do it via curl, I don't get an answer at all.
So the main problem is that the headers become empty (no token) after the redirection.
Here is my code:
@app.api_route(path="/me",
               status_code=308,
               methods=["GET", "DELETE", "PATCH"],
               tags=['me'],)
def get_my_profile(request: fastapi.Request, token: str = fastapi.Depends(config.oauth2_scheme), ):
    token_payload = services.get_token_payload(token=token)
    if token_payload['role'] == config.USER_ROLE:
        url = f"{request.url.scheme}://{config.APP_IP}:{config.APP_PORT}/users/{token_payload['sub']}"
    elif token_payload['role'] == config.OPERATOR_ROLE:
        url = f"{request.url.scheme}://{config.APP_IP}:{config.APP_PORT}/operators/{token_payload['sub']}"
    elif token_payload['role'] == config.EMPLOYEE_ROLE:
        url = f"{request.url.scheme}://{config.APP_IP}:{config.APP_PORT}/employees/{token_payload['sub']}"
    else:
        raise fastapi.HTTPException(status_code=500, detail='Can not redirect, some error occurred')
    return fastapi.responses.RedirectResponse(url=url, status_code=308, headers=request.headers)  # No matter `request.headers` or `dict(request.headers)`

@router.get(path="/{user_id}", status_code=200, response_model=schemas.UserOut)
async def get_user(
        user_id: int = Path(default=...),
        token: str = Depends(config.oauth2_scheme),  # error during checking, no headers
        postgres_session: AsyncSession = Depends(database.get_db),):
    token_payload = services.get_token_payload(token=token)
    services.verify_access(role=token_payload["role"], true_conds=token_payload['sub'] == user_id)
    user = await crud.read_user(statement=users.select().where(users.c.id == user_id), db_session=postgres_session)
    return schemas.UserOut(**user)
Proof with curl:
david@david-ThinkPad-E480:~$ curl -X 'GET' \
> 'http://127.0.0.1:8000/me' \
> -H 'accept: */*' \
> -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiI2IiwiZXhwIjoxNjIyNjc3NzgwLCJyb2xlIjoxLCJjaGF0X3Rva2VuIjp7ImlkZW50aXR5Ijo2LCJ0b2tlbiI6ImV5SjBlWEFpT2lKS1YxUWlMQ0poYkdjaU9pSklVekkxTmlJc0ltTjBlU0k2SW5SM2FXeHBieTFtY0dFN2RqMHhJbjAuZXlKcWRHa2lPaUpUU3pWaFpEVXpZV1UzTWpnek1XWTJORGRoWm1RNU1qQmlabUl6TW1Gak1XRTBMVEUyTWpFMU9UYzNPREFpTENKbmNtRnVkSE1pT25zaWRtbGtaVzhpT250OUxDSmphR0YwSWpwN0luTmxjblpwWTJWZmMybGtJam9pU1ZNelkyVTFNR1l3TnpnNE9HWTBaV1ZrT1dJM05ESXpZMlExWTJaa01EbG1OQ0o5TENKcFpHVnVkR2wwZVNJNk5uMHNJbWx6Y3lJNklsTkxOV0ZrTlROaFpUY3lPRE14WmpZME4yRm1aRGt5TUdKbVlqTXlZV014WVRRaUxDSmxlSEFpT2pFMk1qRTJNREV6T0RBc0ltNWlaaUk2TVRZeU1UVTVOemM0TUN3aWMzVmlJam9pUVVObU9ESTNORFJrTkRCbVlUTmxNbUZpTlRkbU56WTBOREV4TmpCaU5UUmlaQ0o5LnBzeVpJbE9OeTRRX2pNeHNnREswUUI2X3l4T20xcUZ6eVEyajczSHVvaWMifX0.c66ACoJpTLRXYMeysEYwUIQJSqIMEXegXRn4vc0sIMw'
david@david-ThinkPad-E480:~$

Lua Envoy upstream proxy

I'm looking to replace some login logic that currently lives on Kong, which does permission checks against a specific URL (an upstream), with an Envoy filter in Istio.
apiVersion: networking.istio.io/v1alpha3
kind: EnvoyFilter
metadata:
  name: api-auth
  namespace: api
spec:
  workloadLabels:
    app: api
  filters:
  - listenerMatch:
      listenerType: SIDECAR_INBOUND
      listenerProtocol: HTTP
    filterName: envoy.lua
    filterType: HTTP
    filterConfig:
      inlineCode: |
        function version()
          return "v1"
        end

        function log(handle, value)
          handle:logInfo(version() .. ": " .. value)
        end

        function dump(o)
          if type(o) == 'table' then
            local s = '{ '
            for k,v in pairs(o) do
              if type(k) ~= 'number' then k = '"'..k..'"' end
              s = s .. '['..k..'] = ' .. dump(v) .. ','
            end
            return s .. '} '
          else
            return tostring(o)
          end
        end

        function is_empty(value)
          return value == nil or value == ""
        end

        function get_header(handle, header)
          return handle:headers():get(header)
        end

        function envoy_on_request(request_handle)
          local auth_host = "auth-service.services.svc.cluster.local"
          local path = "/api/v1/has-permission"
          local cluster = "outbound|8080||" .. auth_host
          local request_headers = {
            [":method"] = "POST",
            [":path"] = path,
            [":authority"] = auth_host,
            ["Authorization"] = get_header(request_handle, "Authorization")
          }
          local request_body = ""
          local timeout = 5000 --ms
          log(request_handle, "Sending auth request, headers: " .. dump(request_headers) .. ", request_body: " .. request_body .. ", timeout: " .. timeout)
          local response_headers, response_body = request_handle:httpCall(
            tostring(cluster),
            request_headers,
            request_body,
            timeout
          )
          log(request_handle, "response_headers: " .. dump(response_headers))
          log(request_handle, "response_body: " .. dump(response_body))
          if tonumber(response_headers[":status"]) ~= 200 then
            log(request_handle, "Key Authentication Failed")
            request_handle:respond(
              {[":status"] = response_headers[":status"]},
              response_body
            )
            do return end
          end
        end
So this is my Lua, but I'm still missing something: I need to send extra parameters in the body of my POST request.
Example of a working curl:
curl -i 'https://foo-api.com/list' \
-H 'Connection: keep-alive' \
-H 'Pragma: no-cache' \
-H 'Cache-Control: no-cache' \
-H 'AuthCode: cmdpby50ZWl4ZWlyYUBqdW1pYS5jb20iLCJleHAiOjE1ODUwNDg2MjIsImlzcyI6ImZpcmV3b3JrcyJ9.JkvIhmQuumS32HhSzKuAhpPvjLVwOrRJXwajMjBU9Ag' \
-H 'Accept-Language: en' \
-H 'Authorization: Bearer 6InNlcmdpby50ZWl4ZWlyYUBqdW1pYS5jb20iLCJleHAiOjE1ODUwNDg2MjIsImlzcyI6ImZpcmV3b3JrcyJ9.JkvIhmQuumS32HhSzKuAhpPvjLVwOrRJXwajMjBU9Ag' \
-H 'Accept: application/json, text/plain, */*' \
-H 'Sec-Fetch-Dest: empty' \
-H 'application: COMPANYCODE'
How am I supposed to send this kind of content inside the POST using Lua?
Thanks and best regards
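One possible approach (a minimal sketch, not from the original question: the payload fields, the content-type header, and the forwarded AuthCode header are assumptions) is to build the JSON body as a plain string and pass it as the third argument of httpCall, since Envoy's Lua sandbox does not ship a JSON library by default:

function envoy_on_request(request_handle)
  local auth_host = "auth-service.services.svc.cluster.local"
  local cluster = "outbound|8080||" .. auth_host

  -- Hypothetical payload; replace the fields with whatever the
  -- auth service actually expects.
  local request_body = '{"application": "COMPANYCODE", "path": "/list"}'

  local request_headers = {
    [":method"] = "POST",
    [":path"] = "/api/v1/has-permission",
    [":authority"] = auth_host,
    ["authorization"] = request_handle:headers():get("Authorization"),
    ["authcode"] = request_handle:headers():get("AuthCode"),
    ["content-type"] = "application/json"
  }

  -- httpCall(cluster, headers, body, timeout_ms) sends the body string
  -- with the request and returns the response headers and body.
  local response_headers, response_body = request_handle:httpCall(
    cluster, request_headers, request_body, 5000)
end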

How to resolve octave machine learning submission error?

How to resolve submission error:
curl: (6) Couldn't resolve host 'www-origin.coursera.org'
m = 15
[error] submission with curl() was not successful
!! Submission failed: Grader sent no response
Function: submitWithConfiguration>validateResponse
FileName: C:\Users\admin\Desktop\ex7\lib\submitWithConfiguration.m
LineNumber: 158
This error is most likely caused by your computer not being able to connect to the internet at the moment.
This is the function that validates the response collected from the online grader:
function response = validateResponse(resp)
  % test if the response is json or an HTML page
  isJson = length(resp) > 0 && resp(1) == '{';
  isHtml = findstr(lower(resp), '<html');
  if (isJson)
    response = resp;
  elseif (isHtml)
    % the response is html, so it's probably an error message
    printHTMLContents(resp);
    error('Grader response is an HTML message');
  else
    error('Grader sent no response');
  end
end
The statement "Grader sent no response" is printed when the response is empty, and the response can be empty when the computer is not connected.
Hope this is the reason behind your error; if not, let me know.
Using MATLAB? These steps may be helpful:
Open the file submitWithConfiguration.m and go to lines 131 and 134,
then change:
line131: json_command = sprintf('echo jsonBody=%s | curl -k -X POST -d #- %s', body, url);
line134: json_command = sprintf('echo ''jsonBody=%s'' | curl -k -X POST -d #- %s', body, url);
to:
line131: json_command = sprintf('echo jsonBody=%s | curl -k -X POST -s -d #- %s', body, url);
line134: json_command = sprintf('echo ''jsonBody=%s'' | curl -k -X POST -s -d #- %s', body, url);
(both add -s)
It looks like the code is fine and the problem is that your computer can't connect to the internet.
You can simply solve this by using a VPN.
Good luck!

Writing Curl POST in R

What is the correct way of writing this Curl POST in R?
I would like to have R read the contents of a file as "values" in the post form.
curl -X POST https://api.priceapi.com/jobs \
-d "token=token" \
-d "country=country" \
-d "source=source" \
-d "currentness=currentness" \
-d "completeness=completeness" \
-d "key=key" \
-d 'values=<values>'
So far I have this:
library(RCurl)
library(RJSONIO)

url = "https://api.priceapi.com/jobs"
file.name = ".../output 1 .txt"

results = postForm(url,
                   token = "token",
                   country = "country",
                   source = "source",
                   currentness = "currentness",
                   completeness = "completeness",
                   key = "key",
                   values = fileUpload(filename = file.name))
It returns "Error: Bad Request"
I also tried it using an httr POST request:
r = POST(url, body = list(token = "token",
                          country = "country",
                          source = "source",
                          currentness = "currentness",
                          completeness = "completeness",
                          key = "key",
                          values = upload_file(file.name)))
Here upload_file is not uploading the contents of the file; I am guessing it is passing the path to the file (as a string) into the "values" parameter.
Naturally that does not return the correct results.
The result of the httr POST request is:
Response [https://api.priceapi.com/jobs]
Date: 2016-12-13 10:11
Status: 400
Content-Type: application/json; charset=utf-8
Size: 228 B
{
"success": false,
"reason": "parameter value invalid",
"parameter": "value",
"valid values": "An array or a string containing values separated by newline",
"comment": "Make sure the parameter 'value' has a valid value!"
I could solve this by using
file=readLines(".../output 1.txt")
inputValues <- paste(file,collapse="\n")
and then passing inputValues in the values parameter.

Openresty torch GPU module loading issue

I am using OpenResty with Lua. When I loaded Torch without cutorch and cunn (GPU) it worked fine, but it does not work with the cutorch module loaded.
Here is my nginx.conf
error_log stderr notice;
daemon off;
events {}

http {
    lua_code_cache on;
    #lua_package_cpath '/usr/local/cuda/include/?.so;;';
    init_by_lua_file 'evaluate.lua';

    server {
        listen 6788;
        lua_code_cache on;
        lua_need_request_body on;
        client_body_buffer_size 10M;
        client_max_body_size 10M;

        location /ff {
            # only permit POST requests
            if ($request_method !~ ^(POST)$ ) {
                return 403;
            }
            content_by_lua '
                local body = ngx.req.get_body_data()
                if body == nil then
                    ngx.say("empty body")
                else
                    local resp = FeedForward(body)
                    ngx.say(cjson.encode({result=resp}))
                end
            ';
        }
    }
}
Here is my evaluate.lua code
-- load the required lua modules
torch = require("torch")
nn = require("nn")
gm = require("image")
cutorch = require "cutorch"
cunn = require "cunn"
cjson = require("cjson")

-- model
modelPath = './model.t7'
ninputs = 3
model = nn.Sequential()
model:add(nn.Linear(ninputs, 2))

-- let's save a dummy model
-- to demonstrate the functionality
torch.save(modelPath, model)

-- load a pretrained model
net = torch.load(modelPath)
net:cuda()
net:evaluate()

-- our end point function
-- this function is called by the ngx server
-- accepts a json body
function FeedForward(json)
    print("starting")
    -- decode and extract field x
    local data = cjson.decode(json)
    local input = torch.Tensor(data.x)
    local response = {}
    -- example checks
    if input == nil then
        print("No input given")
    elseif input:size(1) ~= ninputs then
        print("Wrong input size")
    else
        -- evaluate the input and create a response
        local output = net:forward(input:cuda()):float()
        -- from tensor to table
        for i = 1, output:size(1) do
            response[i] = output[i]
        end
    end
    -- return response
    return response
end
I am starting the server with the following command:
/usr/local/openresty/nginx/sbin/nginx -p "$(pwd)" -c "nginx.conf"
It starts up fine, but when I send a curl request like
curl -H "Content-Type: application/json" -X POST -d '{"x":[1,2,3]}' http://localhost:6788/ff
I get the following error:
2016/09/29 12:59:59 [notice] 10355#0: *1 [lua] evaluate.lua:28: FeedForward(): starting, client: 127.0.0.1, server: , request: "POST /ff HTTP/1.1", host: "localhost:6788"
THCudaCheck FAIL file=/tmp/luarocks_cutorch-scm-1-700/cutorch/lib/THC/generic/THCStorage.cu line=40 error=3 : initialization error
2016/09/29 12:59:59 [error] 10355#0: *1 lua entry thread aborted: runtime error: unknown reason
stack traceback:
coroutine 0:
[C]: in function 'resize'
/home/ubuntu/torch/install/share/lua/5.1/cutorch/Tensor.lua:14: in function 'cuda'
/rootpath/evaluate.lua:41: in function 'FeedForward'
content_by_lua(nginx.conf:31):7: in function <content_by_lua(nginx.conf:31):1>, client: 127.0.0.1, server: , request: "POST /ff HTTP/1.1", host: "localhost:6788"
Without cutorch the model runs fine: if I remove
net:cuda()
and replace the line
local output = net:forward(input:cuda()):float()
with
local output = net:forward(input):float()
it works fine. I also tried running evaluate.lua directly with th, and it works fine there with the cutorch and cunn packages.
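One thing worth checking (an assumption on my part, not something confirmed in the thread): init_by_lua_file runs in the nginx master process, the workers are forked from it afterwards, and a CUDA context created before the fork generally cannot be used in the child processes, which would match the THCudaCheck initialization error at the first :cuda() call inside a request. A minimal sketch of a workaround is to defer the GPU initialization to each worker with init_worker_by_lua_file (the file name worker_init.lua is made up); with lua_code_cache on, globals defined there stay visible to content_by_lua in the same worker:

-- in nginx.conf, replace:
--     init_by_lua_file 'evaluate.lua';
-- with:
--     init_worker_by_lua_file 'worker_init.lua';

-- worker_init.lua: runs once in every worker, after the fork, so the
-- CUDA context is created inside the process that will actually use it.
torch   = require("torch")
nn      = require("nn")
gm      = require("image")
cutorch = require("cutorch")
cunn    = require("cunn")
cjson   = require("cjson")

-- (the dummy model creation/save from evaluate.lua can be moved here too)
net = torch.load('./model.t7')
net:cuda()
net:evaluate()

-- FeedForward(json) from evaluate.lua moves here unchanged.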
