Why is the variable modified when nginx Lua processes the request?

I just started studying Lua.
On every request I want to check the name parameter from the request arguments. However, I occasionally find that self.name has changed unexpectedly.
For example:
request A with params: request_id = 123 & name = ABC
request B with params: request_id = 321 & name = EFG
In the log I then find an entry with request_id = 123 but name = EFG.
Why is that? Is my class written incorrectly?
Here is the sample code:
main.lua:
local checker = require "checker"
local ch = checker:new()
if ch:check_name() then
    ngx.header["Content-Type"] = "application/json"
    ngx.status = ngx.HTTP_FORBIDDEN
    ngx.exit(ngx.HTTP_FORBIDDEN)
end
checker.lua:
local utils = require "utils"
local _M = {}
function _M:new()
    local o = {}
    setmetatable(o, self)
    self.__index = self
    self.args = utils.get_req_args() or {}
    local name = self.args["name"] or ""
    local request_id = self.args["request_id"] or ""
    self.name = name
    return o
end

function _M:check_name()
    ngx.log(ngx.ERR, "request_id: ", self.request_id, " name: ", self.name)
    -- do some check ...
end

return _M
utils.lua:
local json = require "cjson"
local _M = {}

function _M.new(self)
    return self
end

function _M.get_req_args()
    -- GET
    local args = nil
    if ngx.var.request_method == "GET" then
        args = ngx.req.get_uri_args()
    -- POST
    elseif ngx.var.request_method == "POST" then
        ngx.req.read_body()
        local data = ngx.req.get_body_data()
        args = json.decode(data)
    end
    return args
end

return _M

Related

ESP8266 does not send back HTTP response

I have a problem. With the old firmware (NodeMCU 0.9.5 build 20150318, powered by Lua 5.1.4) it works and the phone receives the response. But with the new one (build 2022-09-07, powered by Lua 5.1.4 on SDK 3.0.1-dev(fce080e)) I do not receive a response (although the "who" request does reach the ESP8266). What could the problem be?
Code:
srv = net.createServer(net.TCP)
srv:listen(80,function(conn)
    conn:on("receive", function(client,request)
        local buf = "";
        buf = buf.."HTTP/1.1 200 OK\n\n"
        local _, _, method, path, vars = string.find(request, "([A-Z]+) (.+)?(.+) HTTP");
        if(method == nil)then
            _, _, method, path = string.find(request, "([A-Z]+) (.+) HTTP");
        end
        local _GET = {}
        if (vars ~= nil)then
            for k, v in string.gmatch(vars, "(%w+)=(%w+)&*") do
                _GET[k] = v
            end
        end
        if(_GET.pin == "ON1")then
            print("On")
        elseif(_GET.pin == "OFF1") then
            print("Off")
        end
        if(_GET.question == "who") then
            buf=""
            buf = buf.."HTTP/1.1 200 OK\n\n"
            print(buf)
        end
        client:send(buf)
        client:close()
        collectgarbage()
    end)
end)
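One likely culprit, offered as an assumption rather than a confirmed diagnosis: on recent NodeMCU firmware net.socket:send() is asynchronous, so calling client:close() immediately after send() can tear the connection down before the response has actually been transmitted. The pattern in the current NodeMCU net documentation is to close the socket from the "sent" callback; a minimal sketch (also terminating the headers with \r\n\r\n, which stricter clients expect):

srv = net.createServer(net.TCP)
srv:listen(80, function(conn)
    conn:on("receive", function(client, request)
        local buf = "HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\n\r\nOK"
        -- close only after the data has actually left the send buffer
        client:on("sent", function(sck)
            sck:close()
        end)
        client:send(buf)
    end)
end)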

Is it possible to randomly sample YouTube comments with YouTube API V3?

I have been trying to download all the YouTube comments on popular videos using python requests, but it has been throwing up the following error after about a quarter of the total comments:
{'error': {'code': 400, 'message': "The API server failed to successfully process the request. While this can be a transient error, it usually indicates that the request's input is invalid. Check the structure of the commentThread resource in the request body to ensure that it is valid.", 'errors': [{'message': "The API server failed to successfully process the request. While this can be a transient error, it usually indicates that the request's input is invalid. Check the structure of the commentThread resource in the request body to ensure that it is valid.", 'domain': 'youtube.commentThread', 'reason': 'processingFailure', 'location': 'body', 'locationType': 'other'}]}}
I found this thread detailing the same issue, and it seems that it is not possible to download all the comments on popular videos.
This is my code:
import argparse
import urllib
import requests
import json
import time

start_time = time.time()


class YouTubeApi():

    YOUTUBE_COMMENTS_URL = 'https://www.googleapis.com/youtube/v3/commentThreads'
    comment_counter = 0

    with open("API_keys.txt", "r") as f:
        key_list = f.readlines()
    key_list = [key.strip('/n') for key in key_list]

    def format_comments(self, results, likes_required):
        comments_list = []
        try:
            for item in results["items"]:
                comment = item["snippet"]["topLevelComment"]
                likes = comment["snippet"]["likeCount"]
                if likes < likes_required:
                    continue
                author = comment["snippet"]["authorDisplayName"]
                text = comment["snippet"]["textDisplay"]
                str = "Comment by {}:\n \"{}\"\n\n".format(author, text)
                str = str.encode('ascii', 'replace').decode()
                comments_list.append(str)
                self.comment_counter += 1
                print("Comments downloaded:", self.comment_counter, end="\r")
        except(KeyError):
            print(results)
        return comments_list

    def get_video_comments(self, video_id, likes_required):
        with open("API_keys.txt", "r") as f:
            key_list = f.readlines()
        key_list = [key.strip('/n') for key in key_list]
        if self.comment_counter <= 900000:
            key = self.key_list[0]
        elif self.comment_counter <= 1800000:
            key = self.key_list[1]
        elif self.comment_counter <= 2700000:
            key = self.key_list[2]
        elif self.comment_counter <= 3600000:
            key = self.key_list[3]
        elif self.comment_counter <= 4500000:
            key = self.key_list[4]
        params = {
            'part': 'snippet,replies',
            'maxResults': 100,
            'videoId': video_id,
            'textFormat': 'plainText',
            'key': key
        }
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36'
        }
        try:
            #data = self.openURL(self.YOUTUBE_COMMENTS_URL, params)
            comments_data = requests.get(self.YOUTUBE_COMMENTS_URL, params=params, headers=headers)
        except ChunkedEncodingError:
            tries = 5
            print("Chunked Error. Retrying...")
            for n in range(tries):
                try:
                    x = 0
                    x += 1
                    print("Trying", x, "times")
                    response = session.post("https://www.youtube.com/comment_service_ajax", params=params, data=data, headers=headers)
                    comments_data = json.loads(response.text)
                except ChunkedEncodingError as c:
                    print(c)
        results = comments_data.json()
        nextPageToken = results.get("nextPageToken")
        commments_list = []
        commments_list += self.format_comments(results, likes_required)
        while nextPageToken:
            params.update({'pageToken': nextPageToken})
            try:
                comments_data = requests.get(self.YOUTUBE_COMMENTS_URL, params=params, headers=headers)
            except ChunkedEncodingError as c:
                tries = 5
                print("Chunked Error. Retrying...")
                for n in range(tries):
                    try:
                        x = 0
                        x += 1
                        print("Trying", x, "times")
                        response = session.post("https://www.youtube.com/comment_service_ajax", params=params, data=data, headers=headers)
                        comments_data = json.loads(response.text)
                    except ChunkedEncodingError as c:
                        print(c)
            results = comments_data.json()
            nextPageToken = results.get("nextPageToken")
            commments_list += self.format_comments(results, likes_required)
        return commments_list

    def get_video_id_list(self, filename):
        try:
            with open(filename, 'r') as file:
                URL_list = file.readlines()
        except FileNotFoundError:
            exit("File \"" + filename + "\" not found")
        list = []
        for url in URL_list:
            if url == "\n":  # ignore empty lines
                continue
            if url[-1] == '\n':  # delete '\n' at the end of line
                url = url[:-1]
            if url.find('='):  # get id
                id = url[url.find('=') + 1:]
                list.append(id)
            else:
                print("Wrong URL")
        return list


def main():
    yt = YouTubeApi()
    parser = argparse.ArgumentParser(add_help=False, description=("Download youtube comments from many videos into txt file"))
    required = parser.add_argument_group("required arguments")
    optional = parser.add_argument_group("optional arguments")
    here: https://console.developers.google.com/apis/credentials")
    optional.add_argument("--likes", '-l', help="The amount of likes a comment needs to be saved", type=int)
    optional.add_argument("--input", '-i', help="URL list file name")
    optional.add_argument("--output", '-o', help="Output file name")
    optional.add_argument("--help", '-h', help="Help", action='help')
    args = parser.parse_args()
    # --------------------------------------------------------------------- #
    likes = 0
    if args.likes:
        likes = args.likes
    input_file = "URL_list.txt"
    if args.input:
        input_file = args.input
    output_file = "Comments.txt"
    if args.output:
        output_file = args.output
    list = yt.get_video_id_list(input_file)
    if not list:
        exit("No URLs in input file")
    try:
        vid_counter = 0
        with open(output_file, "a") as f:
            for video_id in list:
                vid_counter += 1
                print("Downloading comments for video ", vid_counter, ", id: ", video_id, sep='')
                comments = yt.get_video_comments(video_id, likes)
                if comments:
                    for comment in comments:
                        f.write(comment)
        print('\nDone!')
    except KeyboardInterrupt:
        exit("User Aborted the Operation")
    # --------------------------------------------------------------------- #


if __name__ == '__main__':
    main()
The next best method would be to randomly sample them. Does anyone know if this is possible with the API V3?
Even if the API returns a processingFailure error, you could still catch that (or any other API error, for that matter) in order to terminate your pagination loop gracefully. This way your script will provide the top-level comments that it fetched from the API prior to the first API error.
The error response provided by the YouTube Data API is (usually) of the following form:
{
  "error": {
    "errors": [
      {
        "domain": <string>,
        "reason": <string>,
        "message": <string>,
        "locationType": <string>,
        "location": <string>
      }
    ],
    "code": <integer>,
    "message": <string>
  }
}
Hence, you could have defined the following function:
def is_error_response(response):
    error = response.get('error')
    if error is None:
        return False
    print("API Error: "
          f"code={error['code']} "
          f"domain={error['errors'][0]['domain']} "
          f"reason={error['errors'][0]['reason']} "
          f"message={error['errors'][0]['message']!r}")
    return True
which you would invoke after each statement of the form results = comments_data.json(). At the first occurrence of that statement, you would have:
results = comments_data.json()
if is_error_response(results):
    return []
nextPageToken = results.get("nextPageToken")
For the second instance of that statement:
results = comments_data.json()
if is_error_response(results):
    return comments_list
nextPageToken = results.get("nextPageToken")
Notice that the function is_error_response above prints an error message on stdout when its argument is an API error response; this is so the user of your script is informed about the API call failure.

Issue with SMTP lib in python3.6

I have a strange issue with my Python monitoring script.
I've written a script with a number of alerts for any server. In it I have a function that gathers network bytes/sec in and out.
The issue is that when I print the alert outside my mail function it prints the current output, but for some reason when the alert triggers the mail, the mail body is empty. If I trigger the mail with another alert that isn't in the network function, it works properly.
Also, is there a way to get smtplib to use port 587 instead of 465? Any pointers on formatting the alert would be appreciated too.
Please find my script below:
#!/usr/bin/env python3
#Module psutil needs to be installed via pip3 first.
#Python script to Monitor Server Resources.
import time
import psutil
import smtplib
from email.message import EmailMessage

project_and_instance_name = 'test-stage' #Edit the name of the project name and environment
sender = '<sender email>' #Email Address of the sender
receivers = ['recepient email'] #comma seperated list of recipients enclosed in ''

cpu_thresh = 50.0
cpu_pct = psutil.cpu_percent(interval=1)
if cpu_pct >= cpu_thresh:
    cpu_alert = "CPU Warning, CPU at ",cpu_pct, "percent"
else:
    cpu_alert = ""

mem = psutil.virtual_memory()
mem_thresh = 1024 * 1024 * 1024 #change the end value to choose the amount of MB
if mem_thresh >= mem.available:
    mem_alert = "Memory Usage Warning only", round((mem.available /1024 /1024), 2), "MB available"
else:
    mem_alert = ""

partition1 = '/'
disk1 = psutil.disk_usage(partition1)
disk_thresh = 85.0
if disk_thresh <= disk1[3]:
    disk_alert = f"Root volume usage warning {disk1[3]} % used"
else:
    disk_alert = ""

def net_usage(inf = "eth0"): #change the inf variable according to the interface
    global net_in_alert
    global net_out_alert
    net_in_ps1 = psutil.net_io_counters(pernic=True, nowrap=True)[inf]
    net_in_1 = net_in_ps1.bytes_recv
    net_out_1 = net_in_ps1.bytes_sent
    time.sleep(1)
    net_in_ps2 = psutil.net_io_counters(pernic=True, nowrap=True)[inf]
    net_in_2 = net_in_ps2.bytes_recv
    net_out_2 = net_in_ps2.bytes_sent
    net_in_res = round((net_in_2 - net_in_1) /1024 /1024, 2)
    net_out_res = round((net_out_2 - net_out_1) /1024 /1024, 2)
    net_in_thresh = 1.5
    net_out_thresh = 1.5
    if net_in_res >= net_in_thresh:
        net_in_alert = f"Current net-usage:IN: {net_in_res} MB/s"
    else:
        net_in_alert = ""
    if net_out_res <= net_out_thresh:
        net_out_alert = f"Current net-usage:OUT: {net_out_res} MB/s"
    else:
        net_out_alert = ""

net_usage()

message_list = []
if cpu_alert == "" :
    pass
else:
    message_list.append(cpu_alert)
if mem_alert == "" :
    pass
else:
    message_list.append(mem_alert)
if disk_alert == "" :
    pass
else:
    message_list.append(disk_alert)
if net_in_alert == "" :
    pass
else:
    message_list.append(net_in_alert)
if net_out_alert == "" :
    pass
else:
    message_list.append(net_out_alert)

msg = '\n'.join(message_list)
print(msg)

def alerts():
    server = smtplib.SMTP_SSL('smtp.gmail.com', 465)
    server.login(sender, "<password>")
    server.sendmail(sender,receivers,msg)

if msg == "":
    pass
else:
    alerts()
I got it working by changing the SMTP setup. For anyone who is stuck, here's the code:
#!/usr/bin/env python3
#Module psutil needs to be installed via pip3 first.
#Python script to Monitor Server Resources.
import time
import psutil
import smtplib, ssl
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

project_and_instance_name = 'test-stage' #Edit the name of the project name and environment
sender = '<senders email>' #Email Address of the sender
receivers = ['recepient email'] #comma seperated list of recipients enclosed in ''

cpu_thresh = 50.0
cpu_pct = psutil.cpu_percent(interval=1)
if cpu_pct >= cpu_thresh:
    cpu_alert = "CPU Warning, CPU at ",cpu_pct, "percent"
else:
    cpu_alert = ""

mem = psutil.virtual_memory()
mem_thresh = 1024 * 1024 * 1024 #change the end value to choose the amount of MB
if mem_thresh >= mem.available:
    mem_alert = "Memory Usage Warning only", round((mem.available /1024 /1024), 2), "MB available"
else:
    mem_alert = ""

partition1 = '/'
disk1 = psutil.disk_usage(partition1)
disk_thresh = 85.0
if disk_thresh <= disk1[3]:
    disk_alert = f"Root volume usage warning {disk1[3]} % used"
else:
    disk_alert = ""

def net_usage(inf = "eth0"): #change the inf variable according to the interface
    global net_in_alert
    global net_out_alert
    net_in_ps1 = psutil.net_io_counters(pernic=True, nowrap=True)[inf]
    net_in_1 = net_in_ps1.bytes_recv
    net_out_1 = net_in_ps1.bytes_sent
    time.sleep(1)
    net_in_ps2 = psutil.net_io_counters(pernic=True, nowrap=True)[inf]
    net_in_2 = net_in_ps2.bytes_recv
    net_out_2 = net_in_ps2.bytes_sent
    net_in_res = round((net_in_2 - net_in_1) /1024 /1024, 2)
    net_out_res = round((net_out_2 - net_out_1) /1024 /1024, 2)
    net_in_thresh = 1.5
    net_out_thresh = 1.5
    if net_in_res >= net_in_thresh:
        net_in_alert = f"Current net-usage:IN: {net_in_res} MB/s"
    else:
        net_in_alert = ""
    if net_out_res >= net_out_thresh:
        net_out_alert = f"Current net-usage:OUT: {net_out_res} MB/s"
    else:
        net_out_alert = ""

net_usage()

message_list = []
if cpu_alert == "" :
    pass
else:
    message_list.append(cpu_alert)
if mem_alert == "" :
    pass
else:
    message_list.append(mem_alert)
if disk_alert == "" :
    pass
else:
    message_list.append(disk_alert)
if net_in_alert == "" :
    pass
else:
    message_list.append(net_in_alert)
if net_out_alert == "" :
    pass
else:
    message_list.append(net_out_alert)

msg = '\n'.join(message_list)
print(msg)

def alerts():
    msg_template = MIMEMultipart()
    msg_template['From'] = sender
    msg_template['To'] = ', '.join(receivers)
    msg_template['Subject'] = f"{project_and_instance_name} Alert"
    msg_template.attach(MIMEText(msg, 'plain'))
    server = smtplib.SMTP('smtp.gmail.com', 587)
    server.ehlo()
    server.starttls()
    server.ehlo()
    server.login(sender, "<password>")
    server.sendmail(sender,receivers,msg_template.as_string())
    server.quit()

if msg == "":
    pass
else:
    alerts()
Something that helped me in understanding this answer:
receivers is a list of email addresses:
receivers = ['email1', 'email2']
the message template's "To" header is a joined string:
msg_template['To'] = ', '.join(receivers)
server.sendmail() takes the list of email addresses:
server.sendmail(sender, receivers, msg_template.as_string())

body_filter_by_lua_block nginx lua

The code below is Nginx Lua:
body_filter_by_lua_block {
    function check_access_file(file)
        local fh = io.open(file,'rb')
        if fh then
            fh:close()
            return true
        else
            return false
        end
        return false
    end

    function write_file(file, str)
        local check_access_file = check_access_file(file)
        if check_access_file then
            local fh = io.open(file, "a")
            fh:write(str, "\n")
            fh:close()
        end
    end

    write_file('/etc/nginx/111', '1234567='..ngx.arg[1])
}
When it runs once, two lines are written to the file:
1234567 = test
1234567 =
Why are two responses written to file 111 on each request?
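A likely explanation (not stated in the post above): body_filter_by_lua_block runs once for every chunk of the response body, and there is an additional final call in which ngx.arg[1] is an empty string and ngx.arg[2] (the eof flag) is true. That last call is what produces the second line, 1234567= with no value. A minimal sketch that distinguishes the two cases:

body_filter_by_lua_block {
    -- ngx.arg[1] is the current body chunk, ngx.arg[2] is the eof flag
    local chunk, eof = ngx.arg[1], ngx.arg[2]
    if chunk ~= "" then
        ngx.log(ngx.ERR, "body chunk: ", chunk)
    end
    if eof then
        ngx.log(ngx.ERR, "end of response body for this request")
    end
}

Guarding the write with if ngx.arg[1] ~= "" (or checking ngx.arg[2]) would keep the empty line out of the file.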

One ESP8266 Client, One ESP8266 Server

I built a server-client application in Lua using one ESP8266. Now I want to do this with two ESP8266 boards: one as the server and the other as the client. The first code below gets the RSSI from an AP, and the second code serves those RSSI values from a web server. How can I put these two pieces of code onto the two ESP8266 boards?
i=5
tmr.alarm(1,10000,1, function()
    print(wifi.sta.getap(scan_cfg, 1, listap))
    if i>1 then
        print(i)
        i=i-1
    else
        tmr.stop(1)
        print("Timer Durdu")
    end
end
)

function listap(t)
    for bssid,v in pairs(t) do
        local ssid = string.match(v, "([^,]+)")
        l=string.format("%-10s",ssid)
        stringtoarray = {}
        index = 1
        for value in string.gmatch(v,"%w+") do
            stringtoarray[index] = value
            index = index + 1
        end
        print(l)
        print(stringtoarray[2])
    end
end

scan_cfg = {}
scan_cfg.ssid = "VSP250s"
scan_cfg.bssid = "00:09:df:8e:03:b4"
scan_cfg.channel = 0
scan_cfg.show_hidden = 1
Second code:
srv=net.createServer(net.TCP)
srv:listen(80,function(conn)
    conn:on("receive", function(client,request)
        local buf = "";
        local _, _, method, path, vars = string.find(request, "([A-Z]+) (.+)?(.+) HTTP");
        if(method == nil)then
            _, _, method, path = string.find(request, "([A-Z]+) (.+) HTTP");
        end
        local _GET = {}
        if (vars ~= nil)then
            for k, v in string.gmatch(vars, "(%w+)=(%w+)&*") do
                _GET[k] = v
            end
        end
        buf = buf.."<!DOCTYPE html><html><div id='container'><font size='5'>"
        buf = buf..'<style>body{width:auto;height:auto;background-color:#ffffff;}'
        buf = buf..'.button {font-size: 20px;}</style>'
        buf = buf.."<head> <meta http-equiv='refresh' content=3> "
        buf = buf.."<p><h1>RSSI meter<br> ESP8266</h1>";
        --buf = buf.."<p>Refresh : <button class='button'>ON</button>&nbsp</p>";
        --buf = buf.."<p>Relay Switch : <button class='button'>ON</button>  "
        --buf = buf.."<button class='button'>OFF</button><br>"
        buf = buf..'<B>Voltage :<font color=red>'..string.format('%s',l)..' V</font></b><br>'
        buf = buf..'<B>Current :<B><font color=blue>'..string.format('%g',stringtoarray[2])..' A</font></b><br>'
        --buf = buf..'<B>Power Consumption :<B><font color=DeepSkyBlue>'..'Not Available'..'</font></b><br><BR>'
        -- buf = buf..'<p>Function Button :<B><font color=BlueViolet>'..button_status..'</font></b><br></p>';
        buf = buf..'</head>'
        buf = buf..'<br><br><details><summary><font color=red>BURAK IPEK</font><p>'
        buf = buf..'<summary><p>Vestel Electronics </p></details>'
        buf = buf.."</body></font></div></html>"
        client:send(buf);
        client:close();
        collectgarbage();
    end)
end)
Put each piece of code into its own Lua file and include both from init.lua by typing:
dofile("client.lua");
dofile("server.lua");
To make things easier, write methods.
Good luck.
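As a sketch of that suggestion (the file layout and function names here are just examples, not from the answer): wrap each snippet in a function in its own file, load both files from init.lua, then start whichever role the board should play.

-- client.lua (example): wrap the RSSI scan code in a function
function start_rssi_scan()
    -- ... the tmr.alarm()/listap() code from the first snippet goes here ...
end

-- server.lua (example): wrap the HTTP server in a function
function start_http_server()
    -- ... the net.createServer() code from the second snippet goes here ...
end

-- init.lua: load both files, then call the function for the role this board plays
dofile("client.lua")
dofile("server.lua")
start_rssi_scan()       -- on the client board
-- start_http_server()  -- on the server board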
