I was wondering if anyone could tell me what the default HTTP request timeout is when using express.
What I mean by this is: after how many seconds of handling an HTTP request will the Express / Node.js server close the connection, if neither the browser nor the server closes it manually?
How do I alter this timeout for a single route? I would like to set it to about 15 minutes for a special audio conversion route.
Thanks a lot.
Tom
req.connection.setTimeout(ms); appears to set the request timeout for an HTTP server in Node.js.
req.connection.setTimeout(ms); might be a bad idea since multiple requests can be sent over the same socket.
Try connect-timeout or use this:
const DEFAULT_TIMEOUT = 10000;
const DEFAULT_UPLOAD_TIMEOUT = 2 * 60 * 1000;

/*
  Passes an error to next() after the specified request timeout elapses.
  Options include:
  - timeout
  - uploadTimeout
  - errorPrototype (the type of Error to throw)
*/
module.exports = function(options) {
    // Set options
    options = options || {};
    if (options.timeout == null)
        options.timeout = DEFAULT_TIMEOUT;
    if (options.uploadTimeout == null)
        options.uploadTimeout = DEFAULT_UPLOAD_TIMEOUT;

    return function(req, res, next) {
        // timeout is the timeout for this request
        var tid;
        var timeout = req.is('multipart/form-data') ? options.uploadTimeout : options.timeout;

        // Add setTimeout and clearTimeout functions
        req.setTimeout = function(newTimeout) {
            if (newTimeout != null)
                timeout = newTimeout; // reset the timeout for this request
            req.clearTimeout();
            tid = setTimeout(function() {
                if (!res.finished) {
                    // pass the error to the error-handling middleware
                    var ErrorType = options.errorPrototype == null ? Error : options.errorPrototype;
                    next(new ErrorType("Timeout " + req.method + " " + req.url));
                }
            }, timeout);
        };
        req.clearTimeout = function() {
            clearTimeout(tid);
        };
        req.getTimeout = function() {
            return timeout;
        };

        // Proxy res.end to clear the timeout once the response has been sent
        var oldEnd = res.end;
        res.end = function() {
            req.clearTimeout();
            res.end = oldEnd;
            return res.end.apply(res, arguments);
        };

        // Start the timer
        req.setTimeout();
        next();
    };
};
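A minimal usage sketch, assuming the module above is saved as timeout.js next to the app; the /convert-audio route and the durations are illustrative, not part of the original answer:
var express = require('express');
var timeout = require('./timeout'); // the middleware module shown above

var app = express();

// 10 seconds by default, 2 minutes for multipart uploads
app.use(timeout({ timeout: 10000, uploadTimeout: 2 * 60 * 1000 }));

app.post('/convert-audio', function (req, res) {
    req.setTimeout(15 * 60 * 1000); // give this route 15 minutes before the middleware errors out
    // ... long-running conversion, then res.send(...)
});

app.listen(3000);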
The default request timeout in Node v0.9+ is 2 minutes, and that is what Express uses as well.
You can increase it for a single route using:
app.get('/longendpoint', function (req, res) {
    req.setTimeout(300000); // 5 minutes
    ...
});
connect-timeout and similar middleware generally don't work reliably for this.
What always works is setting the timeout at the nginx domain/subdomain level, like this:
location / {
    proxy_read_timeout 300;
    proxy_connect_timeout 300;
    proxy_send_timeout 300;
    proxy_pass http://xxxx:3009;
}
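Note that the nginx timeouts alone may not help if the Node process itself still closes the socket at its 2-minute default (see above). A hedged sketch of also raising the server-level timeout with the standard http.Server setTimeout method; the port and duration here are illustrative:
var app = require('express')();
var server = app.listen(3009);

// match the 300-second nginx timeouts above so Node doesn't close the socket first
server.setTimeout(5 * 60 * 1000);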
The current code looks like it implements a cache-first strategy. How do I modify it to use network-first and then fall back to the cache if the network fails?
async function onFetch(event) {
    let cachedResponse = null;
    if (event.request.method === 'GET') {
        // For all navigation requests, try to serve index.html from cache
        // If you need some URLs to be server-rendered, edit the following check to exclude those URLs
        //const shouldServeIndexHtml = event.request.mode === 'navigate';
        console.log("onFetch : " + event.request.url.toLowerCase());
        const shouldServeIndexHtml = event.request.mode === 'navigate';
        const request = shouldServeIndexHtml ? 'index.html' : event.request;
        const cache = await caches.open(cacheName);
        cachedResponse = await cache.match(request);
    }
    return cachedResponse || fetch(event.request);
}
if (event.request.url.indexOf('/api') != -1) {
    try {
        // Network first
        var response = await fetch(event.request);
        // Update or add cache
        await cache.put(event.request, response.clone());
        // Change return value
        cachedResponse = response;
    }
    catch (e) {
        // Network failed; fall through and return the cached response below
    }
}
You can add something like the block above right after this line in onFetch:
cachedResponse = await cache.match(request);
This will always try the network first for API requests, since they are not part of the cache initially. Each time the network succeeds, the cache entry for that request is refreshed. If the network request fails, the cached value is used instead.
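Putting the two snippets together, a hedged sketch of what the complete network-first onFetch could look like; the /api prefix and cacheName come from the snippets above, so adjust them to your app:
async function onFetch(event) {
    let cachedResponse = null;
    if (event.request.method === 'GET') {
        const shouldServeIndexHtml = event.request.mode === 'navigate';
        const request = shouldServeIndexHtml ? 'index.html' : event.request;
        const cache = await caches.open(cacheName);
        cachedResponse = await cache.match(request);

        if (event.request.url.indexOf('/api') != -1) {
            try {
                // Network first for API calls, refreshing the cache on success
                const response = await fetch(event.request);
                await cache.put(event.request, response.clone());
                return response;
            } catch (e) {
                // Offline: fall through to the cached response (if any)
            }
        }
    }
    return cachedResponse || fetch(event.request);
}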
I'm using SignalR 2.2.1 with a successful websocket connection.
Here are the handlers for the different connection states (simplified for brevity):
var hub = $.connection.moveShapeHub;

$.connection.hub.start().done(function () {
    console.log("hub started successfully");
}).fail(function () { console.log('Could not Connect!'); });

$.connection.hub.disconnected(function () {
    $.connection.hub.start();
    console.log('Connection disconnected');
});
My app is working fine as expected.
But look what happens when I disable the network card (I access my computer not via localhost but via a dynamic DNS name, so traffic goes out to the internet and back to my machine).
At first you see a WebSocket connection error (I see it multiple times):
WebSocket connection to
'ws://xxxxxx.ddns.net/signalr/reconnect?transport=webSockets&messageId=d-C68A95E5-g%2C1&clientProtocol=1.5&connectionToken=%2FDJL8eAtVtSA3XKeap4Js3IrbkCm56C%2FWKCQtApGiMroWAgnzNoRHmJ0Y2LpIdWWWL%2BfY3dXvJqYHFfby1XYii0ibPpKM55PQuZyf9aH4k9JHIT79lWoMWBasIpa9Gjk&connectionData=%5B%5D&tid=2'
failed: Error in connection establishment:
net::ERR_INTERNET_DISCONNECTED
And then you see endless calls (!!!) to the negotiate endpoint:
http://xxxx.ddns.net/signalr/negotiate?clientProtocol=1.5&connectionToken=%2FDJL8eAtVtSA3XKeap4Js3IrbkCm56C%2FWKCQtApGiMroWAgnzNoRHmJ0Y2LpIdWWWL%2BfY3dXvJqYHFfby1XYii0ibPpKM55PQuZyf9aH4k9JHIT79lWoMWBasIpa9Gjk&connectionData=%5B%7B%22name%22%3A%22moveshapehub%22%7D%5D&_=1485811277855
Wait ~15 seconds and you see the endless loop:
Question
How can I fix those endless calls? Or, alternatively, how can I increase the delay between those negotiate calls, say to every 2 seconds (instead of the endless, blazing-fast ~0.1 seconds)?
Edit
I've changed this code:
$.connection.hub.disconnected(function () {
    $.connection.hub.start();
    console.log('Connection disconnected');
});
to this (remove hub start):
$.connection.hub.disconnected(function () {
    console.log('Connection disconnected');
});
And now I see only the 'Connection disconnected' message.
But now I'm losing the whole point of trying to reconnect after a disconnect. So I ask again: is there any reasonable solution, or at least a way to retry the connection every 2 seconds?
negotiate is the first request a SignalR client sends to establish a connection. You are trying to start the connection as soon as it gets disconnected, inside the disconnected event handler. Because the network is down, negotiate fails, the disconnected event fires again, and you start the connection again, in a loop.
The documentation shows how to do this with a timeout:
$.connection.hub.disconnected(function() {
    setTimeout(function() {
        $.connection.hub.start();
    }, 5000); // Restart connection after 5 seconds.
});
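If you specifically want the behaviour asked about above, retrying every 2 seconds until the hub is back up rather than restarting once, here is a hedged sketch along the same lines (it is not from the SignalR docs):
var retryTimer = null;
$.connection.hub.disconnected(function () {
    if (retryTimer) { return; } // a retry loop is already running
    retryTimer = setInterval(function () {
        if ($.connection.hub.state === $.signalR.connectionState.disconnected) {
            $.connection.hub.start(); // still down, try again
        } else {
            clearInterval(retryTimer); // reconnected, stop retrying
            retryTimer = null;
        }
    }, 2000);
});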
My answer to most SignalR connection issues is to check the connection state and re-establish it on each server request. I have created a method that takes the hub proxy method as a parameter and fires it only once the hub connection is available.
Call the method using SR_Connection.execute.
Once the connection is established, it commits the execution of the request.
function cancel() {
    SR_Connection.execute('SRProxy.server.Cancel', uniqueID);
}

var SR_Connection = (function () {
    // Start of the return statement
    return {
        execute: function (method, params) {
            if ($.connection.hub && $.connection.hub.state === $.signalR.connectionState.disconnected) {
                $.connection.hub.start().done(function () {
                    SR_Connection.commit(method, params);
                });
            }
            else {
                SR_Connection.commit(method, params);
            }
        },
        commit: function (method, params) {
            var namespaces = method.split("."),
                context;
            if (typeof (window) == "undefined") {
                context = global;
            } else {
                context = window;
            }
            var functionToExecute = namespaces.pop();
            for (var i = 0; i < namespaces.length; i++) {
                context = context[namespaces[i]];
            }
            context[functionToExecute](params);
        }
    }; // End of the return statement
})();
I am using node-http-proxy. However, in addition to relaying HTTP requests, I also need to listen to the incoming and outgoing data.
Intercepting the response data is where I'm struggling. Node's ServerResponse object (and more generically the WritableStream interface) doesn't broadcast a 'data' event. http-proxy seems to create its own internal request, which produces a ClientResponse object (which does broadcast the 'data' event), but this object is not exposed publicly outside the proxy.
Any ideas how to solve this without monkey-patching node-http-proxy or creating a wrapper around the response object?
A related issue in the node-http-proxy GitHub tracker seems to imply this is not possible. For future attempts by others, here is how I hacked around the issue:
- you'll quickly find out that the proxy only calls the writeHead(), write() and end() methods of the res object
- since res is already an EventEmitter, you can start emitting new custom events
- listen for these new events to assemble the response data and then use it
var eventifyResponse = function(res) {
    var methods = ['writeHead', 'write', 'end'];
    methods.forEach(function(method) {
        var oldMethod = res[method];           // remember original method
        res[method] = function() {             // replace with a wrapper
            oldMethod.apply(this, arguments);  // call original method
            var args = Array.prototype.slice.call(arguments, 0);
            args.unshift("method_" + method);
            this.emit.apply(this, args);       // broadcast the event
        };
    });
    return res; // so the caller can keep using the same object
};

res = eventifyResponse(res);
var outputData = '';
res.on('method_writeHead', function(statusCode, headers) { saveHeaders(); });
res.on('method_write', function(data) { outputData += data; });
res.on('method_end', function(data) { use_data(outputData + (data || '')); });

proxy.proxyRequest(req, res, options);
This is a simple proxy server that sniffs the traffic and writes it to the console:
var http = require('http'),
    httpProxy = require('http-proxy');

//
// Create a proxy server with custom application logic
//
var proxy = httpProxy.createProxyServer({});

// assign events
proxy.on('proxyRes', function (proxyRes, req, res) {
    // collect response data
    var proxyResData = '';
    proxyRes.on('data', function (chunk) {
        proxyResData += chunk;
    });
    proxyRes.on('end', function () {
        var snifferData = {
            request: {
                data: req.body,
                headers: req.headers,
                url: req.url,
                method: req.method
            },
            response: {
                data: proxyResData,
                headers: proxyRes.headers,
                statusCode: proxyRes.statusCode
            }
        };
        console.log(snifferData);
    });
    // console.log('RAW Response from the target', JSON.stringify(proxyRes.headers, true, 2));
});

proxy.on('proxyReq', function (proxyReq, req, res, options) {
    // collect request data
    req.body = '';
    req.on('data', function (chunk) {
        req.body += chunk;
    });
    req.on('end', function () {
    });
});

proxy.on('error', function (err) {
    console.error(err);
});

// run the proxy server
var server = http.createServer(function (req, res) {
    // every time a request comes in, proxy it:
    proxy.web(req, res, {
        target: 'http://localhost:4444'
    });
});

console.log("listening on port 5556");
server.listen(5556);
I tried your hack but it didn't work for me. My use case is simple: I want to log the incoming and outgoing traffic from an Android app to our staging server, which is secured by basic auth.
https://github.com/greim/hoxy/ was the solution for me. My node-http-proxy setup always returned 500 (while a direct request to staging did not); maybe the authorization headers were not forwarded correctly.
Hoxy worked fine right from the start.
npm install hoxy [-g]
hoxy --port=<local-port> --stage=<your stage host>:<port>
As rules for logging I specified:
request: $aurl.log()
request: #log-headers()
request: $method.log()
request: $request-body.log()
response: $url.log()
response: $status-code.log()
response: $response-body.log()
Beware, this prints any binary content.
I am developing a node.js proxy server application and I want it to support HTTP and HTTPS (SSL) protocols (as a server).
I'm currently using node-http-proxy like this:
const httpProxy = require('http-proxy'),
      http = require('http');

var server = httpProxy.createServer(9000, 'localhost', function(req, res, proxy) {
    console.log(req.url);
    proxy.proxyRequest(req, res);
});

http.createServer(function(req, res) {
    res.end('hello!');
}).listen(9000);

server.listen(8000);
I set up my browser to use an HTTP proxy on localhost:8000 and it works. I also want to catch HTTPS requests (i.e. set up my browser to use localhost:8000 as an HTTPS proxy as well and catch the requests in my application). Could you please help me with how to do that?
PS:
If I subscribe to the upgrade event of the httpProxy server object I can get the requests, but I don't know how to forward the request and send the response to the client:
server.on('upgrade', function(req, socket, head) {
    console.log(req.url);
    // I don't know how to forward the request and send the response to the client
});
Any help would be appreciated.
Solutions barely exist for this, and the documentation is poor at best for supporting both on one server. The trick here is to understand that client proxy configurations may send https requests to an http proxy server. This is true for Firefox if you specify an HTTP proxy and then check "same for all protocols".
You can handle https connections sent to an HTTP server by listening for the "connect" event. Note that you won't have access to the response object on the connect event, only the socket and bodyhead. Data sent over this socket will remain encrypted to you as the proxy server.
In this solution, you don't have to make your own certificates, and you won't have certificate conflicts as a result. The traffic is simply proxied, not intercepted and rewritten with different certificates.
// Install npm dependencies first
// npm init
// npm install --save url@0.10.3
// npm install --save http-proxy@1.11.1

var httpProxy = require("http-proxy");
var http = require("http");
var url = require("url");
var net = require('net');

var server = http.createServer(function (req, res) {
    var urlObj = url.parse(req.url);
    var target = urlObj.protocol + "//" + urlObj.host;

    console.log("Proxy HTTP request for:", target);

    var proxy = httpProxy.createProxyServer({});
    proxy.on("error", function (err, req, res) {
        console.log("proxy error", err);
        res.end();
    });

    proxy.web(req, res, { target: target });
}).listen(8080); // this is the port your clients will connect to

var regex_hostport = /^([^:]+)(:([0-9]+))?$/;

var getHostPortFromString = function (hostString, defaultPort) {
    var host = hostString;
    var port = defaultPort;

    var result = regex_hostport.exec(hostString);
    if (result != null) {
        host = result[1];
        if (result[2] != null) {
            port = result[3];
        }
    }

    return [host, port];
};

server.addListener('connect', function (req, socket, bodyhead) {
    var hostPort = getHostPortFromString(req.url, 443);
    var hostDomain = hostPort[0];
    var port = parseInt(hostPort[1]);
    console.log("Proxying HTTPS request for:", hostDomain, port);

    var proxySocket = new net.Socket();
    proxySocket.connect(port, hostDomain, function () {
        proxySocket.write(bodyhead);
        socket.write("HTTP/" + req.httpVersion + " 200 Connection established\r\n\r\n");
    });

    proxySocket.on('data', function (chunk) {
        socket.write(chunk);
    });
    proxySocket.on('end', function () {
        socket.end();
    });
    proxySocket.on('error', function () {
        socket.write("HTTP/" + req.httpVersion + " 500 Connection error\r\n\r\n");
        socket.end();
    });

    socket.on('data', function (chunk) {
        proxySocket.write(chunk);
    });
    socket.on('end', function () {
        proxySocket.end();
    });
    socket.on('error', function () {
        proxySocket.end();
    });
});
Here is my dependency-free solution (pure Node.js standard libraries):
const http = require('http')
const port = process.env.PORT || 9191
const net = require('net')
const url = require('url')

// discard all requests to the proxy server except the HTTP/1.1 CONNECT method
const requestHandler = (req, res) => {
  res.writeHead(405, {'Content-Type': 'text/plain'})
  res.end('Method not allowed')
}

const server = http.createServer(requestHandler)

const listener = server.listen(port, (err) => {
  if (err) {
    return console.error(err)
  }
  const info = listener.address()
  console.log(`Server is listening on address ${info.address} port ${info.port}`)
})

// listen only for the HTTP/1.1 CONNECT method
server.on('connect', (req, clientSocket, head) => {
  console.log(clientSocket.remoteAddress, clientSocket.remotePort, req.method, req.url)

  // here you can add a check for any username/password; I just check that this header exists!
  if (!req.headers['proxy-authorization']) {
    clientSocket.write([
      'HTTP/1.1 407 Proxy Authentication Required',
      'Proxy-Authenticate: Basic realm="proxy"',
      'Proxy-Connection: close',
    ].join('\r\n'))
    clientSocket.end('\r\n\r\n') // empty body
    return
  }

  // extract destination host and port from the CONNECT request
  const {port, hostname} = url.parse(`//${req.url}`, false, true)
  if (hostname && port) {
    const serverErrorHandler = (err) => {
      console.error(err.message)
      if (clientSocket) {
        clientSocket.end(`HTTP/1.1 500 ${err.message}\r\n`)
      }
    }
    const serverEndHandler = () => {
      if (clientSocket) {
        clientSocket.end(`HTTP/1.1 500 External Server End\r\n`)
      }
    }
    const serverSocket = net.connect(port, hostname) // connect to the destination host and port
    const clientErrorHandler = (err) => {
      console.error(err.message)
      if (serverSocket) {
        serverSocket.end()
      }
    }
    const clientEndHandler = () => {
      if (serverSocket) {
        serverSocket.end()
      }
    }
    clientSocket.on('error', clientErrorHandler)
    clientSocket.on('end', clientEndHandler)
    serverSocket.on('error', serverErrorHandler)
    serverSocket.on('end', serverEndHandler)
    serverSocket.on('connect', () => {
      clientSocket.write([
        'HTTP/1.1 200 Connection Established',
        'Proxy-agent: Node-VPN',
      ].join('\r\n'))
      clientSocket.write('\r\n\r\n') // empty body
      // "blindly" (for performance) pipe client socket and destination socket between each other
      serverSocket.pipe(clientSocket, {end: false})
      clientSocket.pipe(serverSocket, {end: false})
    })
  } else {
    clientSocket.end('HTTP/1.1 400 Bad Request\r\n')
    clientSocket.destroy()
  }
})
I tested this code with the Firefox proxy settings (it even asks for a username and password!). I entered the IP address of the machine where this code runs and port 9191, as you can see in the code. I also set "Use this proxy server for all protocols". I ran this code both locally and on a VPS; it works in both cases!
You can test your NodeJS proxy with curl:
curl -x http://username:password@127.0.0.1:9191 https://www.google.com/
I have created an http/https proxy with the aid of the http-proxy module: https://gist.github.com/ncthis/6863947
Code as of now:
var fs = require('fs'),
    http = require('http'),
    https = require('https'),
    httpProxy = require('http-proxy');

var isHttps = true; // do you want an https proxy?

var options = {
    https: {
        key: fs.readFileSync('key.pem'),
        cert: fs.readFileSync('key-cert.pem')
    }
};

// this is the target server
var proxy = new httpProxy.HttpProxy({
    target: {
        host: '127.0.0.1',
        port: 8080
    }
});

if (isHttps)
    https.createServer(options.https, function(req, res) {
        console.log('Proxying https request at %s', new Date());
        proxy.proxyRequest(req, res);
    }).listen(443, function(err) {
        if (err)
            console.log('Error serving https proxy request: %s', err);
        console.log('Created https proxy. Forwarding requests from %s to %s:%s', '443', proxy.target.host, proxy.target.port);
    });
else
    http.createServer(function(req, res) {
        console.log('Proxying http request at %s', new Date());
        console.log(req);
        proxy.proxyRequest(req, res);
    }).listen(80, function(err) {
        if (err)
            console.log('Error serving http proxy request: %s', err);
        console.log('Created http proxy. Forwarding requests from %s to %s:%s', '80', proxy.target.host, proxy.target.port);
    });
The node-http-proxy docs contain examples of this. Look for "Proxying to HTTPS from HTTPS" at https://github.com/nodejitsu/node-http-proxy. The configuration process is slightly different in every browser: some have the option to use your proxy settings for all protocols; in others you need to configure the SSL proxy separately.
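For reference, a hedged sketch modelled on that README example; the certificate paths and the target are placeholders, and the ssl/target/secure options belong to the 1.x API, so check them against the version you are using:
var fs = require('fs'),
    httpProxy = require('http-proxy');

httpProxy.createServer({
    ssl: {
        key: fs.readFileSync('valid-ssl-key.pem', 'utf8'),
        cert: fs.readFileSync('valid-ssl-cert.pem', 'utf8')
    },
    target: 'https://localhost:9010',
    secure: true // verify the target's SSL certificate; set to false for self-signed targets
}).listen(443);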
I'm using node.js, but I have a feeling this isn't necessarily related to just node. Anyway:
I'm writing a URL shortener in node, and I want to hit the shortened URL to get the page title. This works in most cases, usually follows redirects properly, etc.
But when I hit gmail.com, it goes into an infinite redirect loop: http://gmail.com redirects to https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=....... which in turn redirects to itself forever.
My code is basically like this:
var http = require('http'),
    https = require('https'),
    URL = require('url'),
    querystring = require('querystring');

var http_client = {};

function _makeRequest(url, callback) {
    var urlInfo = URL.parse(url);

    var reqUrl = urlInfo.pathname || '/';
    reqUrl += urlInfo.search || '';
    reqUrl += urlInfo.hash || '';

    var opts = {
        host: urlInfo.hostname,
        port: urlInfo.port || (urlInfo.protocol == 'https' ? 443 : 80),
        path: reqUrl,
        method: 'GET'
    };

    var protocol = (urlInfo.protocol == 'https' ? https : http);
    var req = protocol.request(opts, function(res) {
        var content = '';
        res.setEncoding('utf8');
        res.addListener('data', function(chunk) {
            content += chunk;
        });
        res.addListener('end', function() {
            _requestReceived(content, res.headers, callback);
        });
    });
    req.end();
}
function _requestReceived(content, headers, callback) {
    var redirect = false;
    var newLocation;

    if (headers.location) {
        newLocation = headers.location;
        redirect = true;
    }

    if (redirect) {
        console.log('redirecting to: ' + newLocation);
        _makeRequest(newLocation, callback);
    } else {
        callback(null, content);
    }
}
yep!
ahh okay, got it!
My check for https was:
var protocol = (urlInfo.protocol == 'https' ? https : http);
but node includes the trailing colon in the protocol, so it should have been:
var protocol = (urlInfo.protocol == 'https:' ? https : http);
Because of this it kept using http, and gmail would redirect me to https forever.