I am writing a simple HTTP server on top of the Node.js net module, without using the http module.
I have a server listening on localhost:port with a socket open.
socket.on('data', function (data) {
  clientMsg += data;
});
Once I type the address in the browser, I can see the GET request in clientMsg.
In order to return a response I use:
socket.on('close', function () { /* generate the response here */ });
But this doesn't work well: the response is only sent after I press ESC or click Stop in the browser.
So the question is: how can I tell that the browser has finished sending the request and is waiting for a response, without closing the connection?
You would use the 'connection' event instead of 'close'.
Event: 'connection'
Also, this is the structure that is documented for such a server:
var net = require('net');
var server = net.createServer(function (c) { // 'connection' listener
  console.log('server connected');
  c.on('end', function () {
    console.log('server disconnected');
  });
  c.write('hello\r\n');
  c.pipe(c);
});
server.listen(8124, function () { // 'listening' listener
  console.log('server bound');
});
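If you want to reply to the browser yourself without waiting for the connection to close, one option is to watch for the blank line (\r\n\r\n) that terminates the request headers; a GET request has no body, so that marks the end of the message. A minimal sketch along those lines (the port and response body are just placeholders):

var net = require('net');

var server = net.createServer(function (socket) {
  var clientMsg = '';
  socket.on('data', function (data) {
    clientMsg += data;
    // a GET request has no body, so the blank line ending the headers
    // means the browser is done sending and is waiting for a reply
    if (clientMsg.indexOf('\r\n\r\n') !== -1) {
      var body = 'hello';
      socket.write('HTTP/1.1 200 OK\r\n' +
        'Content-Type: text/plain\r\n' +
        'Content-Length: ' + Buffer.byteLength(body) + '\r\n' +
        'Connection: close\r\n' +
        '\r\n' + body);
      socket.end();
    }
  });
});

server.listen(8124);

Requests that carry a body (e.g. POST) would additionally need Content-Length to be parsed, which is where the http module starts to pay off.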
I host a very simple Node Socket.IO application on my Windows Server; below is a code sample.
// socket.io 3.1.2
const port = 30080;
const httpServer = require("http").createServer();
const io = require("socket.io")(httpServer, {
  cors: {
    origin: '*',
    methods: ["GET", "POST"],
    allowedHeaders: ["Access-Control-Allow-Origin"],
    credentials: false
  }
});

io.on("connection", socket => {
  console.log('On Connection');
  io.emit("message", 'Welcome to Socket Io.');
});
I also wrote some code in an HTML file that connects to my Socket.IO server, and it works well. Below is the code sample.
// <script src="https://cdn.socket.io/3.1.3/socket.io.min.js"></script>
const socket = io("http://myserverip:30080", {
  withCredentials: false,
  extraHeaders: {
    "Access-Control-Allow-Origin": "*"
  }
});

socket.on("connect", () => {
  console.log('connect');
});

socket.on("message", (message) => {
  console.log(message);
});
But when I use the same code in my .NET Core web application, I get the error "ERR_SSL_PROTOCOL_ERROR". Even after publishing the web application to the Windows Server, I still get the same error message.
I have tried the http, https, ws and wss protocols. None of them work. How can I get this working?
I do not see the following in your server side code:
httpServer.listen()
Do you have a reverse proxy between your client and the server?
I would not expect an SSL-related error based on your code.
I would also use socket.io version 4 just for future maintenance reasons.
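For reference, a sketch of the server from the question with the missing listen call added (same port, socket.io 3.x as above):

const port = 30080;
const httpServer = require("http").createServer();
const io = require("socket.io")(httpServer, {
  cors: {
    origin: '*',
    methods: ["GET", "POST"]
  }
});

io.on("connection", socket => {
  console.log('On Connection');
  io.emit("message", 'Welcome to Socket Io.');
});

// without this call, nothing is bound to the port and clients cannot connect
httpServer.listen(port, () => console.log('Socket.IO server listening on ' + port));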
I've got some devices (think heart rate monitors) that are going to send raw TCP packets to my Meteor server. When it receives data, it'll write to MongoDB, and then I'll use Meteor to publish and invalidate that data to the client.
As I understand it, SockJS can't handle raw TCP packets, so I set up a net server to receive them. The code works great in pure Node, but when I use it with Meteor.npmRequire I get the following error:
Exception while invoking method 'startNet' TypeError: Object #<Object> has no method 'createServer'
Here's my code:
Meteor.methods({
  'startNet': function (port) {
    var net = Meteor.npmRequire('net');
    net.createServer(function (socket) {
      console.log("connected");
      socket.on('data', function (data) {
        console.log(data.toString());
      });
    }).listen(port);
  }
});
Any ideas why the net variable returns an empty object?
Moving the require out of the method should work:
net = Meteor.npmRequire('net');

Meteor.methods({
  'startNet': function (port) {
    net.createServer(function (socket) {
      console.log("connected");
      socket.on('data', function (data) {
        console.log(data.toString());
      });
    }).listen(port);
  }
});
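You would then start the listener by calling the method, for example (the port number here is just a placeholder):

Meteor.call('startNet', 5000);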
I am using node-http-proxy. However, in addition to relaying HTTP requests, I also need to listen to the incoming and outgoing data.
Intercepting the response data is where I'm struggling. Node's ServerResponse object (and, more generically, the WritableStream interface) doesn't broadcast a 'data' event. http-proxy seems to create its own internal request, which produces a ClientResponse object (which does broadcast the 'data' event), but this object is not exposed publicly outside the proxy.
Any ideas how to solve this without monkey-patching node-http-proxy or creating a wrapper around the response object?
A related issue in the node-http-proxy GitHub issue tracker seems to imply this is not possible. For future attempts by others, here is how I hacked around the issue:
you'll quickly find out that the proxy only calls the writeHead(), write() and end() methods of the res object
since res is already an EventEmitter, you can start emitting new custom events
listen for these new events to assemble the response data and then use it
var eventifyResponse = function (res) {
  var methods = ['writeHead', 'write', 'end'];
  methods.forEach(function (method) {
    var oldMethod = res[method];         // remember original method
    res[method] = function () {          // replace with a wrapper
      oldMethod.apply(this, arguments);  // call original method
      var args = Array.prototype.slice.call(arguments, 0);
      args.unshift("method_" + method);
      this.emit.apply(this, args);       // broadcast the event
    };
  });
  return res;
};

res = eventifyResponse(res);
var outputData = '';

// saveHeaders() and use_data() are your own handlers
res.on('method_writeHead', function (statusCode, headers) { saveHeaders(); });
res.on('method_write', function (data) { outputData += data; });
res.on('method_end', function (data) { use_data(outputData + (data || '')); });

proxy.proxyRequest(req, res, options);
This is a simple proxy server sniffing the traffic and writing it to console:
var http = require('http'),
    httpProxy = require('http-proxy');

//
// Create a proxy server with custom application logic
//
var proxy = httpProxy.createProxyServer({});

// assign events
proxy.on('proxyRes', function (proxyRes, req, res) {
  // collect response data
  var proxyResData = '';
  proxyRes.on('data', function (chunk) {
    proxyResData += chunk;
  });
  proxyRes.on('end', function () {
    var snifferData = {
      request: {
        data: req.body,
        headers: req.headers,
        url: req.url,
        method: req.method
      },
      response: {
        data: proxyResData,
        headers: proxyRes.headers,
        statusCode: proxyRes.statusCode
      }
    };
    console.log(snifferData);
  });
  // console.log('RAW Response from the target', JSON.stringify(proxyRes.headers, true, 2));
});

proxy.on('proxyReq', function (proxyReq, req, res, options) {
  // collect request data
  req.body = '';
  req.on('data', function (chunk) {
    req.body += chunk;
  });
  req.on('end', function () {
  });
});

proxy.on('error', function (err) {
  console.error(err);
});

// run the proxy server
var server = http.createServer(function (req, res) {
  // every time a request comes proxy it:
  proxy.web(req, res, {
    target: 'http://localhost:4444'
  });
});

console.log("listening on port 5556");
server.listen(5556);
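With the proxy running, any client pointed at port 5556 will have its request and response logged, assuming something is listening on the target port 4444, for example:

curl http://localhost:5556/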
I tried your hack but it didn't work for me. My use case is simple: I want to log the incoming and outgoing traffic from an Android app to our staging server, which is secured by basic auth.
https://github.com/greim/hoxy/
was the solution for me. My node-http-proxy always returned 500 (while a direct request to staging did not). Maybe the authorization headers were not forwarded correctly, or something like that.
Hoxy worked fine right from the start.
npm install hoxy [-g]
hoxy --port=<local-port> --stage=<your stage host>:<port>
As rules for logging I specified:
request: $aurl.log()
request: #log-headers()
request: $method.log()
request: $request-body.log()
response: $url.log()
response: $status-code.log()
response: $response-body.log()
Beware, this prints any binary content.
This error happens whenever the Node process makes an HTTP request to get user information from a web API.
The scenario is:
I'm running a TCP server with Node, and when it gets a "login" request from a client, it sends an HTTP GET request to another web API to retrieve the user's information.
As the number of users grows, the Node process starts throwing ETIMEDOUT errors when retrieving user info. And once the error happens, every request after that throws the same error.
I've tried performing the same request with wget and everything is fine, so I don't think it's a network problem.
Strangely, after increasing the open file limit to 100,000 with ulimit -n, things run fine until the user count grows to the next level.
The fetch function is here:
var fetchUserInfo = function (callback) {
  var http = require('http');
  var sys = require('util'); // the old 'sys' module is an alias for util; needed for sys.log below
  var opt = {
    agent: false,
    host: 'www.someapi.net',
    port: 80,
    path: '/userInfo.php'
  };
  var body = '';
  var req = http.request(opt, function (res) {
    res.setEncoding('utf8');
    res.on('data', function (chunk) {
      body += chunk;
    });
    res.on('end', function () {
      if (callback) {
        callback(body);
      }
    });
  });
  req.on('error', function (e) {
    sys.log("User info fetch error: " + e.message);
    if (callback) {
      callback();
    }
  });
  req.end();
};
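For reference, the function is called like this; the callback just receives the raw response body (or nothing on error):

fetchUserInfo(function (body) {
  if (body) {
    console.log('user info: ' + body);
  }
});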
My environment is Debian GNU/Linux 6.0 with node v0.4.10.
If I run this from the command line, the program stops after client.destroy():
var client = http.get(options, function (res) {
  console.log(res.statusCode);
  client.destroy();
});
However, that is not the case when I put client.destroy() inside the res.on('end') handler:
var client = http.get(options, function (res) {
  console.log(res.statusCode);
  res.on('end', function () {
    console.log("done");
    client.destroy();
  });
});
It throws an exception because the underlying socket is null, so I can't destroy it.
In this case, the program hangs and never ends. What can I do to stop it (other than process.exit())?
If it's a single request, you can just set shouldKeepAlive to false:
var request = http.get(options, function (res) {
  console.log(res.statusCode);
});

request.shouldKeepAlive = false;
or send a Connection: close header.
You can't access the socket because it has already been detached by the time the 'end' event handler runs.
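A sketch of the Connection: close variant; the host and path are placeholders, and res.resume() just drains the response so the connection can be released:

var http = require('http');

var request = http.get({
  host: 'example.com',
  path: '/',
  headers: { 'Connection': 'close' } // ask that the socket not be kept alive
}, function (res) {
  console.log(res.statusCode);
  res.resume(); // drain the response so the connection can close
});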
Call res.req.destroy().
See: https://nodejs.org/api/http.html#requestdestroyerror
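A sketch of what that looks like in context (host and path are placeholders); res.resume() is there so the 'end' event actually fires:

var http = require('http');

http.get({ host: 'example.com', path: '/' }, function (res) {
  console.log(res.statusCode);
  res.on('end', function () {
    console.log('done');
    res.req.destroy(); // destroy the request that produced this response instead of touching the socket
  });
  res.resume(); // consume the response so 'end' is emitted
});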
To your question:
A request created by http.get or http.request closes its socket (connection) by default. You don't have to close it.
To your code example:
Node.js registers its own res.on('end', responseOnEnd) handler before your res.on('end', yourCb).
responseOnEnd closes the socket. Because responseOnEnd is called before yourCb, you can't destroy the socket inside res.on('end', yourCb).