Does Node.js handle the client requests one by one? I have a project, which is using Node.js as a server-side proxy. According to my understanding, if you use a callback for a response, Node.js should respond to the new request without any delay, but actually the Node.js won't respond to the new request until it has finished the last callback. Is this the correct action or is there some incorrect code usage? Please help me on this. Thank you very much. Below is the code for re-sending requests to the back-end service.
var request = require('request');
var http = require('http');
function apiService() {}

// Generate one thin wrapper per supported HTTP verb; each simply
// delegates to requestAPI with the verb string filled in.
[["get", "GET"], ["post", "POST"], ["del", "DELETE"]].forEach(function (pair) {
  apiService.prototype[pair[0]] = function (context, payload, callback, url) {
    return this.requestAPI(context, payload, callback, url, pair[1]);
  };
});
/**
 * Forwards a client request to the back-end API.
 *
 * @param {Object}   context  - carries `config` (APIHost, port, baseDir, proxy)
 * @param {Object}   payload  - request body (POST/DELETE) or query params (GET)
 * @param {Function} callback - node-style callback(err, body)
 * @param {String}   url      - path appended under config.baseDir
 * @param {String}   method   - "GET" | "POST" | "DELETE"; anything else is a no-op
 */
apiService.prototype.requestAPI = function (context, payload, callback, url, method) {
  var config = context.config;
  var targetUrl = config.APIHost
    + (config.port == null ? "" : (":" + config.port))
    + "/" + config.baseDir
    + "/" + url;
  var requestObj = {
    url: targetUrl,
    json: true,
    // Dedicated agent per request so the shared global agent's maxSockets
    // limit (5 in older Node versions) cannot serialize concurrent requests.
    pool: new http.Agent()
  };
  if (config.proxy != null) {
    requestObj.proxy = config.proxy;
  }

  // Single response handler shared by all verbs (the original duplicated it
  // three times): propagate transport errors, otherwise hand back the body.
  function handleResponse(err, resp, body) {
    if (err) {
      callback(err);
      return;
    }
    callback(null, body);
  }

  switch (method) {
    case "POST":
      requestObj.body = payload;
      request.post(requestObj, handleResponse);
      break;
    case "GET":
      var parts = [];
      // Bug fix: the original `for (att in payload)` leaked an implicit
      // global and did not URL-encode keys or values.
      for (var att in payload) {
        parts.push(encodeURIComponent(att) + "=" + encodeURIComponent(payload[att]));
      }
      if (parts.length > 0) {
        requestObj.url += "?" + parts.join("&");
      }
      request.get(requestObj, handleResponse);
      break;
    case "DELETE":
      requestObj.body = payload;
      request.del(requestObj, handleResponse);
      break;
  }
};
Current process
client request1 -> Node
client request2 -> Node
Node server request1 ->backend
Node (waiting for event loop)
Node <- server response1 backend
client <- response1 Node
Node server request2 ->backend
Node <- server response2 backend
client <- response2 Node
What I think it should be
client request1 -> Node
client request2 -> Node
Node server request1 -> backend
Node (won't waiting for event loop)
Node server request2 -> backend
Node <- server response2 backend
client <-response2 Node
Node <- server response1 backend
client <-response1 Node
Does Node.js handle the client requests one by one? I have a project, which is using Node.js as a server-side proxy. According to my understanding, if you use a callback for a response, Node.js should respond to the new request without any delay, but actually the Node.js won't respond to the new request until it has finished the last callback. Is this the correct action or is there some incorrect code usage? Please help me on this. Thank you very much. Below is the code for re-sending requests to the back-end service.
var request = require('request');
var http = require('http');
function apiService() {}

// Generate one thin wrapper per supported HTTP verb; each simply
// delegates to requestAPI with the verb string filled in.
[["get", "GET"], ["post", "POST"], ["del", "DELETE"]].forEach(function (pair) {
  apiService.prototype[pair[0]] = function (context, payload, callback, url) {
    return this.requestAPI(context, payload, callback, url, pair[1]);
  };
});
/**
 * Forwards a client request to the back-end API.
 *
 * @param {Object}   context  - carries `config` (APIHost, port, baseDir, proxy)
 * @param {Object}   payload  - request body (POST/DELETE) or query params (GET)
 * @param {Function} callback - node-style callback(err, body)
 * @param {String}   url      - path appended under config.baseDir
 * @param {String}   method   - "GET" | "POST" | "DELETE"; anything else is a no-op
 */
apiService.prototype.requestAPI = function (context, payload, callback, url, method) {
  var config = context.config;
  var targetUrl = config.APIHost
    + (config.port == null ? "" : (":" + config.port))
    + "/" + config.baseDir
    + "/" + url;
  var requestObj = {
    url: targetUrl,
    json: true,
    // Dedicated agent per request so the shared global agent's maxSockets
    // limit (5 in older Node versions) cannot serialize concurrent requests.
    pool: new http.Agent()
  };
  if (config.proxy != null) {
    requestObj.proxy = config.proxy;
  }

  // Single response handler shared by all verbs (the original duplicated it
  // three times): propagate transport errors, otherwise hand back the body.
  function handleResponse(err, resp, body) {
    if (err) {
      callback(err);
      return;
    }
    callback(null, body);
  }

  switch (method) {
    case "POST":
      requestObj.body = payload;
      request.post(requestObj, handleResponse);
      break;
    case "GET":
      var parts = [];
      // Bug fix: the original `for (att in payload)` leaked an implicit
      // global and did not URL-encode keys or values.
      for (var att in payload) {
        parts.push(encodeURIComponent(att) + "=" + encodeURIComponent(payload[att]));
      }
      if (parts.length > 0) {
        requestObj.url += "?" + parts.join("&");
      }
      request.get(requestObj, handleResponse);
      break;
    case "DELETE":
      requestObj.body = payload;
      request.del(requestObj, handleResponse);
      break;
  }
};
Current process
client request1 -> Node
client request2 -> Node
Node server request1 ->backend
Node (waiting for event loop)
Node <- server response1 backend
client <- response1 Node
Node server request2 ->backend
Node <- server response2 backend
client <- response2 Node
What I think it should be
client request1 -> Node
client request2 -> Node
Node server request1 -> backend
Node (won't waiting for event loop)
Node server request2 -> backend
Node <- server response2 backend
client <-response2 Node
Node <- server response1 backend
client <-response1 Node
Updated:
// Fire the service call and adapt Angular's success/error callbacks to the
// node-style (err, result) convention expected by `callback`.
var requestBody = {
  action: 'list'
};
$http.post('../../service/buildingBlockService', requestBody)
  .success(function (buildingBlocks) {
    callback(null, buildingBlocks);
  })
  .error(function (error) {
    callback(error);
  })
The client requests are from Angular $http
. On one page, there are several requests at the same time. According to my Fiddler monitor, the requests from the browser are sent without waiting, but the server re-send is one by one.
// Express endpoint backing the Angular $http.post above; dispatches each
// request to BuildingBlockService, which proxies it to the back end.
app.post('/service/buildingBlockService', function (ctx, payload, req, res) {
  var BuildingBlockService = require('./service/buildingBlockService');
  var service = new BuildingBlockService();
  service.service(ctx, payload, function (error, result) {
    // Bug fix: the original ignored `error` and always replied 200, sending
    // an undefined body on failure; surface back-end failures as 500.
    if (error) {
      res.send(500, error);
      return;
    }
    res.send(200, result);
  });
});
Updated: The middle layer of server request.
var service = require('./apiService');
// Service facade for building-block operations; it holds no state, so the
// constructor and init intentionally do nothing.
function BuildingBlockService() {}

BuildingBlockService.prototype.init = function () {};
// Route the requested action to the corresponding back-end API call and pass
// the caller's callback straight through to the API layer.
BuildingBlockService.prototype.service = function (context, payload, callback) {
  var api = new service();
  switch (payload.action) {
    case "list":
    default:
      // Only 'list' exists today; unknown actions intentionally fall back
      // to the same BuildingBlocks listing call.
      api.get(context, null, callback, "BuildingBlocks");
      break;
  }
};

module.exports = BuildingBlockService;
Share
Improve this question
edited Aug 18, 2015 at 8:04
espider
asked Aug 18, 2015 at 3:21
espiderespider
1331 silver badge6 bronze badges
5
- If you launch multiple requests to the same server in parallel, how the responses come back will be determined by the server that you sent the request to. Some servers will work on multiple requests in parallel (in threads, for example) and other servers will work on one request at a time. That depends upon the server design and the particular type of request. Your node.js server will process the responses in the order they are received, so whichever response gets back to your server first is the first one to start being processed. – jfriend00 Commented Aug 18, 2015 at 3:24
- You've shown some relevant code, but you've not shown the code that actually calls the shown code to produce the log you are asking about. Please show the calling code. – jfriend00 Commented Aug 18, 2015 at 3:27
- Thank you very much for your quick response. I have updated the client-side request code. I think Node.js uses non-blocking I/O — does that include the web request? Or does non-blocking I/O just apply to the whole process of one client request? I have used Eclipse to debug Node.js. Yes, the server-side request won't block the other code, but it blocked the new client request. Is there some Node.js configuration that needs to be changed to support this? – espider Commented Aug 18, 2015 at 3:43
- It seems you're asking about the behavior of the code that handles incoming proxy requests to your node.js server when you get multiple client requests in flight at the same time. You aren't showing that proxy code so I can't really comment on that code without seeing it. You also aren't showing the client code that actually launches multiple requests in flight at the same time. The client code you have included seems to launch just one client request. – jfriend00 Commented Aug 18, 2015 at 4:11
- Thank you very much. The client-side requests are initiated by Angular client controls when they are loading. I think the client code has no problems, because from the Fiddler monitor all the requests are sent to the Node.js server. Yes, what I am asking about is the behavior of Node.js when handling multiple requests at the same time. Does it process each one only after finishing the last request, or does it push the callback into the event loop and move on to the next, then — when the proxy work finishes — trigger the callback in the event loop and send back the response? – espider Commented Aug 18, 2015 at 4:23
1 Answer
Reset to default 15
Does Node.js handle client requests one by one?
Yes and No. node.js runs your JS single threaded. That means that only one JS thread of execution is running at any given time. So, if you had two requests like this:
// don't use this in real code, it's to simulate a point
// Busy-waits for t milliseconds, pinning the CPU the whole time.
function spin(t) {
  var deadline = Date.now() + t;
  while (Date.now() < deadline) {
    // intentionally empty: burn CPU until the deadline passes
  }
}
// Two routes whose handlers are pure CPU work, demonstrating how synchronous
// processing blocks the single JS thread.  Bug fix: the original snippet used
// the invalid form `app.post("/route1", req, res) {` — the handler must be a
// function expression, and the app.post(...) call must be closed with `);`.
app.post("/route1", function (req, res) {
  console.log("starting processing of /route1 request");
  // simulate taking several seconds of pure CPU to make a response
  spin(2000);
  res.send("done 1");
  console.log("finished processing of /route1 request");
});

app.post("/route2", function (req, res) {
  console.log("starting processing of /route2 request");
  // simulate taking several seconds of pure CPU to make a response
  spin(2000);
  res.send("done 2");
  console.log("finished processing of /route2 request");
});
And a /route1 request was immediately followed by a /route2 request, then the node.js server would process the /route1 request and not be able to do anything else until it was done with that request because the CPU was kept busy the entire time.
So, this would produce a log like this:
starting processing of /route1 request
finished processing of /route1 request
starting processing of /route2 request
finished processing of /route2 request
But, it is relatively rare that requests take a long time purely for CPU reasons. Often requests involve some sort of I/O (files to read, database queries, other servers to contact, etc...). If that I/O is done in an async fashion using async IO and not using synchronous IO, then multiple requests can easily be in flight at the same time and will be in flight at the same time because while the node.js server is waiting for I/O requests to complete, it is free to serve other requests and will serve other requests.
So, if you had this server code:
// The same two routes, now doing async I/O instead of CPU work, so both
// requests can be in flight at once.  Bug fixes: the original snippet used the
// invalid form `app.post("/route1", req, res) {` (missing function expression
// and closing `);`), the URL was truncated to "http://www.google.", the
// comment wrongly said "pure CPU" for an I/O call, and errors were silently
// swallowed, leaving the client hanging with no response.
app.post("/route1", function (req, res) {
  console.log("starting processing of /route1 request");
  // simulate several seconds of async network I/O before responding
  request('http://www.google.com', function (error, response, body) {
    if (!error && response.statusCode == 200) {
      res.send("done 1");
      console.log("finished processing of /route1 request");
    } else {
      // always answer the client, even on failure
      res.send(500);
    }
  });
});

app.post("/route2", function (req, res) {
  console.log("starting processing of /route2 request");
  // simulate several seconds of async network I/O before responding
  request('http://www.google.com', function (error, response, body) {
    if (!error && response.statusCode == 200) {
      res.send("done 2");
      console.log("finished processing of /route2 request");
    } else {
      // always answer the client, even on failure
      res.send(500);
    }
  });
});
And a /route1 request was immediately followed by a /route2 request, then you would likely see this log (the order in which the /route1 and /route2 responses finish is not guaranteed — they could be in any order), but both responses will be processed in parallel:
starting processing of /route1 request
starting processing of /route2 request
finished processing of /route1 request
finished processing of /route2 request
If your node.js proxy server appears to be exhibiting serial processing behavior rather than parallel processing behavior, then it could be that there is some sort of implementation issue in how you are implementing the proxy, as it is certainly capable of having multiple requests in flight at the same time.