Crawling nearly 2,000 web pages with 20 concurrent requests at a time: the outgoing requests fail.

  node.js, question

The code isn't long; it's below. I'm crawling all the chapters of a web novel.

var http = require('http');
var $ = require('cheerio');
var async = require('async');
var iconv = require('iconv-lite');
var fs = require('fs');
var chapterNo = 1;

var url = 'http://www.biquku.com/0/761/',
    hrefList = {};

var curCount = 0;

// Fetch one chapter page, decode it from GBK and hand back the #content text.
var getChapter = function(url, cb) {
    ++curCount;

    console.log('read: ' + url + ', ' + curCount + ' concurrent');

    var req = http.request(url, function(res) {
        var buffer_arr = [];
        var buffer_len = 0;
        if (res.statusCode == 200) {
            res.on('data', function(chunk) {
                buffer_arr.push(chunk);
                buffer_len += chunk.length;
            });
            res.on('end', function() {
                var $content = $(iconv.decode(Buffer.concat(buffer_arr, buffer_len), 'gbk')).find('#content').text();
                --curCount;
                cb(null, $content);
            });
        } else {
            // Non-200 status: retry the same chapter.
            console.log('status: ' + res.statusCode);
            getChapter(url, cb);
        }
    });

    req.on('error', function(err) {
        console.log('request-err');
        console.error(err);
    });

    req.end();
};

// Fetch the index page, build one task per chapter link, then download the
// chapters with at most 20 requests in flight and write each one to a file.
var req = http.request(url, function(res) {
    var buffer_arr = [];
    var buffer_len = 0;
    res.on('data', function(chunk) {
        buffer_arr.push(chunk);
        buffer_len += chunk.length;
    });
    res.on('end', function() {
        var $html = $(iconv.decode(Buffer.concat(buffer_arr, buffer_len), 'gbk'));
        var $urls = $html.find('#list>dl>dd>a');
        var $a = '';

        for (var i = 0; i < $urls.length; i++) {
            $a = $($urls[i]);

            hrefList[$a.text()] = (function(url) {
                return function(cb) {
                    setTimeout(function() {
                        getChapter(url, cb);
                    }, 0);
                };
            })(url.concat($a.attr('href')));
        }

        console.time('novel');

        async.parallelLimit(hrefList, 20, function(err, res) {
            if (err) {
                console.log('parallel-err:');
                console.error(err);
            } else {
                for (var key of Object.keys(res)) {
                    var fileName = './' + key + '.txt';
                    (function(key) {
                        fs.writeFile(fileName, res[key], function(err) {
                            if (err) {
                                console.log('writefile-err:');
                                console.error(err);
                            } else {
                                console.log(key + ': success');
                            }
                        });
                    })(key);
                }
                console.timeEnd('novel');
            }
        });
    });
});

req.on('error', function(e) {
    console.error(e);
});

req.end();

Below is where the requests fail:
[screenshot: error output]

I hope someone can tell me how to solve this. Are the requests actually concurrent, given that I initiate each http request through setTimeout(func, 0)?
In later tests I found that a little over 200 chapters could be read successfully, and errors started appearing at around 300.

These really are concurrent requests. Twenty at a time may simply be too many for this server to handle. Try making the limit a little smaller, say 10 or 5 concurrent requests.
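
For example, only the second argument to async.parallelLimit would need to change; this is just a minimal sketch of that suggestion, with the rest of your code left exactly as it is:

    // Lower the concurrency limit from 20 to 5: parallelLimit keeps at most
    // this many chapter requests in flight at any one time.
    async.parallelLimit(hrefList, 5, function(err, res) {
        // ... same result handling as before ...
    });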

Update: I just ran your code on my machine and did not get the error described in the question. Maybe the problem is with your network for some other reason.