A small crawler written with Node.js async — requesting expert guidance.

  node.js, question
var cnodeUrl = "https://segmentfault.com/";

// Store links to all topics
var topicUrls = [];

/**
 * Runs once the 'topic_html' event has fired `topicUrls.length` times.
 *
 * BUG (this is what went wrong): ep.after is registered here, while
 * topicUrls is still empty, so the count is captured as 0 and the handler
 * fires immediately — before a single topic URL has been collected.
 * The count must be known (i.e. topicUrls must be populated) at the moment
 * ep.after is registered, and 'topic_html' must be emitted once PER topic,
 * not once in total.
 */
ep.after('topic_html', topicUrls.length, function (topics) {

  var concurrencyCount = 0; // current number of in-flight requests

  /**
   * Fetch one topic page and extract its title and answer heading.
   * @param {string} myurl - topic page URL
   * @param {Function} callback - node-style callback(err, result)
   */
  var fetchUrl = function (myurl, callback) {
    var fetchStart = new Date().getTime();
    concurrencyCount++;
    console.log('current concurrencyCount:', concurrencyCount, ', fetching', myurl);

    superagent.get(myurl).end(function (err, ssres) {
      if (err) {
        // `return` is required: without it execution falls through and
        // callback is invoked a second time below.
        return callback(err, myurl + ' error happened!');
      }
      var time = new Date().getTime() - fetchStart;
      console.log('fetched ' + myurl + ' successfully, took ' + time + ' ms');
      concurrencyCount--;

      var $ = cheerio.load(ssres.text);
      // was `var reslut = ...` but `callback(null, result)` below referenced
      // the (undefined) name `result` — an instant ReferenceError.
      var result = {
        title: $('.question__author>a>strong').text(),
        answer: $('#answers-title').text()
      };
      callback(null, result);
    });
  };

  // Cap concurrency at 5; the final callback receives the whole result array.
  async.mapLimit(topicUrls, 5, function (myurl, callback) {
    fetchUrl(myurl, callback);
  }, function (err, result) {
    console.log('===== result: ======\n', result);
    // res.send(result);
  });
});

// Collect all topic links into topicUrls
superagent.get(cnodeUrl).end(function (err, sres) {
  if (err) {
    // NOTE(review): `next` is not defined in this snippet — presumably an
    // Express middleware parameter; verify against the enclosing handler.
    return next(err);
  }
  var $ = cheerio.load(sres.text);
  $('.stream-list').each(function (idx, element) {
    var $element = $(element).find('.title>a');
    var href = url.resolve(cnodeUrl, $element.attr('href'));
    topicUrls.push(href);
  });
  console.log('get topicUrls successful!\n', topicUrls);
  // BUG: emitted exactly once, and with a status string instead of topic
  // HTML — but ep.after above expected `topicUrls.length` emissions.
  ep.emit('topic_html', 'get topicUrls successful');
});

The result is:

clipboard.png

May I know what went wrong?

Rewrite the following:

var async = require('async');
var cheerio = require('cheerio');
var superagent = require('superagent');
var url = require('url');

var cnodeUrl = "https://segmentfault.com/";

// Store links to all topics
var topicUrls = [];

// Collect all topic links first, THEN crawl them with bounded concurrency.
// Starting async.mapLimit only after topicUrls is fully populated removes
// the need for eventproxy counting entirely (the original bug was
// registering ep.after while the list was still empty).
superagent.get(cnodeUrl).end(function (err, sres) {
  if (err) {
    // NOTE(review): `next` is undefined in a standalone script — replace
    // with `throw err` or a console.error + return unless this runs inside
    // an Express handler.
    return next(err);
  }
  var $ = cheerio.load(sres.text);
  $('.stream-list__item').each(function (idx, element) {
    var $element = $(element).find('.title>a');
    var href = url.resolve(cnodeUrl, $element.attr('href'));
    topicUrls.push(href);
  });
  // Cap concurrency at 5; the final callback receives the whole result array.
  async.mapLimit(topicUrls, 5, function (myurl, callback) {
    fetchUrl(myurl, callback);
  }, function (err, result) {
    console.log('===== result: ======', result);
  });
});
 
 
 function fetchUrl(myurl,callback) {
 var fetchStart = new Date().getTime();
 superagent.get(myurl).end(function(err, ssres) {
 if (err) {
 Callback(err, myurl plus 'errorhappened!'  );
 bracket
 var time = new Date().getTime() - fetchStart;
 Console.log ('grab' plus myurl plus' success',' time plus' milliseconds');
 // concurrencyCount--;
 
 var $ = cheerio.load(ssres.text);
 var reslut = {
 title: $('.question__author>a>strong').text(),
 answer: $('#answers-title').text()
 };
 callback(null, reslut);
 })
 bracket

For the code above, I believe you used this as a reference:

https://github.com/alsotang/node-lessons/blob/master/lesson4/app.js

Based on that lesson, it looks like you used the eventproxy method incorrectly: `ep.after` must be registered with the final count (after `topicUrls` is populated), and `ep.emit('topic_html', ...)` must be called once per topic, not once in total.