我用node-crawler进行爬虫,爬取一段时间后会出现这个错误,有大神知道如何解决不?
发布于 19 天前 作者 zhoujinhai 328 次浏览 来自 问答

<— Last few GCs —>

[9852:000001B18947FDB0] 770113 ms: Mark-sweep 1401.2 (1457.9) -> 1398.7 (1457.9) MB, 16.4 / 0.0 ms (+ 0.0 ms in 0 steps since start of marking, biggest step 0.0 ms, walltime since start of marking 18 ms) allocation failure GC in old space requested [9852:000001B18947FDB0] 770136 ms: Mark-sweep 1398.7 (1457.9) -> 1398.6 (1426.4) MB, 21.0 / 0.0 ms last resort [9852:000001B18947FDB0] 770158 ms: Mark-sweep 1398.6 (1426.4) -> 1398.6 (1425.9) MB, 22.5 / 0.0 ms last resort

<— JS stacktrace —>

==== JS stack trace =========================================

Security context: 0000003FB9CA9891 <JS Object> 2: new constructor(aka Request) [E:\nodecrawler\node_modules\request\request.js:128] [pc=0000004DC395835B](this=000000583D4E4AD1 <a Request with map 0000035C122A1B51>,options=000000583D4E4DC9 <an Object with map 0000025475FDD539>) 4: request(aka request) [E:\nodecrawler\node_modules\request\index.js:54] [pc=0000004DC3957474](this=000000530CD82311 <undefined>,uri=000000583D4E8241 <an O…

FATAL ERROR: CALL_AND_RETRY_LAST Allocation failed - JavaScript heap out of memory

3 回复

这是我的代码，是不是由于循环太多导致的？

var crawler = require('crawler');
var fs = require('fs');
var debug = require('debug')('nightmare:crawler');

console.time("程序运行时间");//开始运行时间
// Load the list of videos to crawl from videoList.json (synchronously).
// FIX: fs.readFileSync is synchronous and accepts NO callback — the original
// passed a callback as the `options` argument, so it was silently ignored and
// the error handling inside it could never run. Use an explicit encoding and
// a try/catch instead; readFileSync throws on failure.
var read;
try {
	read = fs.readFileSync(__dirname + '/videoList.json', 'utf8');
	debug('读取videoList文件');
	console.log('successed!');
} catch (err) {
	console.log(err);
	throw err; // cannot continue without the video list
}
var videoList = JSON.parse(read);

// Accumulates one entry per crawled page for the whole run.
// NOTE(review): this array is never flushed or capped — with a large
// videoList it grows without bound, which is the most likely cause of the
// "JavaScript heap out of memory" crash above. Confirm by checking what
// `information()` pushes into it.
var informationList = [];
// Crawler instance: every URL pushed via c.queue() is fetched and the
// result is delivered to this callback.
var c = new crawler({
	// maxConnection : 1000,
	//rateLimit : 1000,
	forceUTF8 : true,
	callback : function(error,res,done){
		if(error){
			console.log(error);
		}else{	
			// `information` is not defined in this file — presumably it parses
			// `res` and pushes into informationList; verify its definition.
			// NOTE(review): `done` is passed to information() AND called again
			// below — if information() also invokes it, done runs twice; confirm.
			information(res,done);
			done();
		}
		// console.log(informationList);
		// console.log(informationList.length);
	}
});

// Queue every video page for crawling through the local proxy.

// FIX: the loop index `i` was an implicit global (declared without
// var/let); declare it so it does not leak into the global scope.
for(var i = 0; i < videoList.length; i++){
	c.queue({
		uri: videoList[i].url,
		proxy: 'http://127.0.0.1:61481'
	});
}

// 'drain' fires once the crawl queue is empty: dump everything that was
// collected and stop the wall-clock timer started at launch.
c.on('drain', function onDrain() {
	var collected = informationList;
	console.log(collected);
	console.log(collected.length);
	console.log('finished!');
	console.timeEnd("程序运行时间"); // pairs with console.time at startup
});

是不是 HTTP 请求的结果全都保存在内存里了？

@godghdai 嗯啊 是不是这样造成内存满了

回到顶部