抓获声卡数据,通过websocket推送到h5页面,为什么声音会失真呢?
发布于 7 年前 作者 Einsy 4840 次浏览 来自 分享

抓获声卡数据,通过websocket推送到h5页面,为什么声音会失真呢?

app.js 服务器端代码，3000 是 web 服务器监听端口。record.exe 捕获系统内放的声音，通过 tcp client 推送到 8888 端口；tcpserver 监听 8888 端口，只接受一个连接，接收另外一个程序 record.exe 推送的 pcm 流（同时录音保存成 .wav 文件到本地）。9090 是 websocket 服务，负责将声音流推送到前端网页上。整个流程是跑通了的，但是有个问题：为什么前端播放的声音有噪音呢？听着很勉强，很不干净，而 record.exe 保存的声音文件播放却无噪音，很正常。

/**
 * Module dependencies.
 *
 * Express web app: serves the H5 page on port 3000 (or $PORT).
 * NOTE: the original paste used typographic quotes (‘…’/“…”), which are
 * not valid JavaScript string delimiters — restored to straight quotes.
 */
var express = require('express');
var routes = require('./routes');
var user = require('./routes/user');
var http = require('http');
var path = require('path');
var ws = require('ws');

var app = express();

// All environments: 3000 is the web-server listen port.
app.set('port', process.env.PORT || 3000);
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'ejs');
app.use(express.favicon());
app.use(express.logger('dev'));
app.use(express.json());
app.use(express.urlencoded());
app.use(express.methodOverride());
app.use(app.router);
app.use(express.static(path.join(__dirname, 'public')));

// Development only: show full error pages.
if ('development' == app.get('env')) {
  app.use(express.errorHandler());
}

app.get('/', routes.index);
app.get('/users', user.list);

http.createServer(app).listen(app.get('port'), function () {
  console.log('Express server listening on port ' + app.get('port'));
});

// #####################################################################################################
// https://www.hongweipeng.com/index.php/archives/825/

var net = require('net');
var sourceSocket;   // TCP socket of the PCM source (record.exe)
var wsocketObject;  // most recently connected browser WebSocket

// BUG FIX: the WebSocket server must be created ONCE at startup. The original
// code constructed `new ws.Server({port:9090})` inside the TCP 'connection'
// callback, so every additional TCP connection tried to re-bind port 9090 and
// threw EADDRINUSE.
var wsServer = new ws.Server({ port: 9090 });
wsServer.on('connection', function (wsocket) {
  // Only the latest browser connection receives audio (single-listener design).
  wsocketObject = wsocket;
  wsocket.on('message', function (data) {
    console.log(['message', data]);
  });
});
console.log('ws server is running 9090');

// TCP server on 8888: receives the raw PCM stream pushed by record.exe and
// relays each chunk to the browser over the WebSocket.
var tcpServer = net.createServer(function (socket) {
  console.log('客户端已连接');
  console.log('socket.address()', socket.address());
  if (socket.address().address == '::ffff:127.0.0.1') {
    sourceSocket = socket;
  }

  socket.on('data', function (data) {
    // A plain-text "source" handshake marks this socket as the PCM source.
    if (data.toString() == 'source') {
      sourceSocket = socket;
      return;
    }
    if (wsocketObject) {
      // NOTE(review): TCP chunk boundaries are arbitrary; the browser wraps
      // each chunk as an independent WAV "file", which produces the audible
      // clicks/noise. (Removed a dead `new Buffer(data)` — deprecated and unused.)
      wsocketObject.send(data);
      console.log('send Buffer ' + data.length);
    }
  });

  socket.on('timeout', function () {
    console.log('socket timeout');
  });

  // One shared handler logs end/error/close events for this socket.
  var fn = function (error) {
    console.log(error);
  };

  socket.on('end', fn);
  socket.on('error', fn);
  socket.on('close', fn);
});

tcpServer.on('error', function (err) {
  throw err;
});

tcpServer.listen(8888);
console.log('audio tcpServer run 8888');

前端网页代码:
<!DOCTYPE html>
<html>
<head>
  <!-- NOTE: restored straight quotes; the pasted version used typographic
       quotes which are invalid in HTML attributes and in JavaScript. -->
  <meta charset="utf-8">
  <meta name="viewport" content="width=320, initial-scale=1" />
  <title>audio</title>
  <style type="text/css">
    body {
      background: #333;
      text-align: center;
      margin-top: 10%;
    }
    #videoCanvas {
      width: 640px;
      height: 480px;
    }
  </style>
  <script src="jquery-1.11.1.min.js"></script>
  <script src="base64.js"></script>
  <script src="audio.js"></script>
</head>
<body>
  <audio id="audio" controls autoplay></audio>
  <p></p>

  <script type="text/javascript">
    var audio = document.querySelector('audio');
    window.AudioContext = window.AudioContext || window.webkitAudioContext;

    // Binary WebSocket feed of raw PCM chunks from the Node relay (port 9090).
    var ws = new WebSocket('ws://' + location.hostname + ':9090');
    ws.binaryType = 'arraybuffer';
    ws.onmessage = function (message) {
      console.log('message.data', message.data);
      AudioClass.PlaySound(message.data);
    };

    AudioClass.init();
  </script>
</body>
</html>

audio.js的代码

var AudioClass = {};

/**
 * Prepare the playback helpers: create the shared AudioContext and a
 * FileReader. Must be called once before PlaySound() is used.
 */
AudioClass.init = function () {
  this.audioContext = new AudioContext();
  this.FileReader = new FileReader();
};

// duilie (队列 / queue — TODO: chunks should really be queued, see note on
// decodeAudioData). Play one incoming raw-PCM chunk.
// FIX: the pasted version used typographic quotes (“evt”), which is a
// JavaScript syntax error — restored to straight quotes.
AudioClass.PlaySound = function (evtdata, fn) {
  var self = this;
  self.decodeAudioData(evtdata, function (evt) {
    console.log('evt', evt);
  });
};

/**
 * Wrap one raw-PCM ArrayBuffer in a WAV header, decode it, and play it.
 *
 * NOTE(review): every chunk is scheduled with `start(0)` — i.e. "now" —
 * independently of the previous chunk, so successive chunks overlap or
 * leave gaps. This is the most likely cause of the reported noise; a fix
 * would keep a running start-time cursor on the AudioContext clock.
 *
 * FIX: restored straight quotes (“error” was a syntax error in the paste).
 *
 * @param {ArrayBuffer} result raw 16-bit PCM data from the WebSocket
 * @param {Function} [fn] invoked after playback has been scheduled
 */
AudioClass.decodeAudioData = function (result, fn) {
  var self = this;
  var data = AudioClass.wavify(result, 2, 44100); // stereo, 44.1 kHz — must match record.exe output
  self.audioContext.decodeAudioData(data, function (buffer) {
    var ABSNode = self.audioContext.createBufferSource();
    ABSNode.buffer = buffer;
    ABSNode.connect(self.audioContext.destination);
    ABSNode.start(0);
    fn && fn();
  }, function (evt) {
    console.log('error', evt);
  });
};

/**
 * [concat 合并buffer] Join two ArrayBuffers into a single new one.
 * @param {ArrayBuffer} buffer1 placed first in the result
 * @param {ArrayBuffer} buffer2 appended after buffer1
 * @return {ArrayBuffer} new buffer containing buffer1 followed by buffer2
 */
AudioClass.concat = function (buffer1, buffer2) {
  var joined = new Uint8Array(buffer1.byteLength + buffer2.byteLength);
  joined.set(new Uint8Array(buffer1));
  joined.set(new Uint8Array(buffer2), buffer1.byteLength);
  return joined.buffer;
};

/**
 * [wavify] Prepend a canonical 44-byte RIFF/PCM WAV header to raw 16-bit
 * PCM data so decodeAudioData can parse it.
 *
 * Fixes vs. the pasted original:
 *  - removed a stray `a` token after the DataView construction (ReferenceError);
 *  - RIFF chunk size is dataLength + 36 (file size minus 8), not
 *    dataLength/2 + 44;
 *  - byteRate now accounts for the channel count
 *    (sampleRate * numberOfChannels * 2, i.e. 16-bit samples);
 *  - byteRate (offset 28) and blockAlign (offset 32) are written
 *    little-endian like every other field — the original wrote them
 *    big-endian, corrupting the header, a plausible cause of distortion;
 *  - restored straight quotes in the magic-byte character literals.
 *
 * @param {ArrayBuffer} data raw 16-bit PCM samples
 * @param {number} numberOfChannels channel count (e.g. 2)
 * @param {number} sampleRate samples per second (e.g. 44100)
 * @return {ArrayBuffer} header + data as one ArrayBuffer
 */
AudioClass.wavify = function (data, numberOfChannels, sampleRate) {
  var header = new ArrayBuffer(44);
  var d = new DataView(header);

  // "RIFF" chunk descriptor
  d.setUint8(0, 'R'.charCodeAt(0));
  d.setUint8(1, 'I'.charCodeAt(0));
  d.setUint8(2, 'F'.charCodeAt(0));
  d.setUint8(3, 'F'.charCodeAt(0));
  d.setUint32(4, data.byteLength + 36, true); // total file size - 8

  d.setUint8(8, 'W'.charCodeAt(0));
  d.setUint8(9, 'A'.charCodeAt(0));
  d.setUint8(10, 'V'.charCodeAt(0));
  d.setUint8(11, 'E'.charCodeAt(0));

  // "fmt " sub-chunk (uncompressed PCM)
  d.setUint8(12, 'f'.charCodeAt(0));
  d.setUint8(13, 'm'.charCodeAt(0));
  d.setUint8(14, 't'.charCodeAt(0));
  d.setUint8(15, ' '.charCodeAt(0));
  d.setUint32(16, 16, true);                                // fmt chunk size
  d.setUint16(20, 1, true);                                 // audio format: 1 = PCM
  d.setUint16(22, numberOfChannels, true);
  d.setUint32(24, sampleRate, true);
  d.setUint32(28, sampleRate * numberOfChannels * 2, true); // byte rate
  d.setUint16(32, numberOfChannels * 2, true);              // block align
  d.setUint16(34, 16, true);                                // bits per sample

  // "data" sub-chunk
  d.setUint8(36, 'd'.charCodeAt(0));
  d.setUint8(37, 'a'.charCodeAt(0));
  d.setUint8(38, 't'.charCodeAt(0));
  d.setUint8(39, 'a'.charCodeAt(0));
  d.setUint32(40, data.byteLength, true);

  return AudioClass.concat(header, data);
};

现在的问题,为什么有噪音呢?有兴趣的朋友可以交流交流。 欢迎大神指点

2 回复

顶一下,哪位大神看看

  1. 你先把代码贴好吧。
  2. wav 不是流媒体格式吧?直接播放你存好的那个 wav 文件正常不?
回到顶部