我已经使用网络音频 api 将麦克风连接到卷积器到分析仪到 flot gui 以绘制频谱。为了测试,我将卷积器的缓冲区设置为统一,但我没有得到任何输出。如果我绕过卷积器并将麦克风直接连接到分析仪,它就可以工作。你能帮忙吗?
在下面的代码中use_convolver
确定是否绕过卷积器。
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta charset="utf-8">
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.4/jquery.min.js"></script>
<script src="http://www.flotcharts.org/flot/jquery.flot.js" type="text/javascript"></script>
</head>
<body>
<h1>Audio Spectrum</h1>
<div id="placeholder" style="width:400px; height:200px; display: inline-block;">
</div>
<script>
var microphone;
var analyser;
var convolver;
//user media
navigator.getUserMedia = (navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia);
if (navigator.getUserMedia) {
console.log('getUserMedia supported.');
navigator.getUserMedia(
// constraints - only audio needed for this app
{
audio : true,
echoCancellation : true
},
// Success callback
user_media_setup,
// Error callback
function(err) {
console.log('The following gUM error occured: ' + err);
});
} else {
console.log('getUserMedia not supported on your browser!');
};
function user_media_setup(stream) {
console.log('user media setup');
// set up forked web audio context, for multiple browsers
// window. is needed otherwise Safari explodes
audioCtx = new (window.AudioContext || window.webkitAudioContext)();
//microphone
microphone = audioCtx.createMediaStreamSource(stream);
//analyser
analyser = audioCtx.createAnalyser();
analyser.fftSize = 1024;
analyser.smoothingTimeConstant = 0.85;
//convolver
convolver = audioCtx.createConvolver();
convolver.normalize = true;
convolverBuffer = audioCtx.createBuffer(1, 1, audioCtx.sampleRate);
// convolverBuffer[0] = 1; //wrong
convolverChannel = convolverBuffer.getChannelData(0);
convolverChannel[0] = 1;
convolver.buffer = convolverBuffer;
//connectivity
var use_convolver = false;
if (use_convolver) {
//through convolver:
microphone.connect(convolver);
convolver.connect(analyser);
} else {
//direct:
microphone.connect(analyser);
}
visualize();
}
function visualize() {
console.log('visualize');
dataArray = new Float32Array(analyser.frequencyBinCount);
draw = function() {
analyser.getFloatFrequencyData(dataArray);
var data = [];
for (var i = 0; i < dataArray.length; i++) {
freq = audioCtx.sampleRate * i / dataArray.length / 2;
data.push([freq, dataArray[i]]);
}
var options = {
yaxis : {
min : -200,
max : 0
}
};
$.plot("#placeholder", [data], options);
window.requestAnimationFrame(draw);
};
window.requestAnimationFrame(draw);
}
</script>
</body>
</html>