0
The code below is from a web page that records audio. I have a problem that I can't solve — I've tried many approaches and nothing works. When I run the code on a desktop it works normally and the audio comes out clean; the real problem happens when I run the same test on Android 6.0 or 7.0: the whole process completes, but the recorded voice comes out stuttering. I haven't been able to fix it. Could someone with more experience help me?
NOTE: I use "https" on the page.
// Web Audio recording state. `audioContext` holds the AudioContext
// constructor, `context` the instance created from it.
var audioContext = null;
var context = null;
var volume = null; // GainNode between the mic source and the recorder
var audioInput = null; // MediaStreamSource wrapping the getUserMedia stream
var recorder = null; // ScriptProcessor (or legacy JavaScriptNode) tap
var recordingLength = 0; // total samples captured per channel so far
var leftchannel = []; // Float32Array chunks for channel 0
var rightchannel = []; // Float32Array chunks for channel 1
var bufferSize = 2048; // ScriptProcessor buffer size (samples per callback)
var sampleRate = 48000; // overwritten with context.sampleRate once created
// getUserMedia constraints: audio only, no video.
var requestStreamReadPermission = {
audio: true
};
$scope.canRecordAudio = false; // flipped to true once the mic is wired up
// -
// Legacy cross-browser shim: pick whichever prefixed getUserMedia exists.
navigator.getUserMedia = (navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia);
// -
// Write each character of `string` as one byte (its UTF-16 code unit,
// assumed ASCII) into `view` starting at `offset`. Used for the literal
// chunk tags ('RIFF', 'WAVE', 'fmt ', 'data') of the WAV header.
var writeUTFBytes = function(view, offset, string) {
  Array.prototype.forEach.call(string, function(ch, i) {
    view.setUint8(offset + i, ch.charCodeAt(0));
  });
};
// -
// Interleave two mono Float32Arrays of equal length into one stereo
// array laid out as L0, R0, L1, R1, ... (the PCM sample order a stereo
// WAV file expects). Returns a new Float32Array of combined length.
var interleave = function(leftChannel, rightChannel) {
  var total = leftChannel.length + rightChannel.length;
  var out = new Float32Array(total);
  var src = 0;
  var dst = 0;
  while (dst < total) {
    out[dst++] = leftChannel[src];
    out[dst++] = rightChannel[src];
    src++;
  }
  return out;
};
// -
// Flatten a list of Float32Array chunks into one contiguous
// Float32Array of `recordingLength` samples, preserving chunk order.
var mergeBuffers = function(channelBuffer, recordingLength) {
  var merged = new Float32Array(recordingLength);
  var writePos = 0;
  channelBuffer.forEach(function(chunk) {
    merged.set(chunk, writePos);
    writePos += chunk.length;
  });
  return merged;
};
// -
// Failure callback for getUserMedia: log the raw error for debugging
// and surface a translated "check your connection" message to the user.
var errorGetUserMedia = function(error) {
  console.log(error);
  GeneralMessageService.clean();
  var message = $filter('translate')('chat.mobile.checkorwaitforconnection');
  GeneralMessageService.addMessage(message);
};
// -
// Success callback for getUserMedia: wires microphone stream ->
// GainNode -> ScriptProcessor -> destination, and snapshots raw PCM
// chunks on every audio-process tick while recording is active.
var successGetUserMedia = function(stream) {
audioContext = window.AudioContext || window.webkitAudioContext;
context = new audioContext();
// Retrieve the actual sample rate in use; it is written into the WAV header later.
sampleRate = context.sampleRate;
volume = context.createGain();
audioInput = context.createMediaStreamSource(stream);
audioInput.connect(volume);
var numberOfInputChannels = 2;
var numberOfOutputChannels = 2;
// createScriptProcessor is the spec name; createJavaScriptNode is the
// deprecated pre-spec fallback for very old WebKit builds.
if (context.createScriptProcessor) {
recorder = context.createScriptProcessor(bufferSize, numberOfInputChannels, numberOfOutputChannels);
} else {
recorder = context.createJavaScriptNode(bufferSize, numberOfInputChannels, numberOfOutputChannels);
}
// NOTE(review): the parameter here is an AudioProcessingEvent, not a
// MediaStream — the name `stream` shadows the outer getUserMedia stream.
recorder.onaudioprocess = function(stream) {
if (!$scope.recordingAudio) {
return;
}
var left = stream.inputBuffer.getChannelData(0);
var right = stream.inputBuffer.getChannelData(1);
// Copy the channel data: the browser reuses the underlying buffers
// between callbacks, so they must be snapshotted now.
leftchannel.push(new Float32Array(left));
rightchannel.push(new Float32Array(right));
recordingLength += bufferSize;
};
volume.connect(recorder);
// A ScriptProcessor only fires onaudioprocess while routed to a destination.
recorder.connect(context.destination);
$scope.canRecordAudio = true;
};
// -
// Request microphone access if any getUserMedia variant exists after the
// shim above; otherwise report the failure through the shared error handler.
if (!!navigator.getUserMedia) {
navigator.getUserMedia(
requestStreamReadPermission,
successGetUserMedia,
errorGetUserMedia
);
} else {
errorGetUserMedia('UserMedia is empty');
}
// -
// Reset capture state so a new recording starts from scratch.
// (The onaudioprocess handler only appends while $scope.recordingAudio
// is true, so clearing the buffers here is sufficient.)
var startRecording = function() {
  leftchannel.length = 0;
  rightchannel.length = 0;
  recordingLength = 0;
};
// -
// Assemble the captured PCM chunks into a WAV (RIFF) file, wrap it in a
// Blob, and hand a blob: URL to the chat layer via addAudioMessage().
var stopRecording = function() {
  // Flatten each channel's chunk list, then interleave into L/R/L/R order.
  var leftBuffer = mergeBuffers(leftchannel, recordingLength);
  var rightBuffer = mergeBuffers(rightchannel, recordingLength);
  var interleaved = interleave(leftBuffer, rightBuffer);
  // 44-byte canonical WAV header followed by 16-bit samples (2 bytes each).
  var buffer = new ArrayBuffer(44 + interleaved.length * 2);
  var view = new DataView(buffer);
  // RIFF chunk descriptor.
  writeUTFBytes(view, 0, 'RIFF');
  // Fix: the RIFF chunk size field is the file size minus the first 8
  // bytes, i.e. 36 + data size — the original wrote 44 + data size,
  // overstating it by 8 bytes.
  view.setUint32(4, 36 + interleaved.length * 2, true);
  writeUTFBytes(view, 8, 'WAVE');
  // "fmt " sub-chunk: linear PCM, stereo.
  writeUTFBytes(view, 12, 'fmt ');
  view.setUint32(16, 16, true); // sub-chunk size (16 for PCM)
  view.setUint16(20, 1, true); // audio format: 1 = linear PCM
  view.setUint16(22, 2, true); // channel count: 2 (stereo)
  view.setUint32(24, sampleRate, true); // sample rate
  view.setUint32(28, sampleRate * 4, true); // byte rate = rate * 2 channels * 2 bytes
  view.setUint16(32, 4, true); // block align = channels * bytes per sample
  view.setUint16(34, 16, true); // bits per sample
  // "data" sub-chunk.
  writeUTFBytes(view, 36, 'data');
  view.setUint32(40, interleaved.length * 2, true);
  // Convert each float sample in [-1, 1] to signed 16-bit little-endian PCM.
  var lng = interleaved.length;
  var index = 44;
  for (var i = 0; i < lng; i++) {
    var s = Math.max(-1, Math.min(1, interleaved[i]));
    view.setInt16(index, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
    index += 2;
  }
  // Fix: the payload is WAV, not MP3 — advertise the matching MIME type
  // so players and decoders do not misinterpret the container.
  var blob = new Blob([view], {
    type: 'audio/wav'
  });
  var url = (window.URL || window.webkitURL).createObjectURL(blob);
  addAudioMessage(url);
  clear();
};
// Drop all buffered audio so the next recording starts empty.
function clear() {
  leftchannel = [];
  rightchannel = [];
  recordingLength = 0;
}
It seems to be a typical Android problem: high audio-write latency on Android is the cause of your issue. Try changing these two parameters
var bufferSize = 2048;
 var sampleRate = 48000;
to:
var bufferSize = 4096;
var sampleRate = 8000;
– ederwander
I tested it just now and it improved slightly, but the audio still stutters a bit on Android... I changed the extension to wav instead of mp3, but the glitches still happen.
– Mickael fraga
Set the
bufferSize
to something larger, like 8192, and see if it improves. By increasing this value you can decrease the sampleRate to 2000, but I don't advise it: that sample rate is too small and you would lose important frequencies during recording. – ederwander
Another thing: you are recording in stereo. Change the code to record in mono — this can improve performance...
– ederwander
I tested with bufferSize = 32768 and sampleRate = 8000 (and also 2000), but the problem still happens, and I also changed the code to record in mono.
– Mickael fraga
Android has many latency problems; I think this won't have a solution in the browser — write native code and it will work...
– ederwander
I'll keep searching to see if I can find a solution for this code. Thanks for the tips.
– Mickael fraga