implement a ringbuffer in the audioworklet to optimize runtimes
parent cc32e28b36
commit 00c5467a89
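The change replaces the worklet's queue of Float32Array chunks with a single fixed-size ring buffer: the buffer length is rounded to a multiple of the 128-sample render quantum, a write index advances in the message handler, a read index advances in process(), and the fill level is the modular distance between the two. A minimal standalone sketch of that index arithmetic follows; the names and the 48000/0.25 values are illustrative, not taken from the project.

// Sketch only: in a real AudioWorkletProcessor, sampleRate is a global of the
// AudioWorkletGlobalScope; maxLength here is a hypothetical value in seconds.
const sampleRate = 48000;
const maxLength = 0.25;
// Align the ring to the 128-sample render quantum, as the commit does:
// Math.round(48000 * 0.25 / 128) * 128 = Math.round(93.75) * 128 = 94 * 128 = 12032 samples.
const bufferSize = Math.round(sampleRate * maxLength / 128) * 128;
const ring = new Float32Array(bufferSize);
let inPos = 0;   // write index, advanced when audio arrives over the message port
let outPos = 0;  // read index, advanced by process()

function write(samples) {
    if (inPos + samples.length <= bufferSize) {
        ring.set(samples, inPos);                    // fits without wrapping
    } else {
        const head = bufferSize - inPos;             // split at the end of the ring
        ring.set(samples.subarray(0, head), inPos);
        ring.set(samples.subarray(head), 0);         // wrap the rest back to the start
    }
    inPos = (inPos + samples.length) % bufferSize;
}

function remaining() {
    // Buffered sample count; the JS % operator can return a negative value, hence the correction.
    const mod = (inPos - outPos) % bufferSize;
    return mod >= 0 ? mod : mod + bufferSize;
}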
@@ -3,7 +3,11 @@ class OwrxAudioProcessor extends AudioWorkletProcessor {
         super(options);
         this.maxLength = options.processorOptions.maxLength;
         this.reduceToLength = options.processorOptions.reduceToLength;
-        this.audio_buffers = [];
+        // initialize ringbuffer, make sure it aligns with the expected buffer size of 128
+        this.bufferSize = Math.round(sampleRate * this.maxLength / 128) * 128;
+        this.audioBuffer = new Float32Array(this.bufferSize);
+        this.inPos = 0;
+        this.outPos = 0;
         this.port.addEventListener('message', (m) => {
             if (typeof(m.data) === 'string') {
                 const json = JSON.parse(m.data);
@@ -11,48 +15,39 @@ class OwrxAudioProcessor extends AudioWorkletProcessor {
                     this.reportBuffers();
                 }
             } else {
-                this.audio_buffers.push(new Float32Array(m.data));
+                // the ringbuffer size is aligned to the output buffer size, which means that the input buffers might
+                // need to wrap around the end of the ringbuffer, back to the start.
+                // it is better to have this processing here instead of in the time-critical process function.
+                if (this.inPos + m.data.length <= this.bufferSize) {
+                    // we have enough space, so just copy data over.
+                    this.audioBuffer.set(m.data, this.inPos);
+                } else {
+                    // we don't have enough space, so we need to split the data.
+                    const remaining = this.bufferSize - this.inPos;
+                    this.audioBuffer.set(m.data.subarray(0, remaining), this.inPos);
+                    this.audioBuffer.set(m.data.subarray(remaining));
+                }
+                this.inPos = (this.inPos + m.data.length) % this.bufferSize;
             }
         });
         this.port.addEventListener('messageerror', console.error);
         this.port.start();
     }

     process(inputs, outputs, parameters) {
         //console.time('audio::process');
+        const samples = Math.min(128, this.remaining());
         outputs[0].forEach((output) => {
-            let total = 0;
-            while (this.audio_buffers.length) {
-                const b = this.audio_buffers.shift();
-                const newLength = total + b.length;
-                const ol = output.length;
-                // not enough space to fit all data, so splice and put back in the queue
-                if (newLength > ol) {
-                    const tokeep = b.slice(0, ol - total);
-                    output.set(tokeep, total);
-                    const tobuffer = b.slice(ol - total, b.length);
-                    this.audio_buffers.unshift(tobuffer);
-                    break;
-                } else {
-                    output.set(b, total);
-                }
-                total = newLength;
-            }
+            output.set(this.audioBuffer.subarray(this.outPos, this.outPos + samples));
         });
         //console.timeEnd('audio::process');
+        this.outPos = (this.outPos + samples) % this.bufferSize;
         return true;
     }

-    bufferLength() {
-        return this.audio_buffers.map(function(b){ return b.length; }).reduce(function(a, b){ return a + b; }, 0);
+    remaining() {
+        const mod = (this.inPos - this.outPos) % this.bufferSize;
+        if (mod >= 0) return mod;
+        return mod + this.bufferSize;
     }

     reportBuffers() {
-        var we_have_more_than = (sec) => {
-            return sec * sampleRate < this.bufferLength();
-        };
-        if (we_have_more_than(this.maxLength)) while (we_have_more_than(this.reduceToLength)) {
-            this.audio_buffers.shift();
-        }
-
-        this.port.postMessage(JSON.stringify({buffersize: this.bufferLength()}));
+        this.port.postMessage(JSON.stringify({buffersize: this.remaining()}));
     }
 }
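The hunks above only touch the processor class itself; registering the worklet and feeding it from the main thread are outside this diff. A hedged sketch of how such a processor is typically wired up with the standard Web Audio API, where the module path, the registration name 'owrx-audio-processor' and the option values are assumptions rather than OpenWebRX's actual ones:

// In the worklet module (registration name is an assumption):
// registerProcessor('owrx-audio-processor', OwrxAudioProcessor);

// On the main thread (sketch):
async function createAudioNode(audioContext) {
    await audioContext.audioWorklet.addModule('AudioProcessor.js');      // assumed path
    const node = new AudioWorkletNode(audioContext, 'owrx-audio-processor', {
        processorOptions: { maxLength: 0.25, reduceToLength: 0.15 }      // seconds, hypothetical values
    });
    node.connect(audioContext.destination);
    node.port.start();                                                   // required when using addEventListener
    return node;
}

// Feeding samples: post a Float32Array and transfer its underlying buffer so no
// copy crosses the thread boundary; the worklet receives it as m.data.
function pushSamples(node, samples /* Float32Array */) {
    node.port.postMessage(samples, [samples.buffer]);
}

The remaining hunks below are in the main-thread code (audio_onprocess, audio_buffer_progressbar_update, audio_init): since the worklet now reports its fill level itself, the temporary audio_buffer_progressbar_update_disabled workaround is dropped.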
@@ -1586,8 +1586,6 @@ function audio_onprocess(e) {
     }
 }

-var audio_buffer_progressbar_update_disabled = false;
-
 var audio_buffer_total_average_level = 0;
 var audio_buffer_total_average_level_length = 0;

@@ -1596,8 +1594,11 @@ function audio_buffers_total_length() {
 }

 function audio_buffer_progressbar_update(reportedValue) {
-    if (audio_buffer_progressbar_update_disabled) return;
-    var audio_buffer_value = (reportedValue || audio_buffers_total_length()) / audio_context.sampleRate;
+    var audio_buffer_value = reportedValue;
+    if (typeof(audio_buffer_value) === 'undefined') {
+        audio_buffer_value = audio_buffers_total_length();
+    }
+    audio_buffer_value /= audio_context.sampleRate;
     audio_buffer_total_average_level_length++;
     audio_buffer_total_average_level = (audio_buffer_total_average_level * ((audio_buffer_total_average_level_length - 1) / audio_buffer_total_average_level_length)) + (audio_buffer_value / audio_buffer_total_average_level_length);
     var overrun = audio_buffer_value > audio_buffer_maximal_length_sec;
@@ -1609,14 +1610,7 @@ function audio_buffer_progressbar_update(reportedValue) {
     if (underrun) {
         text = "underrun";
     }
-    if (overrun || underrun) {
-        audio_buffer_progressbar_update_disabled = true;
-        window.setTimeout(function () {
-            audio_buffer_progressbar_update_disabled = false;
-            audio_buffer_progressbar_update();
-        }, 1000);
-    }
-    progressbar_set(e("openwebrx-bar-audio-buffer"), (underrun) ? 1 : audio_buffer_value, "Audio " + text + " [" + (audio_buffer_value).toFixed(1) + " s]", overrun || underrun);
+    progressbar_set(e("openwebrx-bar-audio-buffer"), audio_buffer_value, "Audio " + text + " [" + (audio_buffer_value).toFixed(1) + " s]", overrun || underrun);
 }


@@ -1748,8 +1742,7 @@ function audio_init() {
     }, audio_flush_interval_ms);
     audio_node.port.addEventListener('message', function(m){
         var json = JSON.parse(m.data);
-        if (json.buffersize) {
-            audio_buffer_progressbar_update_disabled = false;
+        if (typeof(json.buffersize) !== 'undefined') {
             audio_buffer_progressbar_update(json.buffersize);
         }
     });
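audio_buffer_progressbar_update keeps a running mean of the reported buffer level without storing past samples, using the recurrence avg_n = avg_(n-1) * (n - 1)/n + x_n/n. A small self-contained check of that recurrence, with illustrative values:

// Verify the incremental mean used in audio_buffer_progressbar_update:
// avg_n = avg_(n-1) * ((n - 1) / n) + x_n / n
let avg = 0;
let n = 0;
[0.2, 0.4, 0.6].forEach(function (x) {
    n++;
    avg = avg * ((n - 1) / n) + x / n;
});
console.log(avg.toFixed(2));  // "0.40", the plain mean of the three values

After any number of samples the recurrence equals their plain mean, so a single accumulated value plus a counter is enough.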