Update audio processing sample for latest WebCodecs API (webrtc#1484)

Fixes webrtc#1483
dogben authored Sep 29, 2021
1 parent 82b6ec8 commit 821b162
Showing 2 changed files with 27 additions and 11 deletions.
4 changes: 1 addition & 3 deletions src/content/insertable-streams/audio-processing/index.html
@@ -56,9 +56,7 @@ <h1><a href="//webrtc.github.io/samples/" title="WebRTC samples homepage">WebRTC

<p>
<b>Note</b>: This sample is using an experimental API that has not yet been standardized. As
-of 2021-02-09, this API is available in Chrome M90 if the experimental code is enabled on
-the command line with
-<code>--enable-blink-features=WebCodecs,MediaStreamInsertableStreams</code>.
+of 2021-09-29, this API is available in Chrome M94.
</p>
<a href="https://github.com/webrtc/samples/tree/gh-pages/src/content/insertable-streams/audio-processing"
title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
34 changes: 26 additions & 8 deletions src/content/insertable-streams/audio-processing/js/main.js
@@ -8,7 +8,7 @@

'use strict';

-/* global MediaStreamTrackProcessor, MediaStreamTrackGenerator */
+/* global MediaStreamTrackProcessor, MediaStreamTrackGenerator, AudioData */
if (typeof MediaStreamTrackProcessor === 'undefined' ||
typeof MediaStreamTrackGenerator === 'undefined') {
alert(
@@ -26,6 +26,12 @@ try {
'at the bottom of the page.');
}

+if (typeof AudioData === 'undefined') {
+alert(
+'Your browser does not support WebCodecs. See the note at the bottom ' +
+'of the page.');
+}

// Put variables in global scope to make them available to the browser console.

// Audio element
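The AudioData guard added above is needed because the rewritten transform no longer reads samples out of frame.buffer; it reads from and constructs AudioData objects directly, so a browser without WebCodecs would otherwise fail deep inside the transform rather than up front. Purely as an illustration (the sample itself keeps the separate alerts shown above), the three required globals could be probed in one pass:

// Illustrative combined feature check; not the structure the sample actually uses.
const missing = ['MediaStreamTrackProcessor', 'MediaStreamTrackGenerator', 'AudioData']
    .filter(name => !(name in window));
if (missing.length > 0) {
  alert('Your browser does not support ' + missing.join(', ') +
      '. See the note at the bottom of the page.');
}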
@@ -69,18 +75,22 @@ const constraints = window.constraints = {

// Returns a low-pass transform function for use with TransformStream.
function lowPassFilter() {
+const format = 'f32-planar';
let lastValuePerChannel = undefined;
-return (frame, controller) => {
+return (data, controller) => {
const rc = 1.0 / (cutoff * 2 * Math.PI);
-const dt = 1.0 / frame.buffer.sampleRate;
+const dt = 1.0 / data.sampleRate;
const alpha = dt / (rc + dt);
-const nChannels = frame.buffer.numberOfChannels;
+const nChannels = data.numberOfChannels;
if (!lastValuePerChannel) {
console.log(`Audio stream has ${nChannels} channels.`);
lastValuePerChannel = Array(nChannels).fill(0);
}
+const buffer = new Float32Array(data.numberOfFrames * nChannels);
for (let c = 0; c < nChannels; c++) {
-const samples = frame.buffer.getChannelData(c);
+const offset = data.numberOfFrames * c;
+const samples = buffer.subarray(offset, offset + data.numberOfFrames);
+data.copyTo(samples, {planeIndex: c, format});
let lastValue = lastValuePerChannel[c];

// Apply low-pass filter to samples.
@@ -89,10 +99,16 @@ function lowPassFilter() {
samples[i] = lastValue;
}

-frame.buffer.copyToChannel(samples, c);
lastValuePerChannel[c] = lastValue;
}
-controller.enqueue(frame);
+controller.enqueue(new AudioData({
+format,
+sampleRate: data.sampleRate,
+numberOfFrames: data.numberOfFrames,
+numberOfChannels: nChannels,
+timestamp: data.timestamp,
+data: buffer
+}));
};
}
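For reference, the loop these two hunks straddle is a standard one-pole RC low-pass filter. The per-sample update elided from the diff is assumed to follow the usual recurrence, using the alpha = dt / (rc + dt) computed above:

// Assumed shape of the elided per-sample loop (one-pole RC low-pass).
for (let i = 0; i < samples.length; ++i) {
  lastValue = lastValue + alpha * (samples[i] - lastValue);
  samples[i] = lastValue;
}

With an illustrative cutoff of 100 Hz on a 48 kHz track, rc ≈ 1.59 ms and dt ≈ 20.8 µs, so alpha ≈ 0.013: each output sample moves only about 1.3% of the way toward the current input, which smooths away content well above the cutoff.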

@@ -101,7 +117,9 @@ async function start() {
try {
stream = await navigator.mediaDevices.getUserMedia(constraints);
} catch (error) {
-const errorMessage = 'navigator.MediaDevices.getUserMedia error: ' + error.message + ' ' + error.name;
+const errorMessage =
+'navigator.MediaDevices.getUserMedia error: ' + error.message + ' ' +
+error.name;
document.getElementById('errorMsg').innerText = errorMessage;
console.log(errorMessage);
}
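Not shown in this diff, but helpful for reading it: the transform returned by lowPassFilter() is presumed to be wired into a MediaStreamTrackProcessor → TransformStream → MediaStreamTrackGenerator pipeline along the lines of the sketch below. The wiring here is an assumption about the sample's surrounding code, not part of this commit.

// Presumed wiring (illustrative): read AudioData from the microphone track,
// run it through the low-pass transform, and play back the generated track.
const sourceTrack = stream.getAudioTracks()[0];
const processor = new MediaStreamTrackProcessor({track: sourceTrack});
const generator = new MediaStreamTrackGenerator({kind: 'audio'});
const transformer = new TransformStream({transform: lowPassFilter()});
processor.readable.pipeThrough(transformer).pipeTo(generator.writable);
const processedStream = new MediaStream([generator]);
// e.g. assign processedStream to the srcObject of the page's <audio> element.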
