fix: wip for iOS

This commit is contained in:
MAZE 2025-02-18 19:53:20 +03:30
parent 54b46123b4
commit ad57f082ca

View file

@ -30,8 +30,9 @@ declare global {
} }
/** /**
* Patches Howler's master gain node to stream its output into a hidden HTML audio element. * Patches Howler's master gain node to route its output into a hidden HTML audio element.
* This helps prevent iOS from suspending audio when the app goes into the background. * An intermediate splitter node is used in an attempt to reduce the banging noise observed on iOS.
* Also adds a listener to resume the AudioContext when the document becomes visible.
*/ */
export function setupAudioStream(): void { export function setupAudioStream(): void {
if ( if (
@ -40,34 +41,52 @@ export function setupAudioStream(): void {
!window.__howlerStreamPatched !window.__howlerStreamPatched
) { ) {
const audioCtx = Howler.ctx; const audioCtx = Howler.ctx;
const masterGain = Howler.masterGain;
// Create a MediaStream destination node to capture the AudioContext output. // Create a MediaStream destination node to capture the output.
const streamDestination = audioCtx.createMediaStreamDestination(); const streamDestination = audioCtx.createMediaStreamDestination();
// Disconnect the master gain from its default destination. // Create a splitter gain node to help split the signal cleanly.
masterGain.disconnect(); const splitter = audioCtx.createGain();
// Reconnect the master gain to both the default destination and the stream destination. // Disconnect the master gain.
masterGain.connect(audioCtx.destination); Howler.masterGain.disconnect();
masterGain.connect(streamDestination);
// Reconnect masterGain: one branch to the AudioContext's default destination,
// and one branch through the splitter to the MediaStream destination.
Howler.masterGain.connect(audioCtx.destination);
Howler.masterGain.connect(splitter);
splitter.connect(streamDestination);
// Create a hidden HTML audio element to play the captured stream. // Create a hidden HTML audio element to play the captured stream.
const audioElement = document.createElement('audio'); const audioElement = document.createElement('audio');
audioElement.setAttribute('playsinline', 'true'); // essential for iOS audioElement.setAttribute('playsinline', 'true'); // crucial for iOS playback
audioElement.srcObject = streamDestination.stream; audioElement.srcObject = streamDestination.stream;
audioElement.style.display = 'none'; audioElement.style.display = 'none';
document.body.appendChild(audioElement); document.body.appendChild(audioElement);
// Attempt to play the audio element. Note that iOS requires a user gesture. // Attempt to start playback (must be triggered by a user gesture).
audioElement.play().catch((err: unknown) => { audioElement.play().catch((err: unknown) => {
console.error('Failed to play background stream:', err); console.error('Failed to play background stream:', err);
}); });
// Mark the stream as patched so we don't run this code again. // Listen for visibility changes: if the document becomes visible and the AudioContext is suspended, resume it.
document.addEventListener('visibilitychange', () => {
if (
document.visibilityState === 'visible' &&
audioCtx.state === 'suspended'
) {
audioCtx
.resume()
.catch((err: unknown) =>
console.error('Error resuming AudioContext:', err),
);
}
});
window.__howlerStreamPatched = true; window.__howlerStreamPatched = true;
} }
} }
/** /**
* ========================================= * =========================================
*/ */