diff --git a/packages/webgal/src/Core/gameScripts/vocal/index.ts b/packages/webgal/src/Core/gameScripts/vocal/index.ts index 5d2cd0017..a080b957b 100644 --- a/packages/webgal/src/Core/gameScripts/vocal/index.ts +++ b/packages/webgal/src/Core/gameScripts/vocal/index.ts @@ -6,9 +6,11 @@ import { getBooleanArgByKey, getNumberArgByKey, getStringArgByKey } from '@/Core import { IStageState } from '@/store/stageInterface'; import { audioContextWrapper, + ensureAudioContextReady, getAudioLevel, performBlinkAnimation, performMouthAnimation, + resetMaxAudioLevel, updateThresholds, } from '@/Core/gameScripts/vocal/vocalAnimation'; import { match } from '../../util/match'; @@ -64,7 +66,7 @@ export const playVocal = (sentence: ISentence) => { return { arrangePerformPromise: new Promise((resolve) => { // 播放语音 - setTimeout(() => { + setTimeout(async () => { let VocalControl: any = document.getElementById('currentVocal'); // 设置语音音量 webgalStore.dispatch(setStage({ key: 'vocalVolume', value: volume })); @@ -102,10 +104,20 @@ export const playVocal = (sentence: ISentence) => { stopTimeout: undefined, // 暂时不用,后面会交给自动清除 }; WebGAL.gameplay.performController.arrangeNewPerform(perform, sentence, false); + const finishPerform = () => { + for (const e of WebGAL.gameplay.performController.performList) { + if (e.performName === performInitName) { + isOver = true; + e.stopFunction(); + WebGAL.gameplay.performController.unmountPerform(e.performName); + } + } + }; + key = key ? 
key : `fig-${pos}`; const animationItem = figureAssociatedAnimation.find((tid) => tid.targetId === key); if (animationItem) { - let maxAudioLevel = 0; + resetMaxAudioLevel(); const foundFigure = freeFigure.find((figure) => figure.key === key); @@ -113,53 +125,50 @@ export const playVocal = (sentence: ISentence) => { pos = foundFigure.basePosition; } - if (!audioContextWrapper.audioContext) { - let audioContext: AudioContext | null; - audioContext = new AudioContext(); - audioContextWrapper.analyser = audioContext.createAnalyser(); - audioContextWrapper.analyser.fftSize = 256; - audioContextWrapper.dataArray = new Uint8Array(audioContextWrapper.analyser.frequencyBinCount); - } - - if (!audioContextWrapper.analyser) { - audioContextWrapper.analyser = audioContextWrapper.audioContext.createAnalyser(); - audioContextWrapper.analyser.fftSize = 256; - } - - bufferLength = audioContextWrapper.analyser.frequencyBinCount; - audioContextWrapper.dataArray = new Uint8Array(bufferLength); - let vocalControl = document.getElementById('currentVocal') as HTMLMediaElement; - - if (!audioContextWrapper.source || audioContextWrapper.source.mediaElement !== vocalControl) { - if (audioContextWrapper.source) { - audioContextWrapper.source.disconnect(); + const isAudioContextReady = await ensureAudioContextReady(); + if (isAudioContextReady && audioContextWrapper.audioContext) { + if (!audioContextWrapper.analyser) { + audioContextWrapper.analyser = audioContextWrapper.audioContext.createAnalyser(); + audioContextWrapper.analyser.fftSize = 256; } - audioContextWrapper.source = audioContextWrapper.audioContext.createMediaElementSource(vocalControl); - audioContextWrapper.source.connect(audioContextWrapper.analyser!); - } - audioContextWrapper.analyser.connect(audioContextWrapper.audioContext.destination); + bufferLength = audioContextWrapper.analyser.frequencyBinCount; + audioContextWrapper.dataArray = new Uint8Array(bufferLength); + let vocalControl = 
document.getElementById('currentVocal') as HTMLMediaElement; - // Lip-snc Animation - audioContextWrapper.audioLevelInterval = setInterval(() => { - const audioLevel = getAudioLevel( - audioContextWrapper.analyser!, - audioContextWrapper.dataArray!, - bufferLength, - ); - const { OPEN_THRESHOLD, HALF_OPEN_THRESHOLD } = updateThresholds(audioLevel); + if (!audioContextWrapper.source || audioContextWrapper.source.mediaElement !== vocalControl) { + if (audioContextWrapper.source) { + audioContextWrapper.source.disconnect(); + } + audioContextWrapper.source = audioContextWrapper.audioContext.createMediaElementSource(vocalControl); + audioContextWrapper.source.connect(audioContextWrapper.analyser); + } - performMouthAnimation({ - audioLevel, - OPEN_THRESHOLD, - HALF_OPEN_THRESHOLD, - currentMouthValue, - lerpSpeed, - key, - animationItem, - pos, - }); - }, 50); + audioContextWrapper.analyser.connect(audioContextWrapper.audioContext.destination); + + // Lip-sync Animation + audioContextWrapper.audioLevelInterval = setInterval(() => { + const audioLevel = getAudioLevel( + audioContextWrapper.analyser!, + audioContextWrapper.dataArray!, + bufferLength, + ); + const { OPEN_THRESHOLD, HALF_OPEN_THRESHOLD } = updateThresholds(audioLevel); + + performMouthAnimation({ + audioLevel, + OPEN_THRESHOLD, + HALF_OPEN_THRESHOLD, + currentMouthValue, + lerpSpeed, + key, + animationItem, + pos, + }); + }, 50); + } else { + logger.warn('AudioContext is not ready, skip lip-sync analyzer for this vocal.'); + } // blinkAnimation let animationEndTime: number; @@ -174,17 +183,16 @@ export const playVocal = (sentence: ISentence) => { }, 10000); } - VocalControl?.play(); + const playPromise = VocalControl?.play(); - VocalControl.onended = () => { - for (const e of WebGAL.gameplay.performController.performList) { - if (e.performName === performInitName) { - isOver = true; - e.stopFunction(); - WebGAL.gameplay.performController.unmountPerform(e.performName); - } - } - }; + if (playPromise?.catch) 
{ + playPromise.catch((error: unknown) => { + logger.warn('Vocal play was blocked by browser autoplay policy or audio activation state.', error); + finishPerform(); + }); + } + + VocalControl.onended = finishPerform; } }, 1); }), diff --git a/packages/webgal/src/Core/gameScripts/vocal/vocalAnimation.ts b/packages/webgal/src/Core/gameScripts/vocal/vocalAnimation.ts index e46637006..73e2bb744 100644 --- a/packages/webgal/src/Core/gameScripts/vocal/vocalAnimation.ts +++ b/packages/webgal/src/Core/gameScripts/vocal/vocalAnimation.ts @@ -1,7 +1,7 @@ import { WebGAL } from '@/Core/WebGAL'; interface IAudioContextWrapper { - audioContext: AudioContext; + audioContext: AudioContext | null; source: MediaElementAudioSourceNode | null; analyser: AnalyserNode | undefined; dataArray: Uint8Array | undefined; @@ -12,7 +12,7 @@ interface IAudioContextWrapper { // Initialize the object based on the interface export const audioContextWrapper: IAudioContextWrapper = { - audioContext: new AudioContext(), + audioContext: null, source: null, analyser: undefined, dataArray: undefined, @@ -21,6 +21,34 @@ export const audioContextWrapper: IAudioContextWrapper = { maxAudioLevel: 0, }; +export const ensureAudioContextReady = async (): Promise<boolean> => { + if (!audioContextWrapper.audioContext) { + const AudioContextCtor = + window.AudioContext ?? 
+ (window as unknown as { webkitAudioContext?: typeof AudioContext }).webkitAudioContext; + + if (!AudioContextCtor) { + return false; + } + + audioContextWrapper.audioContext = new AudioContextCtor(); + } + + if (audioContextWrapper.audioContext.state === 'suspended') { + try { + await audioContextWrapper.audioContext.resume(); + } catch { + return false; + } + } + + return audioContextWrapper.audioContext.state === 'running'; +}; + +export const resetMaxAudioLevel = () => { + audioContextWrapper.maxAudioLevel = 0; +}; + export const updateThresholds = (audioLevel: number) => { audioContextWrapper.maxAudioLevel = Math.max(audioLevel, audioContextWrapper.maxAudioLevel); return { @@ -52,8 +80,12 @@ export const performBlinkAnimation = (params: { }; // Updated getAudioLevel function -export const getAudioLevel = (analyser: AnalyserNode, dataArray: Uint8Array, bufferLength: number): number => { - analyser.getByteFrequencyData(dataArray); +export const getAudioLevel = ( + analyser: AnalyserNode, + dataArray: Uint8Array, + bufferLength: number, +): number => { + analyser.getByteFrequencyData(dataArray as any); let sum = 0; for (let i = 0; i < bufferLength; i++) { sum += dataArray[i];