From 5efa60a0de1d0c3cf2e3c858a3d2869c88a5acb4 Mon Sep 17 00:00:00 2001
From: unanmed <1319491857@qq.com>
Date: Wed, 15 Jan 2025 21:29:29 +0800
Subject: [PATCH] fix: All bug for opus and ogg decoder

---
 public/_server/table/data.comment.js |   4 +-
 public/project/data.js               |   5 +-
 src/module/audio/bgmLoader.ts        |  28 ++++
 src/module/audio/decoder.ts          |  45 ++++++
 src/module/audio/index.ts            |  10 ++
 src/module/audio/player.ts           |  36 ++++-
 src/module/audio/source.ts           | 229 ++++++++++++++++++---------
 src/module/audio/support.ts          |  18 +--
 src/module/loader/stream.ts          |  31 ++--
 src/source/data.d.ts                 |   5 +-
 10 files changed, 303 insertions(+), 108 deletions(-)
 create mode 100644 src/module/audio/bgmLoader.ts
 create mode 100644 src/module/audio/decoder.ts

diff --git a/public/_server/table/data.comment.js b/public/_server/table/data.comment.js
index 6f18550..4f2e0fe 100644
--- a/public/_server/table/data.comment.js
+++ b/public/_server/table/data.comment.js
@@ -38,7 +38,7 @@ var data_comment_c456ea59_6018_45ef_8bcc_211a24c627dc = {
         "_range": "editor.mode.checkImages(thiseval, './project/images/')",
         "_directory": "./project/images/",
         "_transform": (function (one) {
-            if (one.endsWith('.png') || one.endsWith('.jpg') || one.endsWith('.jpeg') || one.endsWith('.gif'))
+            if (one.endsWith('.png') || one.endsWith('.jpg') || one.endsWith('.jpeg') || one.endsWith('.gif') || one.endsWith('.webp'))
                 return one;
             return null;
         }).toString(),
@@ -96,7 +96,7 @@ var data_comment_c456ea59_6018_45ef_8bcc_211a24c627dc = {
         "_range": "editor.mode.checkUnique(thiseval)",
         "_directory": "./project/bgms/",
         "_transform": (function (one) {
-            if (one.endsWith('.mp3') || one.endsWith('.ogg') || one.endsWith('.wav') || one.endsWith('.m4a') || one.endsWith('.flac'))
+            if (one.endsWith('.mp3') || one.endsWith('.ogg') || one.endsWith('.wav') || one.endsWith('.m4a') || one.endsWith('.flac') || one.endsWith('.opus'))
                 return one;
             return null;
         }).toString(),
diff --git a/public/project/data.js b/public/project/data.js
index 70b6a97..2b9711c 100644
--- a/public/project/data.js
+++ b/public/project/data.js
@@ -193,13 +193,14 @@ var data_a1e2fb4a_e986_4524_b0da_9b7ba7c0874d =
         "zone"
     ],
     "bgms": [
-        "beforeBoss.mp3",
+        "beforeBoss.opus",
         "cave.mp3",
         "escape.mp3",
         "escape2.mp3",
         "grass.mp3",
-        "mount.mp3",
+        "mount.opus",
         "night.mp3",
+        "output6.ogg",
         "palaceCenter.mp3",
         "palaceNorth.mp3",
         "palaceSouth.mp3",
diff --git a/src/module/audio/bgmLoader.ts b/src/module/audio/bgmLoader.ts
new file mode 100644
index 0000000..5677d74
--- /dev/null
+++ b/src/module/audio/bgmLoader.ts
@@ -0,0 +1,28 @@
+import { StreamLoader } from '../loader';
+import { audioPlayer, AudioRoute } from './player';
+import { AudioType, guessTypeByExt } from './support';
+
+export function loadAllBgm() {
+    const loading = Mota.require('var', 'loading');
+    loading.once('coreInit', () => {
+        const data = data_a1e2fb4a_e986_4524_b0da_9b7ba7c0874d;
+        for (const bgm of data.main.bgms) {
+            const type = guessTypeByExt(bgm);
+
+            if (type === AudioType.Opus || type === AudioType.Ogg) {
+                const source = audioPlayer.createStreamSource();
+                const stream = new StreamLoader(`project/bgms/${bgm}`);
+                stream.pipe(source);
+                source.setLoop(true);
+                const route = new AudioRoute(source, audioPlayer);
+                audioPlayer.addRoute(`bgms.${bgm}`, route);
+            } else {
+                const source = audioPlayer.createElementSource();
+                source.setSource(`project/bgms/${bgm}`);
+                source.setLoop(true);
+                const route = new AudioRoute(source, audioPlayer);
+                audioPlayer.addRoute(`bgms.${bgm}`, route);
+            }
+        }
+    });
+}
diff --git a/src/module/audio/decoder.ts b/src/module/audio/decoder.ts
new file mode 100644
index 0000000..537abc9
--- /dev/null
+++ b/src/module/audio/decoder.ts
@@ -0,0 +1,45 @@
+import { OggVorbisDecoder } from '@wasm-audio-decoders/ogg-vorbis';
+import { IAudioDecodeData, IAudioDecoder } from './source';
+import { OggOpusDecoder } from 'ogg-opus-decoder';
+
+export class VorbisDecoder implements IAudioDecoder {
+    decoder?: OggVorbisDecoder;
+
+    async create(): Promise<void> {
+        this.decoder = new OggVorbisDecoder();
+        await this.decoder.ready;
+    }
+
+    destroy(): void {
+        this.decoder?.free();
+    }
+
+    async decode(data: Uint8Array): Promise<IAudioDecodeData | undefined> {
+        return this.decoder?.decode(data);
+    }
+
+    async flush(): Promise<IAudioDecodeData | undefined> {
+        return await this.decoder?.flush();
+    }
+}
+
+export class OpusDecoder implements IAudioDecoder {
+    decoder?: OggOpusDecoder;
+
+    async create(): Promise<void> {
+        this.decoder = new OggOpusDecoder();
+        await this.decoder.ready;
+    }
+
+    destroy(): void {
+        this.decoder?.free();
+    }
+
+    async decode(data: Uint8Array): Promise<IAudioDecodeData | undefined> {
+        return this.decoder?.decode(data);
+    }
+
+    async flush(): Promise<IAudioDecodeData | undefined> {
+        return await this.decoder?.flush();
+    }
+}
diff --git a/src/module/audio/index.ts b/src/module/audio/index.ts
index 1798591..a5ac962 100644
--- a/src/module/audio/index.ts
+++ b/src/module/audio/index.ts
@@ -1,4 +1,14 @@
+import { loadAllBgm } from './bgmLoader';
+import { OpusDecoder, VorbisDecoder } from './decoder';
+import { AudioStreamSource } from './source';
+import { AudioType } from './support';
+
+loadAllBgm();
+AudioStreamSource.registerDecoder(AudioType.Ogg, VorbisDecoder);
+AudioStreamSource.registerDecoder(AudioType.Opus, OpusDecoder);
+
 export * from './support';
 export * from './effect';
 export * from './player';
 export * from './source';
+export * from './bgmLoader';
diff --git a/src/module/audio/player.ts b/src/module/audio/player.ts
index 194f3e8..d52e4ae 100644
--- a/src/module/audio/player.ts
+++ b/src/module/audio/player.ts
@@ -187,10 +187,40 @@ export class AudioPlayer extends EventEmitter {
      * @param id The audio name
      * @param when The position in the audio to start playing from, in seconds
      */
-    play(id: string, when?: number) {
+    play(id: string, when: number = 0) {
         this.getRoute(id)?.play(when);
     }
 
+    /**
+     * Pause the audio playback
+     * @param id The audio name
+     * @returns A promise that resolves when the audio has actually stopped
+     */
+    pause(id: string) {
+        const route = this.getRoute(id);
+        if (!route) return Promise.resolve();
+        else return route.pause();
+    }
+
+    /**
+     * Stop the audio playback
+     * @param id The audio name
+     * @returns A promise that resolves when the audio has actually stopped
+     */
+    stop(id: string) {
+        const route = this.getRoute(id);
+        if (!route) return Promise.resolve();
+        else return route.stop();
+    }
+
+    /**
+     * Resume the audio playback
+     * @param id The audio name
+     */
+    resume(id: string) {
+        this.getRoute(id)?.resume();
+    }
+
     /**
      * Set the listener position. Positive x points horizontally to the right, positive y points up from the ground, and positive z points out of the screen, away from the user
      * @param x The x coordinate of the position
@@ -299,7 +329,7 @@ export class AudioRoute
      * Start playing this audio
      * @param when The position in the audio to start playing from, in seconds
      */
-    play(when?: number) {
+    play(when: number = 0) {
         if (this.source.playing) return;
         this.link();
         if (this.effectRoute.length > 0) {
@@ -430,3 +460,5 @@
         this.effectRoute.forEach(v => v.end());
     }
 }
+
+export const audioPlayer = new AudioPlayer();
diff --git a/src/module/audio/source.ts b/src/module/audio/source.ts
index 168e3a7..d7406f8 100644
--- a/src/module/audio/source.ts
+++ b/src/module/audio/source.ts
@@ -80,23 +80,25 @@ export interface IAudioDecoder {
      * Decode streamed data
      * @param data The stream data
      */
-    decode(data: Uint8Array): Promise<IAudioDecodeData>;
+    decode(data: Uint8Array): Promise<IAudioDecodeData | undefined>;
 
     /**
      * Called after the audio has finished decoding; it should return any audio data that has not been parsed or returned yet. After this call, the decoder will not be used again
      */
-    flush(): Promise<IAudioDecodeData>;
+    flush(): Promise<IAudioDecodeData | undefined>;
 }
 
-const fileSignatures: Map<string, AudioType> = new Map([
-    ['49 44 33', AudioType.Mp3],
-    ['4F 67 67 53', AudioType.Ogg],
-    ['52 49 46 46', AudioType.Wav],
-    ['66 4C 61 43', AudioType.Flac],
-    ['4F 70 75 73', AudioType.Opus],
-    ['FF F1', AudioType.Aac],
-    ['FF F9', AudioType.Aac]
-]);
+const fileSignatures: [AudioType, number[]][] = [
+    [AudioType.Mp3, [0x49, 0x44, 0x33]],
+    [AudioType.Ogg, [0x4f, 0x67, 0x67, 0x53]],
+    [AudioType.Wav, [0x52, 0x49, 0x46, 0x46]],
+    [AudioType.Flac, [0x66, 0x4c, 0x61, 0x43]],
+    [AudioType.Aac, [0xff, 0xf1]],
+    [AudioType.Aac, [0xff, 0xf9]]
+];
+const oggHeaders: [AudioType, number[]][] = [
+    [AudioType.Opus, [0x4f, 0x70, 0x75, 0x73, 0x48, 0x65, 0x61, 0x64]]
+];
 
 const mimeTypeMap: Record<AudioType, string> = {
     [AudioType.Aac]: 'audio/aac',
@@ -112,7 +114,8 @@ function isOggPage(data: any): data is OggPage {
 }
 
 export class AudioStreamSource extends AudioSource implements IStreamReader {
-    static readonly decoderMap: Map<AudioType, IAudioDecoder> = new Map();
+    static readonly decoderMap: Map<AudioType, new () => IAudioDecoder> =
+        new Map();
 
     output: AudioBufferSourceNode;
     /** Audio data */
@@ -138,6 +141,8 @@

     /** The moment playback started */
     private lastStartTime: number = 0;
+    /** The buffered sample count when playback last started */
+    private lastBufferSamples: number = 0;
 
     /** Whether the file header has been received */
     private headerRecieved: boolean = false;
@@ -152,12 +157,14 @@
     /** Buffered audio data; every bufferChunkSize seconds forms one Float32Array, used for streamed decoding */
     private audioData: Float32Array[][] = [];
+    private errored: boolean = false;
+
     /**
      * Register a decoder
      * @param type The audio type the registered decoder can decode
      * @param decoder The decoder object
      */
-    static registerDecoder(type: AudioType, decoder: IAudioDecoder) {
+    static registerDecoder(type: AudioType, decoder: new () => IAudioDecoder) {
         if (this.decoderMap.has(type)) {
             logger.warn(47, type);
             return;
         }
@@ -184,42 +191,60 @@
     }
 
     async pump(data: Uint8Array | undefined, done: boolean): Promise<void> {
-        if (!data) return;
+        if (!data || this.errored) return;
         if (!this.headerRecieved) {
-            // Inspect the file header to get the audio type
-            const toCheck = [...data.slice(0, 16)];
-            const hexArray = toCheck.map(v => v.toString(16).padStart(2, '0'));
-            const hex = hexArray.join(' ');
-            for (const [key, value] of fileSignatures) {
-                if (hex.startsWith(key)) {
-                    this.audioType = value;
+            // Inspect the file header to get the audio type; only the first 256 bytes are checked
+            const toCheck = data.slice(0, 256);
+            for (const [type, value] of fileSignatures) {
+                if (value.every((v, i) => toCheck[i] === v)) {
+                    this.audioType = type;
                     break;
                 }
             }
+            if (this.audioType === AudioType.Ogg) {
+                // If it is ogg, further check whether it is actually opus
+                for (const [key, value] of oggHeaders) {
+                    const has = toCheck.some((_, i) => {
+                        return value.every((v, ii) => toCheck[i + ii] === v);
+                    });
+                    if (has) {
+                        this.audioType = key;
+                        break;
+                    }
+                }
+            }
             if (!this.audioType) {
-                logger.error(25, hex);
+                logger.error(
+                    25,
+                    [...toCheck]
+                        .map(v => v.toString(16).padStart(2, '0'))
+                        .join(' ')
+                        .toUpperCase()
+                );
                 return;
             }
             // Create the decoder
-            const decoder = AudioStreamSource.decoderMap.get(this.audioType);
-            this.decoder = decoder;
-            if (!decoder) {
+            const Decoder = AudioStreamSource.decoderMap.get(this.audioType);
+            if (!Decoder) {
+                this.errored = true;
                 logger.error(24, this.audioType);
                 return Promise.reject(
                     `Cannot decode stream source type of '${this.audioType}', since there is no registered decoder for that type.`
                 );
             }
+            this.decoder = new Decoder();
             // Create the data parser
             const mime = mimeTypeMap[this.audioType];
             const parser = new CodecParser(mime);
             this.parser = parser;
-            await decoder.create();
+            await this.decoder.create();
             this.headerRecieved = true;
         }
 
         const decoder = this.decoder;
         const parser = this.parser;
         if (!decoder || !parser) {
+            this.errored = true;
             return Promise.reject(
                 'No parser or decoder attached in this AudioStreamSource'
             );
@@ -234,13 +259,13 @@
      * Check the sample rate. If it has not been parsed yet, set it; if the current rate differs from the previous one, emit a warning
      */
     private checkSampleRate(info: (OggPage | CodecFrame)[]) {
-        const first = info[0];
-        if (first) {
-            const frame = isOggPage(first) ? first.codecFrames[0] : first;
+        for (const one of info) {
+            const frame = isOggPage(one) ? one.codecFrames[0] : one;
             if (frame) {
                 const rate = frame.header.sampleRate;
                 if (this.sampleRate === 0) {
                     this.sampleRate = rate;
+                    break;
                 } else {
                     if (rate !== this.sampleRate) {
                         logger.warn(48);
@@ -260,6 +285,7 @@
     ) {
         // Decode the audio data
         const audioData = await decoder.decode(data);
+        if (!audioData) return;
         // @ts-expect-error Incorrect type declarations in the library
         const audioInfo = [...parser.parseChunk(data)] as (
             | OggPage
@@ -277,6 +303,7 @@
      */
     private async decodeFlushData(decoder: IAudioDecoder, parser: CodecParser) {
         const audioData = await decoder.flush();
+        if (!audioData) return;
         // @ts-expect-error Incorrect type declarations in the library
         const audioInfo = [...parser.flush()] as (OggPage | CodecFrame)[];
 
@@ -303,23 +330,33 @@
         const chunk = this.sampleRate * this.bufferChunkSize;
         const sampled = this.bufferedSamples;
         const pushIndex = Math.floor(sampled / chunk);
-        const bufferIndex = sampled % (this.sampleRate * chunk);
+        const bufferIndex = sampled % chunk;
         const dataLength = data.channelData[0].length;
-        const restLength = chunk - bufferIndex;
-        // Put the data into the buffer
-        for (let i = 0; i < channels; i++) {
-            const audioData = this.audioData[i];
-            if (!audioData[pushIndex]) {
-                audioData.push(new Float32Array(chunk * this.sampleRate));
-            }
-            audioData[pushIndex].set(data.channelData[i], bufferIndex);
-            if (restLength < dataLength) {
-                const nextData = new Float32Array(chunk * this.sampleRate);
-                nextData.set(data.channelData[i].slice(restLength), 0);
-                audioData.push(nextData);
+        let buffered = 0;
+        let nowIndex = pushIndex;
+        let toBuffer = bufferIndex;
+        while (buffered < dataLength) {
+            const rest = toBuffer !== 0 ? chunk - bufferIndex : chunk;
+
+            for (let i = 0; i < channels; i++) {
+                const audioData = this.audioData[i];
+                if (!audioData[nowIndex]) {
+                    audioData.push(new Float32Array(chunk));
+                }
+                const toPush = data.channelData[i].slice(
+                    buffered,
+                    buffered + rest
+                );
+
+                audioData[nowIndex].set(toPush, toBuffer);
             }
+            buffered += rest;
+            nowIndex++;
+            toBuffer = 0;
         }
-        this.buffered += info.reduce((prev, curr) => prev + curr.duration, 0);
+
+        this.buffered +=
+            info.reduce((prev, curr) => prev + curr.duration, 0) / 1000;
         this.bufferedSamples += info.reduce(
             (prev, curr) => prev + curr.samples,
             0
@@ -330,71 +367,112 @@
      * Check the buffered content and start playback if it has not started yet
      */
     private checkBufferedPlay() {
-        if (this.playing || this.loaded) return;
-        const played = this.ac.currentTime - this.lastStartTime;
+        if (this.playing || this.sampleRate === 0) return;
+        const played = this.lastBufferSamples / this.sampleRate;
         const dt = this.buffered - played;
+        if (this.loaded) {
+            this.playAudio(played);
+            return;
+        }
         if (dt < this.bufferPlayDuration) return;
+        console.log(played, this.lastBufferSamples, this.sampleRate);
+        this.lastBufferSamples = this.bufferedSamples;
         // Playback is needed
+        this.mergeBuffers();
+        if (!this.buffer) return;
+        if (this.playing) this.output.stop();
+        this.createSourceNode(this.buffer);
+        this.output.loop = false;
+        this.output.start(0, played);
+        this.lastStartTime = this.ac.currentTime;
+        this.playing = true;
+        this.output.addEventListener('ended', () => {
+            this.playing = false;
+            this.checkBufferedPlay();
+        });
+    }
+
+    private mergeBuffers() {
         const buffer = this.ac.createBuffer(
             this.audioData.length,
             this.bufferedSamples,
             this.sampleRate
         );
-        this.buffer = buffer;
         const chunk = this.sampleRate * this.bufferChunkSize;
-        const bufferedChunks = Math.floor(this.buffered / chunk);
-        const restLength = this.buffered % chunk;
+        const bufferedChunks = Math.floor(this.bufferedSamples / chunk);
+        const restLength = this.bufferedSamples % chunk;
         for (let i = 0; i < this.audioData.length; i++) {
             const audio = this.audioData[i];
             const data = new Float32Array(this.bufferedSamples);
             for (let j = 0; j < bufferedChunks; j++) {
                 data.set(audio[j], chunk * j);
             }
-            if (restLength !== 0) data.set(audio[bufferedChunks], 0);
+            if (restLength !== 0) {
+                data.set(
+                    audio[bufferedChunks].slice(0, restLength),
+                    chunk * bufferedChunks
+                );
+            }
+
             buffer.copyToChannel(data, i, 0);
         }
-        this.createSourceNode(buffer);
-        this.output.start(played);
-        this.lastStartTime = this.ac.currentTime;
-        this.output.addEventListener('ended', () => {
-            this.checkBufferedPlay();
-        });
+        this.buffer = buffer;
     }
 
-    private mergeBuffers() {}
-
     async start() {
         delete this.buffer;
         this.headerRecieved = false;
         this.audioType = '';
+        this.errored = false;
+        this.buffered = 0;
+        this.sampleRate = 0;
+        this.bufferedSamples = 0;
+        this.duration = 0;
+        this.loaded = false;
+        if (this.playing) this.output.stop();
+        this.playing = false;
+        this.lastStartTime = this.ac.currentTime;
     }
 
     end(done: boolean, reason?: string): void {
-        if (done) {
+        if (done && this.buffer) {
             this.loaded = true;
             delete this.controller;
             this.mergeBuffers();
-            const played = this.ac.currentTime - this.lastStartTime;
-            this.output.stop();
-            this.play(played);
+            // const played = this.lastBufferSamples / this.sampleRate;
+            // this.playAudio(played);
+            this.duration = this.buffered;
+            this.audioData = [];
+            this.decoder?.destroy();
+            delete this.decoder;
+            delete this.parser;
         } else {
             logger.warn(44, reason ?? '');
         }
     }
 
+    private playAudio(when?: number) {
+        if (!this.buffer) return;
+        this.lastStartTime = this.ac.currentTime;
+        if (this.playing) this.output.stop();
+        this.emit('play');
+        this.createSourceNode(this.buffer);
+        this.output.start(0, when);
+        this.playing = true;
+        console.log(when);
+
+        this.output.addEventListener('ended', () => {
+            this.playing = false;
+            this.emit('end');
+            if (this.loop && !this.output.loop) this.play(0);
+        });
+    }
+
     play(when?: number): void {
-        if (this.playing) return;
+        if (this.playing || this.errored) return;
         if (this.loaded && this.buffer) {
             this.playing = true;
-            this.lastStartTime = this.ac.currentTime;
-            this.emit('play');
-            this.createSourceNode(this.buffer);
-            this.output.start(when);
-            this.output.addEventListener('ended', () => {
-                this.playing = false;
-                this.emit('end');
-                if (this.loop && !this.output.loop) this.play(0);
-            });
+            this.playAudio(when);
         } else {
             this.controller?.start();
         }
@@ -404,13 +482,16 @@
         if (!this.target) return;
         const node = this.ac.createBufferSource();
         node.buffer = buffer;
+        if (this.playing) this.output.stop();
+        this.playing = false;
         this.output = node;
         node.connect(this.target.input);
         node.loop = this.loop;
     }
 
     stop(): number {
-        this.output.stop();
+        if (this.playing) this.output.stop();
+        this.playing = false;
         return this.ac.currentTime - this.lastStartTime;
     }
 
@@ -453,7 +534,7 @@ export class AudioElementSource extends AudioSource {
         this.audio.src = url;
     }
 
-    play(when: number): void {
+    play(when: number = 0): void {
         if (this.playing) return;
         this.audio.currentTime = when;
         this.audio.play();
@@ -510,7 +591,7 @@ export class AudioBufferSource extends AudioSource {
         this.lastStartTime = this.ac.currentTime;
         this.emit('play');
         this.createSourceNode(this.buffer);
-        this.output.start(when);
+        this.output.start(0, when);
         this.output.addEventListener('ended', () => {
             this.playing = false;
             this.emit('end');
diff --git a/src/module/audio/support.ts b/src/module/audio/support.ts
index 0a1d369..64240ba 100644
--- a/src/module/audio/support.ts
+++ b/src/module/audio/support.ts
@@ -25,21 +25,21 @@ export function isAudioSupport(type: AudioType): boolean {
     }
 }
 
-const typeMap = new Map([
-    ['ogg', 'audio/ogg; codecs="vorbis"'],
-    ['mp3', 'audio/mpeg'],
-    ['wav', 'audio/wav; codecs="1"'],
-    ['flac', 'audio/flac'],
-    ['opus', 'audio/ogg; codecs="opus"'],
-    ['aac', 'audio/aac']
+const typeMap = new Map<string, AudioType>([
+    ['ogg', AudioType.Ogg],
+    ['mp3', AudioType.Mp3],
+    ['wav', AudioType.Wav],
+    ['flac', AudioType.Flac],
+    ['opus', AudioType.Opus],
+    ['aac', AudioType.Aac]
 ]);
 
 /**
  * Guess the audio type from the file name extension
  * @param file The file name
  */
-export function guessTypeByExt(file: string) {
-    const ext = /\.[a-zA-Z]$/.exec(file);
+export function guessTypeByExt(file: string): AudioType | '' {
+    const ext = /\.[a-zA-Z\d]+$/.exec(file);
     if (!ext?.[0]) return '';
     const type = ext[0].slice(1);
     return typeMap.get(type.toLocaleLowerCase()) ?? '';
diff --git a/src/module/loader/stream.ts b/src/module/loader/stream.ts
index 63e8a77..2089021 100644
--- a/src/module/loader/stream.ts
+++ b/src/module/loader/stream.ts
@@ -82,6 +82,7 @@ export class StreamLoader
             return;
         }
         this.target.add(reader);
+        reader.piped(this);
 
         return this;
     }
@@ -98,25 +99,21 @@
         this.stream = stream;
         const reader = response.body?.getReader();
         const targets = [...this.target];
-        try {
-            await Promise.all(
-                targets.map(v => v.start(stream, this, response))
-            );
+        // try {
+        await Promise.all(targets.map(v => v.start(stream, this, response)));
 
-            // Start streaming
-            while (true) {
-                const { value, done } = await reader.read();
-                await Promise.all(
-                    targets.map(v => v.pump(value, done, response))
-                );
-                if (done) break;
-            }
-
-            this.loading = false;
-            targets.forEach(v => v.end(true));
-        } catch (e) {
-            logger.error(26, this.url, String(e));
+        // Start streaming
+        while (true) {
+            const { value, done } = await reader.read();
+            await Promise.all(targets.map(v => v.pump(value, done, response)));
+            if (done) break;
         }
+
+        this.loading = false;
+        targets.forEach(v => v.end(true));
+        // } catch (e) {
+        //     logger.error(26, this.url, String(e));
+        // }
     }
 
     cancel(reason?: string) {
diff --git a/src/source/data.d.ts b/src/source/data.d.ts
index b79325b..3bb17fa 100644
--- a/src/source/data.d.ts
+++ b/src/source/data.d.ts
@@ -210,13 +210,14 @@ type SoundIds =
     | 'zone.mp3'
 
 type BgmIds =
-    | 'beforeBoss.mp3'
+    | 'beforeBoss.opus'
    | 'cave.mp3'
    | 'escape.mp3'
    | 'escape2.mp3'
    | 'grass.mp3'
-    | 'mount.mp3'
+    | 'mount.opus'
    | 'night.mp3'
+    | 'output6.ogg'
    | 'palaceCenter.mp3'
    | 'palaceNorth.mp3'
    | 'palaceSouth.mp3'