feat: ogg opus decoder

This commit is contained in:
unanmed 2025-01-15 17:22:53 +08:00
parent 5265b0a90e
commit 88c5e39f5c
8 changed files with 417 additions and 39 deletions
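The core of this commit is a pluggable decoder registry on `AudioStreamSource` (see the source hunks below). As a rough sketch of how an Ogg Opus decoder could plug into that registry — the class body, return values, and import paths are illustrative assumptions, not the decoder shipped in this commit:

```ts
import { AudioStreamSource, IAudioDecoder, IAudioDecodeData } from './audio/source';
import { AudioType } from './audio/support';

// Hypothetical decoder: a real one would wrap a WASM/WebCodecs Opus decoder
// and fill channelData with the decoded per-channel Float32 PCM.
class OggOpusDecoder implements IAudioDecoder {
    async create(): Promise<void> {
        // allocate decoder state here
    }

    destroy(): void {
        // release decoder state here
    }

    async decode(data: Uint8Array): Promise<IAudioDecodeData> {
        // decode one chunk of the stream
        return { channelData: [], samplesDecoded: 0, sampleRate: 48000, errors: [] };
    }

    async flush(): Promise<IAudioDecodeData> {
        // decode whatever the decoder still buffers internally
        return { channelData: [], samplesDecoded: 0, sampleRate: 48000, errors: [] };
    }
}

// Register once; AudioStreamSource looks the decoder up by the sniffed AudioType.
AudioStreamSource.registerDecoder(AudioType.Opus, new OggOpusDecoder());
```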

View File

@ -22,6 +22,7 @@
"ant-design-vue": "^3.2.20",
"axios": "^1.7.4",
"chart.js": "^4.4.3",
"codec-parser": "^2.5.0",
"eventemitter3": "^5.0.1",
"gl-matrix": "^3.4.3",
"gsap": "^3.12.5",

View File

@ -32,6 +32,9 @@ importers:
chart.js:
specifier: ^4.4.3
version: 4.4.3
codec-parser:
specifier: ^2.5.0
version: 2.5.0
eventemitter3:
specifier: ^5.0.1
version: 5.0.1

View File

@ -23,6 +23,9 @@
"21": "Incorrect render prop type is delivered. key: '$1', expected type: '$2', delivered type: '$3'",
"22": "Incorrect props for custom tag. Please ensure you have delivered 'item' prop and other required props.",
"23": "Cannot get reader when fetching '$1'.",
"24": "Cannot decode stream source type of '$1', since there is no registered decoder for that type.",
"25": "Unknown audio type. Header: '$1'",
"26": "Uncaught error when fetching stream data from '$1'. Error info: $2.",
"1101": "Shadow extension needs 'floor-hero' extension as dependency.",
"1201": "Floor-damage extension needs 'floor-binder' extension as dependency.",
"1301": "Portal extension need 'floor-binder' extension as dependency.",
@ -74,6 +77,9 @@
"43": "Cannot set icon of '$1', since it does not exists. Please ensure you have delivered correct icon id or number.",
"44": "Unexpected end when loading stream audio, reason: '$1'",
"45": "Audio route with id of '$1' has already existed. New route will override old route.",
"46": "Cannot pipe new StreamReader object when stream is loading.",
"47": "Audio stream decoder for audio type '$1' has already existed.",
"48": "Sample rate in stream audio must be constant.",
"1001": "Item-detail extension needs 'floor-binder' and 'floor-damage' extension as dependency.",
"1101": "Cannot add new effect to point effect instance, for there's no more reserve space for it. Please increase the max count of the instance."
}

View File

@ -192,8 +192,8 @@ export class DelayEffect extends AudioEffect {
}
export class EchoEffect extends AudioEffect {
output: DelayNode;
input: DelayNode;
output: GainNode;
input: GainNode;
/** Delay node */
private readonly delay: DelayNode;
@ -207,8 +207,6 @@ export class EchoEffect extends AudioEffect {
constructor(ac: AudioContext) {
super(ac);
const delay = ac.createDelay();
this.input = delay;
this.output = delay;
const gain = ac.createGain();
gain.gain.value = 0.5;
delay.delayTime.value = 0.05;
@ -216,6 +214,8 @@ export class EchoEffect extends AudioEffect {
gain.connect(delay);
this.delay = delay;
this.gainNode = gain;
this.input = gain;
this.output = gain;
}
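The rewiring above makes the GainNode both the input and the output of the effect, with the DelayNode sitting on a feedback branch. A standalone sketch of that node graph using plain Web Audio nodes (the delay-to-gain connection is assumed from surrounding code the diff does not show):

```ts
const ac = new AudioContext();

const gain = ac.createGain();    // main path; also attenuates each repeat
const delay = ac.createDelay();  // feedback branch

gain.gain.value = 0.5;           // echo decay per pass, as in the constructor above
delay.delayTime.value = 0.05;    // echo spacing in seconds

// Feedback loop: the gain output feeds the delay, the delay feeds back into
// the gain, and the gain alone faces the outside world as input and output.
gain.connect(delay);
delay.connect(gain);

// Illustrative hookup: source.connect(gain); gain.connect(ac.destination);
```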
/**

View File

@ -100,6 +100,11 @@ export class AudioPlayer extends EventEmitter<AudioPlayerEvent> {
/**
* Creates a volume (gain) effect. Node graph:
* ```txt
*             |----------|
* Input ----> | GainNode | ----> Output
*             |----------|
* ```
*/
createVolumeEffect() {
return new VolumeEffect(this.ac);
@ -107,6 +112,11 @@ export class AudioPlayer extends EventEmitter<AudioPlayerEvent> {
/**
* Creates a stereo (panner) effect. Node graph:
* ```txt
*             |------------|
* Input ----> | PannerNode | ----> Output
*             |------------|
* ```
*/
createStereoEffect() {
return new StereoEffect(this.ac);
@ -114,6 +124,15 @@ export class AudioPlayer extends EventEmitter<AudioPlayerEvent> {
/**
* Creates a per-channel volume effect. Node graph:
* ```txt
*                                  |----------|
*                               -> | GainNode | \
*              |--------------| /  |----------|  -> |------------|
* Input ----> | SplitterNode |       ......        | MergerNode | ----> Output
*              |--------------| \  |----------|  -> |------------|
*                               -> | GainNode | /
*                                  |----------|
* ```
*/
createChannelVolumeEffect() {
return new ChannelVolumeEffect(this.ac);
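The per-channel volume diagram above corresponds to a splitter → per-channel gain → merger graph. A minimal standalone sketch with plain Web Audio nodes, assuming a stereo (two-channel) source:

```ts
const ac = new AudioContext();
const channels = 2;

const splitter = ac.createChannelSplitter(channels);
const merger = ac.createChannelMerger(channels);
// One GainNode per channel sits between the splitter and the merger.
const gains = Array.from({ length: channels }, () => ac.createGain());

gains.forEach((gain, i) => {
    splitter.connect(gain, i);  // take channel i out of the splitter
    gain.connect(merger, 0, i); // feed it back in on channel i of the merger
});

gains[1].gain.value = 0.5; // e.g. halve the right channel only

// Illustrative hookup: source.connect(splitter); merger.connect(ac.destination);
```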
@ -121,6 +140,15 @@ export class AudioPlayer extends EventEmitter<AudioPlayerEvent> {
/**
* Creates an echo (feedback delay) effect. Node graph:
* ```txt
*             |----------|
* Input ----> | GainNode | ----> Output
*         ^   |----------|    |
*         |                   |
*         |   |------------|  |
*         |-- | Delay Node | <--
*             |------------|
* ```
*/
createEchoEffect() {
return new EchoEffect(this.ac);

View File

@ -2,6 +2,9 @@ import EventEmitter from 'eventemitter3';
import { IStreamController, IStreamReader } from '../loader';
import { IAudioInput, IAudioOutput } from './effect';
import { logger } from '@/core/common/logger';
import { AudioType } from './support';
import CodecParser, { CodecFrame, MimeType, OggPage } from 'codec-parser';
import { isNil } from 'lodash-es';
interface AudioSourceEvent {
play: [];
@ -46,7 +49,70 @@ export abstract class AudioSource
abstract setLoop(loop: boolean): void;
}
export interface IAudioDecodeError {
/** Error message */
message: string;
}
export interface IAudioDecodeData {
/** Decoded audio data for each channel */
channelData: Float32Array[];
/** Number of PCM samples decoded so far */
samplesDecoded: number;
/** Audio sample rate */
sampleRate: number;
/** Decoding errors */
errors: IAudioDecodeError[];
}
export interface IAudioDecoder {
/**
* Create and initialize the decoder.
*/
create(): Promise<void>;
/**
* Destroy the decoder and release its resources.
*/
destroy(): void;
/**
* Decode a chunk of streamed data.
* @param data encoded audio bytes to decode
*/
decode(data: Uint8Array): Promise<IAudioDecodeData>;
/**
* Flush the decoder, decoding any data still buffered internally.
*/
flush(): Promise<IAudioDecodeData>;
}
const fileSignatures: Map<string, AudioType> = new Map([
['49 44 33', AudioType.Mp3],
['4F 67 67 53', AudioType.Ogg],
['52 49 46 46', AudioType.Wav],
['66 4C 61 43', AudioType.Flac],
['4F 70 75 73', AudioType.Opus],
['FF F1', AudioType.Aac],
['FF F9', AudioType.Aac]
]);
const mimeTypeMap: Record<AudioType, MimeType> = {
[AudioType.Aac]: 'audio/aac',
[AudioType.Flac]: 'audio/flac',
[AudioType.Mp3]: 'audio/mpeg',
[AudioType.Ogg]: 'application/ogg',
[AudioType.Opus]: 'application/ogg',
[AudioType.Wav]: 'application/ogg'
};
function isOggPage(data: any): data is OggPage {
return !isNil(data.isFirstPage);
}
export class AudioStreamSource extends AudioSource implements IStreamReader {
static readonly decoderMap: Map<AudioType, IAudioDecoder> = new Map();
output: AudioBufferSourceNode;
/** Audio data */
@ -54,34 +120,263 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
/** Whether the audio has finished loading completely */
loaded: boolean = false;
/** Buffered duration in seconds; equals the track duration once fully buffered */
buffered: number = 0;
/** Number of buffered samples */
bufferedSamples: number = 0;
/** Track duration; stays 0 until loading completes */
duration: number = 0;
/** During streaming, minimum seconds of audio to buffer before playback starts */
bufferPlayDuration: number = 1;
/** Audio sample rate; stays 0 until it has been parsed */
sampleRate: number = 0;
private controller?: IStreamController;
private loop: boolean = false;
private target?: IAudioInput;
/** Time at which playback started */
private lastStartTime: number = 0;
/** Whether the header has been received */
private headerRecieved: boolean = false;
/** Audio type */
private audioType: AudioType | '' = '';
/** Audio decoder */
private decoder?: IAudioDecoder;
/** Audio parser */
private parser?: CodecParser;
/** Seconds of audio collected into each cached Float32Array */
private bufferChunkSize = 10;
/** Cached audio data for streaming decode; every bufferChunkSize seconds of samples forms one Float32Array */
private audioData: Float32Array[][] = [];
/**
* Register an audio stream decoder for a given audio type.
* @param type audio type handled by the decoder
* @param decoder decoder instance to register
*/
static registerDecoder(type: AudioType, decoder: IAudioDecoder) {
if (this.decoderMap.has(type)) {
logger.warn(47, type);
return;
}
this.decoderMap.set(type, decoder);
}
constructor(context: AudioContext) {
super(context);
this.output = context.createBufferSource();
}
/**
* Set how many seconds of audio each cached Float32Array chunk holds. Defaults to 10.
* @param size chunk duration in seconds
*/
setChunkSize(size: number) {
if (this.controller?.loading || this.loaded) return;
this.bufferChunkSize = size;
}
piped(controller: IStreamController): void {
this.controller = controller;
}
pump(data: Uint8Array | undefined): void {
async pump(data: Uint8Array | undefined, done: boolean): Promise<void> {
if (!data) return;
if (!this.headerRecieved) {
// Inspect the header bytes to determine the audio type
const toCheck = [...data.slice(0, 16)];
const hexArray = toCheck.map(v => v.toString(16).padStart(2, '0'));
const hex = hexArray.join(' ');
for (const [key, value] of fileSignatures) {
if (hex.startsWith(key)) {
this.audioType = value;
break;
}
}
if (!this.audioType) {
logger.error(25, hex);
return;
}
// Create the decoder
const decoder = AudioStreamSource.decoderMap.get(this.audioType);
this.decoder = decoder;
if (!decoder) {
logger.error(24, this.audioType);
return Promise.reject(
`Cannot decode stream source type of '${this.audioType}', since there is no registered decoder for that type.`
);
}
// Create the data parser
const mime = mimeTypeMap[this.audioType];
const parser = new CodecParser(mime);
this.parser = parser;
await decoder.create();
this.headerRecieved = true;
}
start(): void {
const decoder = this.decoder;
const parser = this.parser;
if (!decoder || !parser) {
return Promise.reject(
'No parser or decoder attached in this AudioStreamSource'
);
}
await this.decodeData(data, decoder, parser);
if (done) await this.decodeFlushData(decoder, parser);
this.checkBufferedPlay();
}
/**
* Check that the sample rate of the stream stays constant.
*/
private checkSampleRate(info: (OggPage | CodecFrame)[]) {
const first = info[0];
if (first) {
const frame = isOggPage(first) ? first.codecFrames[0] : first;
if (frame) {
const rate = frame.header.sampleRate;
if (this.sampleRate === 0) {
this.sampleRate = rate;
} else {
if (rate !== this.sampleRate) {
logger.warn(48);
}
}
}
}
}
/**
* Decode a chunk of streamed data and append it to the cache.
*/
private async decodeData(
data: Uint8Array,
decoder: IAudioDecoder,
parser: CodecParser
) {
// Decode the audio data
const audioData = await decoder.decode(data);
// @ts-expect-error Incorrect type declarations in the library
const audioInfo = [...parser.parseChunk(data)] as (
| OggPage
| CodecFrame
)[];
// Check the sample rate
this.checkSampleRate(audioInfo);
// Append the decoded audio data
this.appendDecodedData(audioData, audioInfo);
}
/**
* Flush the decoder and parser, decoding any remaining data.
*/
private async decodeFlushData(decoder: IAudioDecoder, parser: CodecParser) {
const audioData = await decoder.flush();
// @ts-expect-error Incorrect type declarations in the library
const audioInfo = [...parser.flush()] as (OggPage | CodecFrame)[];
this.checkSampleRate(audioInfo);
this.appendDecodedData(audioData, audioInfo);
}
/**
* Append decoded data and its frame info to the audio cache.
*/
private appendDecodedData(
data: IAudioDecodeData,
info: (CodecFrame | OggPage)[]
) {
const channels = data.channelData.length;
if (channels === 0) return;
if (this.audioData.length !== channels) {
this.audioData = [];
for (let i = 0; i < channels; i++) {
this.audioData.push([]);
}
}
// Work out where this data should be placed in the cache
const chunk = this.sampleRate * this.bufferChunkSize;
const sampled = this.bufferedSamples;
const pushIndex = Math.floor(sampled / chunk);
const bufferIndex = sampled % (this.sampleRate * chunk);
const dataLength = data.channelData[0].length;
const restLength = chunk - bufferIndex;
// Write the data into the cache
for (let i = 0; i < channels; i++) {
const audioData = this.audioData[i];
if (!audioData[pushIndex]) {
audioData.push(new Float32Array(chunk * this.sampleRate));
}
audioData[pushIndex].set(data.channelData[i], bufferIndex);
if (restLength < dataLength) {
const nextData = new Float32Array(chunk * this.sampleRate);
nextData.set(data.channelData[i].slice(restLength), 0);
audioData.push(nextData);
}
}
this.buffered += info.reduce((prev, curr) => prev + curr.duration, 0);
this.bufferedSamples += info.reduce(
(prev, curr) => prev + curr.samples,
0
);
}
/**
* Check whether enough audio has been buffered to start playback.
*/
private checkBufferedPlay() {
if (this.playing || this.loaded) return;
const played = this.ac.currentTime - this.lastStartTime;
const dt = this.buffered - played;
if (dt < this.bufferPlayDuration) return;
// Enough has been buffered; start playback
const buffer = this.ac.createBuffer(
this.audioData.length,
this.bufferedSamples,
this.sampleRate
);
this.buffer = buffer;
const chunk = this.sampleRate * this.bufferChunkSize;
const bufferedChunks = Math.floor(this.buffered / chunk);
const restLength = this.buffered % chunk;
for (let i = 0; i < this.audioData.length; i++) {
const audio = this.audioData[i];
const data = new Float32Array(this.bufferedSamples);
for (let j = 0; j < bufferedChunks; j++) {
data.set(audio[j], chunk * j);
}
if (restLength !== 0) data.set(audio[bufferedChunks], 0);
buffer.copyToChannel(data, i, 0);
}
this.createSourceNode(buffer);
this.output.start(played);
this.lastStartTime = this.ac.currentTime;
this.output.addEventListener('ended', () => {
this.checkBufferedPlay();
});
}
private mergeBuffers() {}
async start() {
delete this.buffer;
this.headerRecieved = false;
this.audioType = '';
}
end(done: boolean, reason?: string): void {
if (done) {
this.loaded = true;
delete this.controller;
this.mergeBuffers();
const played = this.ac.currentTime - this.lastStartTime;
this.output.stop();
this.play(played);
} else {
logger.warn(44, reason ?? '');
}
@ -93,29 +388,34 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
this.playing = true;
this.lastStartTime = this.ac.currentTime;
this.emit('play');
this.createSourceNode(this.buffer);
this.output.start(when);
this.output.addEventListener('ended', () => {
this.playing = false;
this.emit('end');
if (this.loop) this.play(0);
if (this.loop && !this.output.loop) this.play(0);
});
} else {
this.controller?.start();
}
}
private createSourceNode(buffer: AudioBuffer) {
if (!this.target) return;
const node = this.ac.createBufferSource();
node.buffer = buffer;
this.output = node;
node.connect(this.target.input);
node.loop = this.loop;
}
stop(): number {
this.output.stop();
return this.ac.currentTime - this.lastStartTime;
}
connect(target: IAudioInput): void {
if (!this.buffer) return;
const node = this.ac.createBufferSource();
node.buffer = this.buffer;
this.output = node;
node.connect(target.input);
node.loop = this.loop;
this.target = target;
}
setLoop(loop: boolean): void {
@ -185,6 +485,7 @@ export class AudioBufferSource extends AudioSource {
/** Time at which playback started */
private lastStartTime: number = 0;
private target?: IAudioInput;
constructor(context: AudioContext) {
super(context);
@ -204,28 +505,35 @@ export class AudioBufferSource extends AudioSource {
}
play(when?: number): void {
if (this.playing) return;
if (this.playing || !this.buffer) return;
this.playing = true;
this.lastStartTime = this.ac.currentTime;
this.emit('play');
this.createSourceNode(this.buffer);
this.output.start(when);
this.output.addEventListener('ended', () => {
this.playing = false;
this.emit('end');
if (this.loop) this.play(0);
if (this.loop && !this.output.loop) this.play(0);
});
}
private createSourceNode(buffer: AudioBuffer) {
if (!this.target) return;
const node = this.ac.createBufferSource();
node.buffer = buffer;
this.output = node;
node.connect(this.target.input);
node.loop = this.loop;
}
stop(): number {
this.output.stop();
return this.ac.currentTime - this.lastStartTime;
}
connect(target: IAudioInput): void {
if (!this.buffer) return;
const node = this.ac.createBufferSource();
node.buffer = this.buffer;
node.connect(target.input);
this.target = target;
}
setLoop(loop: boolean): void {

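Putting the pieces together, a plausible end-to-end use of the new streaming source might look like the sketch below. The URL, the `IAudioInput`-shaped target, and the exact call order are assumptions for illustration; they are not code from this commit.

```ts
import { StreamLoader } from '../loader';
import { AudioStreamSource } from './source';

const ac = new AudioContext();
const source = new AudioStreamSource(ac);

source.setChunkSize(10);                   // seconds of PCM per cached chunk
source.connect({ input: ac.destination }); // minimal IAudioInput-like target
source.setLoop(false);

// Pipe the fetched stream into the source. Decoding happens as data arrives,
// and playback starts once bufferPlayDuration seconds have been buffered.
const loader = new StreamLoader('/assets/bgm.opus'); // placeholder URL
loader.pipe(source);
source.play(); // with nothing buffered yet, this defers to the stream controller
```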
View File

@ -2,11 +2,20 @@ const audio = new Audio();
const supportMap = new Map<string, boolean>();
export const enum AudioType {
Mp3 = 'audio/mpeg',
Wav = 'audio/wav; codecs="1"',
Flac = 'audio/flac',
Opus = 'audio/ogg; codecs="opus"',
Ogg = 'audio/ogg; codecs="vorbis"',
Aac = 'audio/aac'
}
/**
* Check whether the browser can natively play the given audio type.
* @param type audio MIME type to test
*/
export function isAudioSupport(type: string): boolean {
export function isAudioSupport(type: AudioType): boolean {
if (supportMap.has(type)) return supportMap.get(type)!;
else {
const support = audio.canPlayType(type);
@ -22,7 +31,7 @@ const typeMap = new Map<string, string>([
['wav', 'audio/wav; codecs="1"'],
['flac', 'audio/flac'],
['opus', 'audio/ogg; codecs="opus"'],
['acc', 'audio/acc']
['aac', 'audio/aac']
]);
/**
@ -36,11 +45,11 @@ export function guessTypeByExt(file: string) {
return typeMap.get(type.toLocaleLowerCase()) ?? '';
}
isAudioSupport('audio/ogg; codecs="vorbis"');
isAudioSupport('audio/mpeg');
isAudioSupport('audio/wav; codecs="1"');
isAudioSupport('audio/flac');
isAudioSupport('audio/ogg; codecs="opus"');
isAudioSupport('audio/acc');
isAudioSupport(AudioType.Ogg);
isAudioSupport(AudioType.Mp3);
isAudioSupport(AudioType.Wav);
isAudioSupport(AudioType.Flac);
isAudioSupport(AudioType.Opus);
isAudioSupport(AudioType.Aac);
console.log(supportMap);
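A small usage sketch for the helpers in this module; the file name is a placeholder:

```ts
import { AudioType, guessTypeByExt, isAudioSupport } from './support';

// Guess the MIME string from the extension, then ask the browser about it.
const mime = guessTypeByExt('bgm.opus'); // likely 'audio/ogg; codecs="opus"'
const canPlayOpus = isAudioSupport(AudioType.Opus);

if (!mime || !canPlayOpus) {
    // fall back to the streaming decoder path added in this commit
}
```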

View File

@ -2,6 +2,8 @@ import { logger } from '@/core/common/logger';
import EventEmitter from 'eventemitter3';
export interface IStreamController<T = void> {
readonly loading: boolean;
/**
*
*/
@ -20,7 +22,11 @@ export interface IStreamReader<T = any> {
* @param data chunk of bytes read from the stream
* @param done whether the stream has finished
*/
pump(data: Uint8Array | undefined, done: boolean): void;
pump(
data: Uint8Array | undefined,
done: boolean,
response: Response
): Promise<void>;
/**
*
@ -33,7 +39,11 @@ export interface IStreamReader<T = any> {
* @param stream the readable stream being consumed
* @param controller the stream controller driving the read loop
*/
start(stream: ReadableStream, controller: IStreamController<T>): void;
start(
stream: ReadableStream,
controller: IStreamController<T>,
response: Response
): Promise<void>;
/**
*
@ -56,7 +66,7 @@ export class StreamLoader
/** The readable stream being read */
private stream?: ReadableStream;
private loading: boolean = false;
loading: boolean = false;
constructor(public readonly url: string) {
super();
@ -67,6 +77,10 @@ export class StreamLoader
* @param reader reader that receives the streamed data
*/
pipe(reader: IStreamReader) {
if (this.loading) {
logger.warn(46);
return;
}
this.target.add(reader);
return this;
}
@ -83,17 +97,26 @@ export class StreamLoader
// Get the stream reader
this.stream = stream;
const reader = response.body?.getReader();
this.target.forEach(v => v.start(stream, this));
const targets = [...this.target];
try {
await Promise.all(
targets.map(v => v.start(stream, this, response))
);
// Start streaming
while (true) {
const { value, done } = await reader.read();
this.target.forEach(v => v.pump(value, done));
await Promise.all(
targets.map(v => v.pump(value, done, response))
);
if (done) break;
}
this.loading = false;
this.target.forEach(v => v.end(true));
targets.forEach(v => v.end(true));
} catch (e) {
logger.error(26, this.url, String(e));
}
}
cancel(reason?: string) {
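For reference, a minimal reader satisfying the updated asynchronous `IStreamReader` contract might look like the sketch below. It assumes the members seen in these diffs (`piped`, `pump`, `start`, `end`) make up the whole contract and is not part of the commit.

```ts
import { IStreamController, IStreamReader } from './loader';

/** Toy reader that only counts bytes; illustrative only. */
class ByteCountReader implements IStreamReader {
    received = 0;
    private controller?: IStreamController;

    piped(controller: IStreamController): void {
        this.controller = controller; // keep it around to cancel if needed
    }

    async start(
        stream: ReadableStream,
        controller: IStreamController,
        response: Response
    ): Promise<void> {
        // Content-Length may be absent for chunked transfers.
        console.log('expected bytes:', response.headers.get('Content-Length'));
    }

    async pump(
        data: Uint8Array | undefined,
        done: boolean,
        response: Response
    ): Promise<void> {
        if (data) this.received += data.byteLength;
        if (done) console.log(`received ${this.received} bytes`);
    }

    end(done: boolean, reason?: string): void {
        if (!done) console.warn('stream ended early:', reason);
    }
}
```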