refactor: 音频系统

This commit is contained in:
unanmed 2026-03-17 19:34:30 +08:00
parent 191ba8d1db
commit ee54148558
29 changed files with 2170 additions and 1273 deletions

View File

@ -1,6 +1,7 @@
{
"name": "@user/client-base",
"dependencies": {
"@motajs/audio": "workspace:*",
"@motajs/render": "workspace:*",
"@motajs/client-base": "workspace:*"
}

View File

@ -5,3 +5,5 @@ export function create() {
}
export * from './material';
export * from './ins';

View File

@ -0,0 +1,8 @@
import { BGMPlayer, MotaAudioContext, SoundPlayer } from '@motajs/audio';
/** 游戏全局音频上下文 */
export const audioContext = new MotaAudioContext();
/** 音效播放器 */
export const soundPlayer = new SoundPlayer(audioContext);
/** 音乐播放器 */
export const bgmPlayer = new BGMPlayer(audioContext);

View File

@ -1,268 +0,0 @@
import EventEmitter from 'eventemitter3';
import { audioPlayer, AudioPlayer, AudioRoute, AudioStatus } from './player';
import { guessTypeByExt, isAudioSupport } from './support';
import { logger } from '@motajs/common';
import { StreamLoader } from '../loader';
import { linear, sleep, Transition } from 'mutate-animate';
import { VolumeEffect } from './effect';
/** Per-BGM volume handle: the gain effect on the route plus the fade transition that drives it. */
interface BgmVolume {
    effect: VolumeEffect;
    transition: Transition;
}

/** Events emitted by {@link BgmController}; none carry a payload. */
interface BgmControllerEvent {
    play: [];
    pause: [];
    resume: [];
    stop: [];
}

/**
 * Background-music controller. Registers each BGM as a named route on the
 * shared {@link AudioPlayer}, keeps at most one BGM selected at a time, and
 * cross-fades per-BGM volume via a {@link Transition} when a route starts
 * or ends.
 */
export class BgmController<
    T extends string = BgmIds
> extends EventEmitter<BgmControllerEvent> {
    /** Prefix prepended to a BGM name to form its route id (e.g. `bgms.<name>`). */
    prefix: string = 'bgms.';
    /** Volume controller (gain effect + fade transition) for each registered BGM. */
    readonly gain: Map<T, BgmVolume> = new Map();
    /** The BGM currently selected for playback, if any. */
    playingBgm?: T;
    /** Whether a BGM is currently playing. */
    playing: boolean = false;
    /** Whether the controller is enabled; when disabled, play/resume do nothing. */
    enabled: boolean = true;
    /** Master volume effect appended to every BGM route. */
    private readonly mainGain: VolumeEffect;
    /** When true, all BGM switching (play/pause/resume/stop) is ignored. */
    private blocking: boolean = false;
    /** Fade-in/fade-out duration in milliseconds. */
    private transitionTime: number = 2000;

    constructor(public readonly player: AudioPlayer) {
        super();
        this.mainGain = player.createVolumeEffect();
    }

    /**
     * Set the fade duration and apply it to every existing BGM transition.
     * @param time Fade duration in milliseconds.
     */
    setTransitionTime(time: number) {
        this.transitionTime = time;
        for (const [, value] of this.gain) {
            value.transition.time(time);
        }
    }

    /** Block all subsequent BGM switching until {@link unblockChange} is called. */
    blockChange() {
        this.blocking = true;
    }

    /** Re-allow BGM switching after {@link blockChange}. */
    unblockChange() {
        this.blocking = false;
    }

    /**
     * Set the master BGM volume.
     * @param volume Volume value forwarded to the master gain effect.
     */
    setVolume(volume: number) {
        this.mainGain.setVolume(volume);
    }

    /** Get the master BGM volume. */
    getVolume() {
        return this.mainGain.getVolume();
    }

    /**
     * Enable or disable BGM playback; resumes when enabling, stops when disabling.
     * @param enabled Whether BGM playback is enabled.
     */
    setEnabled(enabled: boolean) {
        if (enabled) this.resume();
        else this.stop();
        this.enabled = enabled;
    }

    /** Set the prefix used when building route ids for BGM names. */
    setPrefix(prefix: string) {
        this.prefix = prefix;
    }

    // Full route id for a BGM name: prefix + name.
    private getId(name: T) {
        return `${this.prefix}${name}`;
    }

    /**
     * Get the {@link AudioRoute} registered for a BGM.
     * @param id BGM name (without prefix).
     */
    get(id: T) {
        return this.player.getRoute(this.getId(id));
    }

    /**
     * Register a BGM. Natively playable formats use an HTMLAudioElement
     * source; other formats are streamed into a decoding stream source.
     * NOTE(review): this method uses the module-level `audioPlayer` rather
     * than `this.player` when building routes — fine for the singleton, but
     * inconsistent with the constructor parameter; confirm intent.
     * @param id BGM name; its file extension determines the audio type.
     * @param url URL of the BGM file; defaults to `project/bgms/<id>`.
     */
    addBgm(id: T, url: string = `project/bgms/${id}`) {
        const type = guessTypeByExt(id);
        if (!type) {
            logger.warn(50, id.split('.').slice(0, -1).join('.'));
            return;
        }
        const gain = this.player.createVolumeEffect();
        if (isAudioSupport(type)) {
            // Browser can decode this format natively: element-backed source.
            const source = audioPlayer.createElementSource();
            source.setSource(url);
            source.setLoop(true);
            const route = new AudioRoute(source, audioPlayer);
            route.addEffect([gain, this.mainGain]);
            audioPlayer.addRoute(this.getId(id), route);
            this.setTransition(id, route, gain);
        } else {
            // Unsupported format: stream the file into a decoding source.
            const source = audioPlayer.createStreamSource();
            const stream = new StreamLoader(url);
            stream.pipe(source);
            source.setLoop(true);
            const route = new AudioRoute(source, audioPlayer);
            route.addEffect([gain, this.mainGain]);
            audioPlayer.addRoute(this.getId(id), route);
            this.setTransition(id, route, gain);
        }
    }

    /**
     * Unregister a BGM and destroy its fade transition's ticker.
     * @param id BGM name.
     */
    removeBgm(id: T) {
        this.player.removeRoute(this.getId(id));
        const gain = this.gain.get(id);
        gain?.transition.ticker.destroy();
        this.gain.delete(id);
    }

    // Wire fade-in on route start and fade-out on route end. A Transition
    // animates a 'volume' value; while a fade is active, a tick handler
    // copies that value into the route's gain effect.
    private setTransition(id: T, route: AudioRoute, gain: VolumeEffect) {
        const transition = new Transition();
        transition
            .time(this.transitionTime)
            .mode(linear())
            .transition('volume', 0);
        const tick = () => {
            gain.setVolume(transition.value.volume);
        };
        /**
         * Attach the tick, wait out the fade, then detach it and snap the
         * gain to its target — but only if the route is still in the
         * expected state and no newer pause/stop superseded this fade
         * (detected via the route's stopIdentifier token).
         * @param expect Status the route should still have once the fade ends.
         */
        const setTick = async (expect: AudioStatus) => {
            transition.ticker.remove(tick);
            transition.ticker.add(tick);
            const identifier = route.stopIdentifier;
            // Wait a little longer than the fade to be sure it completed.
            await sleep(this.transitionTime + 500);
            if (
                route.status === expect &&
                identifier === route.stopIdentifier
            ) {
                transition.ticker.remove(tick);
                // Snap to the fade target to avoid residual drift.
                if (route.status === AudioStatus.Playing) {
                    gain.setVolume(1);
                } else {
                    gain.setVolume(0);
                }
            }
        };
        route.onStart(async () => {
            transition.transition('volume', 1);
            setTick(AudioStatus.Playing);
        });
        route.onEnd(() => {
            transition.transition('volume', 0);
            setTick(AudioStatus.Paused);
        });
        route.setEndTime(this.transitionTime);
        this.gain.set(id, { effect: gain, transition });
    }

    /**
     * Play a BGM, pausing whichever other BGM was selected. No-op while
     * switching is blocked; when disabled, only records the selection
     * without starting playback.
     * @param id BGM name.
     * @param when Playback offset forwarded to the player.
     */
    play(id: T, when?: number) {
        if (this.blocking) return;
        if (id !== this.playingBgm && this.playingBgm) {
            this.player.pause(this.getId(this.playingBgm));
        }
        this.playingBgm = id;
        if (!this.enabled) return;
        this.player.play(this.getId(id), when);
        this.playing = true;
        this.emit('play');
    }

    /** Resume the selected BGM; no-op when blocked, disabled, or already playing. */
    resume() {
        if (this.blocking || !this.enabled || this.playing) return;
        if (this.playingBgm) {
            this.player.resume(this.getId(this.playingBgm));
        }
        this.playing = true;
        this.emit('resume');
    }

    /** Pause the selected BGM; no-op when blocked or disabled. */
    pause() {
        if (this.blocking || !this.enabled) return;
        if (this.playingBgm) {
            this.player.pause(this.getId(this.playingBgm));
        }
        this.playing = false;
        this.emit('pause');
    }

    /** Stop the selected BGM; no-op when blocked or disabled. */
    stop() {
        if (this.blocking || !this.enabled) return;
        if (this.playingBgm) {
            this.player.stop(this.getId(this.playingBgm));
        }
        this.playing = false;
        this.emit('stop');
    }
}
/** Shared BGM controller bound to the global audio player. */
export const bgmController = new BgmController<BgmIds>(audioPlayer);

/**
 * Registers every BGM listed in the game data once the engine core has
 * initialized. Relies on the ambient `Mota` module registry and the global
 * game-data object (`data_a1e2fb4a_…`) — both provided by the host runtime.
 */
export function loadAllBgm() {
    const { loading } = Mota.require('@user/data-base');
    loading.once('coreInit', () => {
        const data = data_a1e2fb4a_e986_4524_b0da_9b7ba7c0874d;
        for (const bgm of data.main.bgms) {
            bgmController.addBgm(bgm);
        }
    });
}

View File

@ -1,203 +0,0 @@
import { logger } from '@motajs/common';
import { OggVorbisDecoderWebWorker } from '@wasm-audio-decoders/ogg-vorbis';
import { OggOpusDecoderWebWorker } from 'ogg-opus-decoder';
import { AudioType, isAudioSupport } from './support';
import type { AudioPlayer } from './player';
/** Leading magic-byte signatures used to sniff an audio container format. */
const fileSignatures: [AudioType, number[]][] = [
    [AudioType.Mp3, [0x49, 0x44, 0x33]],
    [AudioType.Ogg, [0x4f, 0x67, 0x67, 0x53]],
    [AudioType.Wav, [0x52, 0x49, 0x46, 0x46]],
    [AudioType.Flac, [0x66, 0x4c, 0x61, 0x43]],
    [AudioType.Aac, [0xff, 0xf1]],
    [AudioType.Aac, [0xff, 0xf9]]
];

/** Markers that distinguish specific codecs inside an Ogg container. */
const oggHeaders: [AudioType, number[]][] = [
    // 'OpusHead'
    [AudioType.Opus, [0x4f, 0x70, 0x75, 0x73, 0x48, 0x65, 0x61, 0x64]]
];

/**
 * Detect the audio type of raw file bytes by their magic numbers.
 * Only the first 256 bytes are inspected. Returns the empty string when
 * no known signature matches.
 */
export function checkAudioType(data: Uint8Array) {
    const header = data.slice(0, 256);
    // True when `sig` appears in the header starting at `offset`.
    const matchesAt = (sig: number[], offset: number) =>
        sig.every((byte, idx) => header[offset + idx] === byte);

    let detected: AudioType | '' = '';
    for (const [type, signature] of fileSignatures) {
        if (matchesAt(signature, 0)) {
            detected = type;
            break;
        }
    }
    if (detected === AudioType.Ogg) {
        // An Ogg container may actually hold Opus; scan for its marker.
        outer: for (const [type, marker] of oggHeaders) {
            for (let offset = 0; offset < header.length; offset++) {
                if (matchesAt(marker, offset)) {
                    detected = type;
                    break outer;
                }
            }
        }
    }
    return detected;
}
/** A single error reported by an audio decoder. */
export interface IAudioDecodeError {
    /** Human-readable description of the decode failure. */
    message: string;
}

/** Result of decoding compressed audio into PCM. */
export interface IAudioDecodeData {
    /** Decoded PCM samples, one Float32Array per channel. */
    channelData: Float32Array<ArrayBuffer>[];
    /** Number of PCM samples decoded. */
    samplesDecoded: number;
    /** Sample rate of the decoded audio, in Hz. */
    sampleRate: number;
    /** Errors encountered during decoding. */
    errors: IAudioDecodeError[];
}
/**
 * Base class for audio decoders that turn compressed bytes into PCM.
 * Concrete decoders register themselves per {@link AudioType}; they are
 * used for formats the browser cannot decode natively.
 */
export abstract class AudioDecoder {
    /** Registered decoder constructors, keyed by audio type. */
    static readonly decoderMap: Map<AudioType, new () => AudioDecoder> =
        new Map();

    /**
     * Register a decoder for an audio type. If the type is already
     * registered, warns and keeps the existing decoder.
     * @param type Audio type the decoder handles.
     * @param decoder Decoder constructor.
     */
    static registerDecoder(type: AudioType, decoder: new () => AudioDecoder) {
        if (this.decoderMap.has(type)) {
            logger.warn(47, type);
            return;
        }
        this.decoderMap.set(type, decoder);
    }

    /**
     * Decode raw audio bytes into an AudioBuffer. Natively supported types
     * go through Web Audio's decodeAudioData; otherwise a registered
     * decoder is created, run over the whole file, and destroyed.
     * Returns null when the type is unknown, no decoder is registered,
     * or decoding yields nothing.
     * @param data Raw audio file bytes.
     * @param player AudioPlayer whose AudioContext builds the buffer.
     */
    static async decodeAudioData(data: Uint8Array, player: AudioPlayer) {
        // Only the first 256 bytes are needed for type detection (and for
        // the diagnostic hex dump below).
        const toCheck = data.slice(0, 256);
        const type = checkAudioType(data);
        if (type === '') {
            // Unknown signature: log the header bytes as hex for diagnosis.
            logger.error(
                25,
                [...toCheck]
                    .map(v => v.toString(16).padStart(2, '0'))
                    .join(' ')
                    .toUpperCase()
            );
            return null;
        }
        if (isAudioSupport(type)) {
            if (data.buffer instanceof ArrayBuffer) {
                return player.ac.decodeAudioData(data.buffer);
            } else {
                // decodeAudioData requires a plain ArrayBuffer (not a
                // SharedArrayBuffer-backed view).
                return null;
            }
        } else {
            const Decoder = this.decoderMap.get(type);
            if (!Decoder) {
                return null;
            } else {
                const decoder = new Decoder();
                await decoder.create();
                const decodedData = await decoder.decodeAll(data);
                if (!decodedData) return null;
                // Copy the decoded PCM channels into a Web Audio buffer.
                const buffer = player.ac.createBuffer(
                    decodedData.channelData.length,
                    decodedData.channelData[0].length,
                    decodedData.sampleRate
                );
                decodedData.channelData.forEach((v, i) => {
                    buffer.copyToChannel(v, i);
                });
                decoder.destroy();
                return buffer;
            }
        }
    }

    /** Initialize the decoder (e.g. spawn its worker); must resolve before decoding. */
    abstract create(): Promise<void>;

    /** Release all decoder resources. */
    abstract destroy(): void;

    /**
     * Decode one chunk of streamed data.
     * @param data Chunk of compressed audio bytes.
     */
    abstract decode(data: Uint8Array): Promise<IAudioDecodeData | undefined>;

    /**
     * Decode a complete audio file in one call.
     * @param data Entire compressed audio file.
     */
    abstract decodeAll(data: Uint8Array): Promise<IAudioDecodeData | undefined>;

    /** Flush buffered input and return any remaining decoded samples. */
    abstract flush(): Promise<IAudioDecodeData | undefined>;
}
/** Ogg Vorbis decoder backed by a wasm web-worker decoder. */
export class VorbisDecoder extends AudioDecoder {
    /** Underlying worker decoder; undefined before create() and after destroy(). */
    decoder?: OggVorbisDecoderWebWorker;

    /** Spawn the worker and wait until it is ready to accept data. */
    async create(): Promise<void> {
        this.decoder = new OggVorbisDecoderWebWorker();
        await this.decoder.ready;
    }

    /** Free the worker's resources. */
    destroy(): void {
        this.decoder?.free();
        // Drop the reference so any late decode/flush call resolves to
        // undefined instead of touching a freed worker.
        this.decoder = undefined;
    }

    /**
     * Decode one chunk of streamed Vorbis data.
     * @param data Chunk of compressed bytes.
     * @returns Decoded PCM, or undefined if the decoder is not created.
     */
    async decode(data: Uint8Array): Promise<IAudioDecodeData | undefined> {
        // Cast bridges the worker library's result type to our interface;
        // the `| undefined` keeps the optional-chaining result honest.
        return this.decoder?.decode(data) as
            | Promise<IAudioDecodeData>
            | undefined;
    }

    /**
     * Decode a complete Vorbis file.
     * @param data Entire compressed file.
     */
    async decodeAll(data: Uint8Array): Promise<IAudioDecodeData | undefined> {
        return this.decoder?.decodeFile(data) as
            | Promise<IAudioDecodeData>
            | undefined;
    }

    /** Flush buffered input and return any remaining decoded samples. */
    async flush(): Promise<IAudioDecodeData | undefined> {
        return this.decoder?.flush() as Promise<IAudioDecodeData> | undefined;
    }
}
/** Ogg Opus decoder backed by a wasm web-worker decoder. */
export class OpusDecoder extends AudioDecoder {
    /** Underlying worker decoder; undefined before create() and after destroy(). */
    decoder?: OggOpusDecoderWebWorker;

    /** Spawn the worker and wait until it is ready to accept data. */
    async create(): Promise<void> {
        this.decoder = new OggOpusDecoderWebWorker({
            speechQualityEnhancement: 'none'
        });
        await this.decoder.ready;
    }

    /** Free the worker's resources. */
    destroy(): void {
        this.decoder?.free();
        // Drop the reference so any late decode/flush call resolves to
        // undefined instead of touching a freed worker.
        this.decoder = undefined;
    }

    /**
     * Decode one chunk of streamed Opus data.
     * @param data Chunk of compressed bytes.
     * @returns Decoded PCM, or undefined if the decoder is not created.
     */
    async decode(data: Uint8Array): Promise<IAudioDecodeData | undefined> {
        // Cast bridges the worker library's result type to our interface;
        // the `| undefined` keeps the optional-chaining result honest.
        return this.decoder?.decode(data) as
            | Promise<IAudioDecodeData>
            | undefined;
    }

    /**
     * Decode a complete Opus file.
     * @param data Entire compressed file.
     */
    async decodeAll(data: Uint8Array): Promise<IAudioDecodeData | undefined> {
        return this.decoder?.decodeFile(data) as
            | Promise<IAudioDecodeData>
            | undefined;
    }

    /** Flush buffered input and return any remaining decoded samples. */
    async flush(): Promise<IAudioDecodeData | undefined> {
        return this.decoder?.flush() as Promise<IAudioDecodeData> | undefined;
    }
}

View File

@ -1,18 +0,0 @@
import { loadAllBgm } from './bgm';
import { OpusDecoder, VorbisDecoder } from './decoder';
import { AudioType } from './support';
import { AudioDecoder } from './decoder';
/**
 * Boots the audio subsystem: queues BGM registration (runs after core
 * init) and registers the wasm decoders for formats the browser cannot
 * decode natively.
 */
export function createAudio() {
    loadAllBgm();
    AudioDecoder.registerDecoder(AudioType.Ogg, VorbisDecoder);
    AudioDecoder.registerDecoder(AudioType.Opus, OpusDecoder);
}
export * from './support';
export * from './effect';
export * from './player';
export * from './source';
export * from './bgm';
export * from './decoder';
export * from './sound';

View File

@ -1,605 +0,0 @@
import EventEmitter from 'eventemitter3';
import {
AudioBufferSource,
AudioElementSource,
AudioSource,
AudioStreamSource
} from './source';
import {
AudioEffect,
ChannelVolumeEffect,
DelayEffect,
EchoEffect,
IAudioOutput,
StereoEffect,
VolumeEffect
} from './effect';
import { isNil } from 'lodash-es';
import { logger } from '@motajs/common';
import { sleep } from 'mutate-animate';
import { AudioDecoder } from './decoder';
/** Events emitted by {@link AudioPlayer}; currently none. */
interface AudioPlayerEvent {}

/**
 * Central audio engine: owns the AudioContext, a master gain node wired to
 * the context destination, and a registry of named {@link AudioRoute}s.
 * Also serves as a factory for audio sources and effect nodes.
 */
export class AudioPlayer extends EventEmitter<AudioPlayerEvent> {
    /** The Web Audio context used for all playback. */
    readonly ac: AudioContext;
    /** All registered audio routes, keyed by id. */
    readonly audioRoutes: Map<string, AudioRoute> = new Map();
    /** Master gain node; every route's output ultimately feeds this. */
    readonly gain: GainNode;

    constructor() {
        super();
        this.ac = new AudioContext();
        this.gain = this.ac.createGain();
        this.gain.connect(this.ac.destination);
    }

    /**
     * Decode raw audio bytes via the registered decoders.
     * @param data Raw audio file bytes.
     */
    decodeAudioData(data: Uint8Array) {
        return AudioDecoder.decodeAudioData(data, this);
    }

    /**
     * Set the master volume (written directly as the gain value).
     * @param volume Gain value for the master node.
     */
    setVolume(volume: number) {
        this.gain.gain.value = volume;
    }

    /** Get the master volume (raw gain value). */
    getVolume() {
        return this.gain.gain.value;
    }

    /**
     * Instantiate an arbitrary source class bound to this context.
     * @param Source Source constructor taking an AudioContext.
     */
    createSource<T extends AudioSource>(
        Source: new (ac: AudioContext) => T
    ): T {
        return new Source(this.ac);
    }

    /** Create a streaming source (used e.g. for opus/ogg needing manual decode). */
    createStreamSource() {
        return new AudioStreamSource(this.ac);
    }

    /** Create a source backed by an HTMLAudioElement. */
    createElementSource() {
        return new AudioElementSource(this.ac);
    }

    /** Create a source backed by an AudioBuffer. */
    createBufferSource() {
        return new AudioBufferSource(this.ac);
    }

    /** The node routes connect to as their final output (the master gain). */
    getDestination() {
        return this.gain;
    }

    /**
     * Instantiate an arbitrary effect class bound to this context.
     * @param Effect Effect constructor taking an AudioContext.
     */
    createEffect<T extends AudioEffect>(
        Effect: new (ac: AudioContext) => T
    ): T {
        return new Effect(this.ac);
    }

    /**
     * Create a volume (gain) effect.
     * ```txt
     *             |----------|
     * Input ----> | GainNode | ----> Output
     *             |----------|
     * ```
     */
    createVolumeEffect() {
        return new VolumeEffect(this.ac);
    }

    /**
     * Create a stereo (3D panner) effect.
     * ```txt
     *             |------------|
     * Input ----> | PannerNode | ----> Output
     *             |------------|
     * ```
     */
    createStereoEffect() {
        return new StereoEffect(this.ac);
    }

    /**
     * Create a per-channel volume effect.
     * ```txt
     *                          |----------|
     *                       -> | GainNode | \
     *             |--------------|            |------------|
     * Input ----> | SplitterNode |  ......    | MergerNode | ----> Output
     *             |--------------|            |------------|
     *                       -> | GainNode | /
     *                          |----------|
     * ```
     */
    createChannelVolumeEffect() {
        return new ChannelVolumeEffect(this.ac);
    }

    /**
     * Create a delay effect.
     * ```txt
     *             |-----------|
     * Input ----> | DelayNode | ----> Output
     *             |-----------|
     * ```
     */
    createDelayEffect() {
        return new DelayEffect(this.ac);
    }

    /**
     * Create an echo effect (gain with delayed feedback).
     * ```txt
     *             |----------|
     * Input ----> | GainNode | ----> Output
     *    ^        |----------|    |
     *    |                        |
     *    |        |------------|  |
     *    |------- | Delay Node | <-
     *             |------------|
     * ```
     */
    createEchoEffect() {
        return new EchoEffect(this.ac);
    }

    /**
     * Create a route for a source, bound to this player.
     * @param source Audio source to wrap.
     */
    createRoute(source: AudioSource) {
        return new AudioRoute(source, this);
    }

    /**
     * Register a route under an id; warns (but still overwrites) when the
     * id is already taken.
     * @param id Route id.
     * @param route Route to register.
     */
    addRoute(id: string, route: AudioRoute) {
        if (this.audioRoutes.has(id)) {
            logger.warn(45, id);
        }
        this.audioRoutes.set(id, route);
    }

    /**
     * Look up a route by id.
     * @param id Route id.
     */
    getRoute(id: string) {
        return this.audioRoutes.get(id);
    }

    /**
     * Destroy and unregister a route.
     * @param id Route id.
     */
    removeRoute(id: string) {
        const route = this.audioRoutes.get(id);
        if (route) {
            route.destroy();
        }
        this.audioRoutes.delete(id);
    }

    /**
     * Play a registered route; warns and does nothing for unknown ids.
     * @param id Route id.
     * @param when Playback offset in seconds.
     */
    play(id: string, when: number = 0) {
        const route = this.getRoute(id);
        if (!route) {
            logger.warn(53, 'play', id);
            return;
        }
        route.play(when);
    }

    /**
     * Pause a registered route; warns and does nothing for unknown ids.
     * @param id Route id.
     * @returns The route's pause promise, if the route exists.
     */
    pause(id: string) {
        const route = this.getRoute(id);
        if (!route) {
            logger.warn(53, 'pause', id);
            return;
        }
        return route.pause();
    }

    /**
     * Stop a registered route; warns and does nothing for unknown ids.
     * @param id Route id.
     * @returns The route's stop promise, if the route exists.
     */
    stop(id: string) {
        const route = this.getRoute(id);
        if (!route) {
            logger.warn(53, 'stop', id);
            return;
        }
        return route.stop();
    }

    /**
     * Resume a registered route; warns and does nothing for unknown ids.
     * @param id Route id.
     */
    resume(id: string) {
        const route = this.getRoute(id);
        if (!route) {
            logger.warn(53, 'resume', id);
            return;
        }
        route.resume();
    }

    /**
     * Set the listener position. Axes per the original author: +x right,
     * +y up, +z perpendicular to the screen pointing away from the user.
     * @param x X coordinate.
     * @param y Y coordinate.
     * @param z Z coordinate.
     */
    setListenerPosition(x: number, y: number, z: number) {
        const listener = this.ac.listener;
        listener.positionX.value = x;
        listener.positionY.value = y;
        listener.positionZ.value = z;
    }

    /**
     * Set the listener's forward direction (same axis convention as
     * {@link setListenerPosition}).
     * @param x X component.
     * @param y Y component.
     * @param z Z component.
     */
    setListenerOrientation(x: number, y: number, z: number) {
        const listener = this.ac.listener;
        listener.forwardX.value = x;
        listener.forwardY.value = y;
        listener.forwardZ.value = z;
    }

    /**
     * Set the listener's up vector (same axis convention as
     * {@link setListenerPosition}).
     * @param x X component.
     * @param y Y component.
     * @param z Z component.
     */
    setListenerUp(x: number, y: number, z: number) {
        const listener = this.ac.listener;
        listener.upX.value = x;
        listener.upY.value = y;
        listener.upZ.value = z;
    }
}
/**
 * Playback status of an {@link AudioRoute}.
 * NOTE(review): 'Stoping'/'Stoped' are misspellings, but they are part of
 * the public API referenced elsewhere; renaming would break callers.
 */
export const enum AudioStatus {
    /** Actively playing. */
    Playing,
    /** Pause requested; the end-time fade is in progress. */
    Pausing,
    /** Fully paused; can resume from the pause position. */
    Paused,
    /** Stop requested; the end-time fade is in progress. */
    Stoping,
    /** Fully stopped. */
    Stoped
}

/** Hook invoked when a route starts playing. */
type AudioStartHook = (route: AudioRoute) => void;
/** Hook invoked when a route begins ending; receives the fade-out duration (ms). */
type AudioEndHook = (time: number, route: AudioRoute) => void;

/** Events emitted by {@link AudioRoute}; none carry a payload. */
interface AudioRouteEvent {
    updateEffect: [];
    play: [];
    stop: [];
    pause: [];
    resume: [];
}

/**
 * A playable chain: one audio source followed by an ordered list of
 * effects, terminating at the player's master destination. Pause/stop are
 * delayed by `endTime` so callers can fade audio out; overlapping requests
 * are disambiguated via the `stopIdentifier` token.
 */
export class AudioRoute
    extends EventEmitter<AudioRouteEvent>
    implements IAudioOutput
{
    /** Final output node of the chain (last effect's output, or the source's). */
    output: AudioNode;
    /** Ordered effect chain applied between the source and the destination. */
    readonly effectRoute: AudioEffect[] = [];
    /** Fade-out duration in ms: pause/stop wait this long before actually stopping the source. */
    endTime: number = 0;
    /** Current playback status. */
    status: AudioStatus = AudioStatus.Stoped;
    /** Source position (as returned by source.stop()) to resume from after a pause. */
    private pauseTime: number = 0;
    /** currentTime captured at the moment of pausing. */
    private pauseCurrentTime: number = 0;

    /** Audio duration in seconds (delegates to the source). */
    get duration() {
        return this.source.duration;
    }

    /** Elapsed playback time in seconds; frozen at the pause point while paused. */
    get currentTime() {
        if (this.status === AudioStatus.Paused) {
            return this.pauseCurrentTime;
        } else {
            return this.source.currentTime;
        }
    }
    set currentTime(time: number) {
        // Seek by restarting the source at the requested offset.
        this.source.stop();
        this.source.play(time);
    }

    /** Set when stop() arrives during a pause fade: the pending pause finishes as a stop. */
    private shouldStop: boolean = false;
    /**
     * Monotonically increasing token, bumped on each pause/stop request;
     * a fade that finds the token changed knows it was superseded and bails.
     */
    stopIdentifier: number = 0;
    private audioStartHook?: AudioStartHook;
    private audioEndHook?: AudioEndHook;

    constructor(
        public readonly source: AudioSource,
        public readonly player: AudioPlayer
    ) {
        super();
        this.output = source.output;
        // Keep status in sync with source-driven transitions
        // (natural end of the media / source-initiated playback).
        source.on('end', () => {
            if (this.status === AudioStatus.Playing) {
                this.status = AudioStatus.Stoped;
            }
        });
        source.on('play', () => {
            if (this.status !== AudioStatus.Playing) {
                this.status = AudioStatus.Playing;
            }
        });
    }

    /**
     * Set the fade-out duration applied before pause/stop take effect.
     * @param time Duration in milliseconds.
     */
    setEndTime(time: number) {
        this.endTime = time;
    }

    /**
     * Register the hook called when playback starts; replaces any previous hook.
     * @param fn Hook, or undefined to clear it.
     */
    onStart(fn?: AudioStartHook) {
        this.audioStartHook = fn;
    }

    /**
     * Register the hook called when playback begins ending; replaces any
     * previous hook. The hook receives the fade-out duration.
     * @param fn Hook, or undefined to clear it.
     */
    onEnd(fn?: AudioEndHook) {
        this.audioEndHook = fn;
    }

    /**
     * Start playback from an offset: (re)link the effect chain, resume the
     * AudioContext, and connect source → effects → master destination.
     * @param when Offset in seconds.
     */
    async play(when: number = 0) {
        if (this.status === AudioStatus.Playing) return;
        this.link();
        await this.player.ac.resume();
        if (this.effectRoute.length > 0) {
            const first = this.effectRoute[0];
            this.source.connect(first);
            const last = this.effectRoute.at(-1)!;
            last.connect({ input: this.player.getDestination() });
        } else {
            this.source.connect({ input: this.player.getDestination() });
        }
        this.source.play(when);
        this.status = AudioStatus.Playing;
        this.pauseTime = 0;
        this.audioStartHook?.(this);
        this.startAllEffect();
        this.emit('play');
    }

    /**
     * Pause playback: fire the end hook, wait out the fade (endTime), then
     * stop the source — unless a newer pause/stop superseded this request
     * in the meantime. If stop() arrived during the fade, finishes as a
     * stop instead of a pause.
     */
    async pause() {
        if (this.status !== AudioStatus.Playing) return;
        this.status = AudioStatus.Pausing;
        this.stopIdentifier++;
        const identifier = this.stopIdentifier;
        if (this.audioEndHook) {
            this.audioEndHook(this.endTime, this);
            await sleep(this.endTime);
        }
        // Bail if something changed our state or issued a newer request
        // while we were waiting for the fade.
        if (
            this.status !== AudioStatus.Pausing ||
            this.stopIdentifier !== identifier
        ) {
            return;
        }
        this.pauseCurrentTime = this.source.currentTime;
        const time = this.source.stop();
        this.pauseTime = time;
        if (this.shouldStop) {
            // A stop() arrived during the fade: finish as a stop.
            this.status = AudioStatus.Stoped;
            this.endAllEffect();
            this.emit('stop');
            this.shouldStop = false;
        } else {
            this.status = AudioStatus.Paused;
            this.endAllEffect();
            this.emit('pause');
        }
    }

    /**
     * Resume playback. While a fade-out is still running (Pausing/Stoping),
     * only re-fires the start hook — the pending fade's status check will
     * then leave the audio playing.
     * NOTE(review): in the Paused branch, play() already fires the start
     * hook and startAllEffect, and both are invoked again below — hooks
     * may run twice on resume; confirm whether this is intentional.
     */
    resume() {
        if (this.status === AudioStatus.Playing) return;
        if (
            this.status === AudioStatus.Pausing ||
            this.status === AudioStatus.Stoping
        ) {
            this.audioStartHook?.(this);
            this.emit('resume');
            return;
        }
        if (this.status === AudioStatus.Paused) {
            this.play(this.pauseTime);
        } else {
            this.play(0);
        }
        this.status = AudioStatus.Playing;
        this.pauseTime = 0;
        this.audioStartHook?.(this);
        this.startAllEffect();
        this.emit('resume');
    }

    /**
     * Stop playback with the same fade protocol as pause(). When called
     * during an in-flight pause fade, converts that pause into a stop.
     */
    async stop() {
        if (this.status !== AudioStatus.Playing) {
            if (this.status === AudioStatus.Pausing) {
                this.shouldStop = true;
            }
            return;
        }
        this.status = AudioStatus.Stoping;
        this.stopIdentifier++;
        const identifier = this.stopIdentifier;
        if (this.audioEndHook) {
            this.audioEndHook(this.endTime, this);
            await sleep(this.endTime);
        }
        // Bail if superseded while waiting for the fade.
        if (
            this.status !== AudioStatus.Stoping ||
            this.stopIdentifier !== identifier
        ) {
            return;
        }
        this.source.stop();
        this.status = AudioStatus.Stoped;
        this.pauseTime = 0;
        this.endAllEffect();
        this.emit('stop');
    }

    /**
     * Append or insert effects into the chain, then refresh the output
     * node and (if playing) relink the chain.
     * @param effect Effect or list of effects to add.
     * @param index Insertion position; appended when omitted (0 = front).
     */
    addEffect(effect: AudioEffect | AudioEffect[], index?: number) {
        if (isNil(index)) {
            if (effect instanceof Array) {
                this.effectRoute.push(...effect);
            } else {
                this.effectRoute.push(effect);
            }
        } else {
            if (effect instanceof Array) {
                this.effectRoute.splice(index, 0, ...effect);
            } else {
                this.effectRoute.splice(index, 0, effect);
            }
        }
        this.setOutput();
        if (this.source.playing) this.link();
        this.emit('updateEffect');
    }

    /**
     * Remove one effect from the chain; no-op if it is not present.
     * @param effect Effect to remove.
     */
    removeEffect(effect: AudioEffect) {
        const index = this.effectRoute.indexOf(effect);
        if (index === -1) return;
        this.effectRoute.splice(index, 1);
        effect.disconnect();
        this.setOutput();
        if (this.source.playing) this.link();
        this.emit('updateEffect');
    }

    /** Tear down the route by disconnecting every effect. */
    destroy() {
        this.effectRoute.forEach(v => v.disconnect());
    }

    // Point `output` at the last effect's output, or at the source when
    // the chain is empty.
    private setOutput() {
        const effect = this.effectRoute.at(-1);
        if (!effect) this.output = this.source.output;
        else this.output = effect.output;
    }

    /**
     * Rebuild the effect-to-effect connections: disconnect everything, then
     * connect each effect to its successor. (Source and destination hookup
     * happens in play().)
     */
    private link() {
        this.effectRoute.forEach(v => v.disconnect());
        this.effectRoute.forEach((v, i) => {
            const next = this.effectRoute[i + 1];
            if (next) {
                v.connect(next);
            }
        });
    }

    // Notify every effect that playback has started.
    private startAllEffect() {
        this.effectRoute.forEach(v => v.start());
    }

    // Notify every effect that playback has ended.
    private endAllEffect() {
        this.effectRoute.forEach(v => v.end());
    }
}

/** Shared singleton player used across the game. */
export const audioPlayer = new AudioPlayer();
// window.audioPlayer = audioPlayer;

View File

@ -1,7 +1,7 @@
import { Patch, PatchClass } from '@motajs/legacy-common';
import { audioPlayer, bgmController, soundPlayer } from '../audio';
import { audioContext, bgmPlayer, soundPlayer } from '@user/client-base';
import { mainSetting } from '@motajs/legacy-ui';
import { sleep } from 'mutate-animate';
import { sleep } from '@motajs/common';
import { isNil } from 'lodash-es';
// todo: 添加弃用警告 logger.warn(56)
@ -10,10 +10,10 @@ export function patchAudio() {
const patch = new Patch(PatchClass.Control);
const play = (bgm: BgmIds, when?: number) => {
bgmController.play(bgm, when);
bgmPlayer.play(bgm, when);
};
const pause = () => {
bgmController.pause();
bgmPlayer.pause();
};
patch.add('playBgm', function (bgm, startTime) {
@ -23,13 +23,13 @@ export function patchAudio() {
pause();
});
patch.add('resumeBgm', function () {
bgmController.resume();
bgmPlayer.resume();
});
patch.add('checkBgm', function () {
if (bgmController.playing) return;
if (bgmPlayer.playing) return;
if (mainSetting.getValue('audio.bgmEnabled')) {
if (bgmController.playingBgm) {
bgmController.play(bgmController.playingBgm);
if (bgmPlayer.playingBgm) {
bgmPlayer.play(bgmPlayer.playingBgm);
} else {
play(main.startBgm, 0);
}
@ -38,8 +38,8 @@ export function patchAudio() {
}
});
patch.add('triggerBgm', function () {
if (bgmController.playing) bgmController.pause();
else bgmController.resume();
if (bgmPlayer.playing) bgmPlayer.pause();
else bgmPlayer.resume();
});
patch.add(
@ -47,7 +47,7 @@ export function patchAudio() {
function (sound, _pitch, callback, position, orientation) {
const name = core.getMappedName(sound) as SoundIds;
const num = soundPlayer.play(name, position, orientation);
const route = audioPlayer.getRoute(`sounds.${num}`);
const route = audioContext.getRoute(`sounds.${num}`);
if (!route) {
callback?.();
return -1;

View File

@ -1,11 +1,9 @@
import { loading } from '@user/data-base';
import { createAudio } from './audio';
import { patchAll } from './fallback';
import { createGameRenderer, createRender } from './render';
export function create() {
patchAll();
createAudio();
createRender();
loading.once('coreInit', () => {
createGameRenderer();
@ -13,7 +11,5 @@ export function create() {
}
export * from './action';
export * from './audio';
export * from './fallback';
export * from './loader';
export * from './render';

View File

@ -8,6 +8,7 @@ import type * as LegacyUI from '@motajs/legacy-ui';
import type * as Render from '@motajs/render';
import type * as RenderVue from '@motajs/render-vue';
import type * as System from '@motajs/system';
import type * as UserClientBase from '@user/client-base';
import type * as ClientModules from '@user/client-modules';
import type * as DataBase from '@user/data-base';
import type * as DataFallback from '@user/data-fallback';
@ -31,6 +32,7 @@ interface ModuleInterface {
'@motajs/render': typeof Render;
'@motajs/render-vue': typeof RenderVue;
'@motajs/system': typeof System;
'@user/client-base': typeof UserClientBase;
'@user/client-modules': typeof ClientModules;
'@user/data-base': typeof DataBase;
'@user/data-fallback': typeof DataFallback;

View File

@ -6,7 +6,8 @@ import {
IExcitationVariator,
ExcitationCurve,
VariatorCurveMode,
IExcitationDivider
IExcitationDivider,
IIntervalExcitation
} from './types';
import { excited } from './utils';
@ -108,6 +109,30 @@ export class RafExcitation extends ExcitationBase<number> {
}
}
export class IntervalExcitation
extends ExcitationBase<number>
implements IIntervalExcitation
{
private now: number = 0;
private readonly id: number;
constructor(readonly interval: number) {
super();
this.id = window.setInterval(() => {
this.excite(this.now);
this.now += interval;
}, interval);
}
payload(): number {
return this.now;
}
override destroy(): void {
window.clearInterval(this.id);
}
}
interface CurveQueue {
/** 速率曲线 */
curve: ExcitationCurve;

View File

@ -158,6 +158,11 @@ export interface IExcitationDivider<T> extends IExcitation<T> {
setDivider(divider: number): void;
}
export interface IIntervalExcitation extends IExcitation<number> {
/** 两次触发之间的时间间隔 */
readonly interval: number;
}
//#endregion
//#region 动画类

View File

@ -0,0 +1,7 @@
{
"name": "@motajs/audio",
"dependencies": {
"@motajs/common": "workspace:*",
"@motajs/loader": "workspace:*"
}
}

309
packages/audio/src/bgm.ts Normal file
View File

@ -0,0 +1,309 @@
import { guessTypeByExt, isAudioSupport } from './support';
import { logger } from '@motajs/common';
import { StreamLoader } from '@motajs/loader';
import {
IAudioRoute,
IAudioVolumeEffect,
IBGMPlayer,
IMotaAudioContext
} from './types';
import { AudioElementSource, AudioStreamSource } from './source';
/** Bookkeeping for one registered BGM route. */
interface BGMInfo {
    /** The audio route of this BGM. */
    readonly route: IAudioRoute;
    /** Listener invoked when the route starts (drives the fade-in). */
    readonly startFn: () => void;
    /** Listener invoked when the route ends (drives the fade-out). */
    readonly endFn: () => void;
}

/** Entry in the cache of loaded BGM sources (most recently played kept last). */
interface AudioCacheInfo {
    /** The audio route of this cached BGM. */
    readonly route: IAudioRoute;
    /** Memory currently held by the source; for `AudioElementSource` this is an estimate, not exact. */
    size: number;
}

/**
 * Background-music player. Registers each BGM as a named route on the
 * shared audio context, keeps at most one BGM selected at a time,
 * cross-fades via `setTargetAtTime` on each route's gain, and maintains a
 * recency-ordered cache that frees the least recently played sources.
 */
export class BGMPlayer<T extends string> implements IBGMPlayer<T> {
    /** Prefix prepended to a BGM name to form its route id. */
    prefix: string = 'bgms.';
    /** Per-BGM route/listener bookkeeping. */
    private readonly gain: Map<T, BGMInfo> = new Map();
    /** The BGM currently selected for playback, if any. */
    playingBgm?: T;
    /** Whether a BGM is currently playing. */
    playing: boolean = false;
    /** Whether the player is enabled; when disabled, play/resume do nothing. */
    enabled: boolean = true;
    /** Master volume effect appended to every BGM route. */
    private readonly mainGain: IAudioVolumeEffect;
    /** When true, all BGM switching is ignored. */
    private blocking: boolean = false;
    /** Fade duration in milliseconds. */
    private transitionTime: number = 2000;
    /**
     * Maximum cache capacity.
     * NOTE(review): cache entry sizes are estimated in BYTES
     * (duration * 48000 * 2 * 4), but the default here is 256 — the units
     * look inconsistent (256 bytes would evict everything); confirm the
     * intended unit (MB?) and scale one side accordingly.
     */
    maxCacheSize: number = 256;
    /** Cache of loaded sources, ordered oldest-first. */
    private readonly cachePool: AudioCacheInfo[] = [];

    constructor(public readonly ac: IMotaAudioContext) {
        this.mainGain = ac.createVolumeEffect();
    }

    /** Set the maximum cache capacity and immediately evict any excess. */
    setMaxCacheSize(size: number): void {
        this.maxCacheSize = size;
        this.checkMaxCache();
    }

    // Evict the oldest cached sources: walk from the newest entry backwards
    // accumulating sizes; once the running total reaches maxCacheSize,
    // everything at or before that index is freed.
    // NOTE(review): if the newest entry alone reaches the cap, toDelete
    // becomes the full pool length and even the newest (possibly currently
    // playing) entry is freed — confirm whether that is intended.
    private checkMaxCache() {
        if (this.cachePool.length <= 1) return;
        let total = 0;
        let toDelete = 0;
        for (let i = this.cachePool.length - 1; i >= 0; i--) {
            total += this.cachePool[i].size;
            if (total >= this.maxCacheSize) {
                toDelete = i + 1;
                break;
            }
        }
        for (let i = 0; i < toDelete; i++) {
            const data = this.cachePool.shift();
            if (!data) continue;
            data.route.source.free();
            data.size = 0;
        }
    }

    /**
     * Set the fade duration used for subsequent start/end fades.
     * @param time Fade duration in milliseconds.
     */
    setTransitionTime(time: number) {
        this.transitionTime = time;
    }

    /** Block all subsequent BGM switching until {@link unblockChange} is called. */
    blockChange() {
        this.blocking = true;
    }

    /** Re-allow BGM switching after {@link blockChange}. */
    unblockChange() {
        this.blocking = false;
    }

    /**
     * Set the master BGM volume.
     * @param volume Volume value forwarded to the master gain effect.
     */
    setVolume(volume: number) {
        this.mainGain.setVolume(volume);
    }

    /** Get the master BGM volume. */
    getVolume() {
        return this.mainGain.getVolume();
    }

    /**
     * Enable or disable BGM playback; resumes when enabling, stops when disabling.
     * @param enabled Whether BGM playback is enabled.
     */
    setEnabled(enabled: boolean) {
        if (enabled) this.resume();
        else this.stop();
        this.enabled = enabled;
    }

    /** Set the prefix used when building route ids for BGM names. */
    setPrefix(prefix: string) {
        this.prefix = prefix;
    }

    // Full route id for a BGM name: prefix + name.
    private getId(name: T) {
        return `${this.prefix}${name}`;
    }

    /**
     * Get the audio route registered for a BGM.
     * @param id BGM name (without prefix).
     */
    get(id: T) {
        return this.ac.getRoute(this.getId(id));
    }

    /**
     * Register a BGM from a URL. Natively playable formats use an element
     * source; other formats are streamed into a decoding stream source.
     * @param id BGM name; its file extension determines the audio type.
     * @param url URL of the BGM file.
     */
    addBGMFromURL(id: T, url: string) {
        const type = guessTypeByExt(id);
        if (!type) {
            logger.warn(50, id.split('.').slice(0, -1).join('.'));
            return;
        }
        const gain = this.ac.createVolumeEffect();
        if (isAudioSupport(type)) {
            // Browser can decode this format natively: element-backed source.
            const source = this.ac.createElementSource();
            source.setSource(url);
            source.setLoop(true);
            const route = this.ac.createRoute(source);
            route.addEffect([gain, this.mainGain]);
            this.ac.addRoute(this.getId(id), route);
            this.setTransition(id, route, gain);
        } else {
            // Unsupported format: stream the file into a decoding source.
            const source = this.ac.createStreamSource();
            const stream = new StreamLoader(url);
            stream.pipe(source);
            source.setLoop(true);
            const route = this.ac.createRoute(source);
            route.addEffect([gain, this.mainGain]);
            this.ac.addRoute(this.getId(id), route);
            this.setTransition(id, route, gain);
        }
    }

    /**
     * Unregister a BGM and detach its fade listeners.
     * NOTE(review): method name casing ('Bgm') differs from addBGMFromURL
     * ('BGM'); both are public, so keeping as-is for compatibility.
     * @param id BGM name.
     */
    removeBgm(id: T) {
        this.ac.removeRoute(this.getId(id));
        const gain = this.gain.get(id);
        if (gain) {
            gain.route.off('start', gain.startFn);
            gain.route.off('end', gain.endFn);
        }
        this.gain.delete(id);
    }

    // Wire fade-in on route start and fade-out on route end using
    // setTargetAtTime with a time constant of transitionTime/3, so the gain
    // reaches ~95% of its target after roughly transitionTime.
    private setTransition(id: T, route: IAudioRoute, gain: IAudioVolumeEffect) {
        const startFn = () => {
            // Cancel any in-flight ramp before scheduling the fade-in.
            gain.output.gain.cancelScheduledValues(this.ac.ac.currentTime);
            gain.output.gain.setTargetAtTime(
                1,
                this.ac.ac.currentTime,
                this.transitionTime / 1000 / 3
            );
        };
        const endFn = () => {
            // Cancel any in-flight ramp before scheduling the fade-out.
            gain.output.gain.cancelScheduledValues(this.ac.ac.currentTime);
            gain.output.gain.setTargetAtTime(
                0,
                this.ac.ac.currentTime,
                this.transitionTime / 1000 / 3
            );
        };
        route.on('start', startFn);
        route.on('end', endFn);
        route.setEndTime(this.transitionTime);
        this.gain.set(id, { route, startFn, endFn });
    }

    /**
     * Play a BGM, pausing whichever other BGM was selected, and update the
     * recency cache: an already-cached route moves to the back; a new one
     * is appended with a size estimated once its source reports 'load',
     * after which eviction runs.
     * @param id BGM name.
     * @param when Playback offset forwarded to the context.
     */
    play(id: T, when?: number) {
        if (this.blocking) return;
        if (id !== this.playingBgm && this.playingBgm) {
            this.ac.pause(this.getId(this.playingBgm));
        }
        this.playingBgm = id;
        if (!this.enabled) return;
        const full = this.getId(id);
        this.ac.play(full, when);
        this.playing = true;
        const route = this.ac.getRoute(full);
        if (!route) return;
        const index = this.cachePool.findIndex(v => v.route === route);
        if (index !== -1) {
            // Still cached: move it to the back (most recently played).
            const [data] = this.cachePool.splice(index, 1);
            this.cachePool.push(data);
        } else {
            // Not cached: append and size it once loaded, then evict excess.
            const cacheInfo: AudioCacheInfo = {
                route,
                size: 0
            };
            const source = route.source;
            if (source instanceof AudioElementSource) {
                // <audio>-element source: estimate from duration
                // (assumes 48 kHz, stereo, 4 bytes/sample).
                source.once('load', () => {
                    const duration = source.audio.duration;
                    const estimatedSize = duration * 48000 * 2 * 4;
                    cacheInfo.size = estimatedSize;
                    this.checkMaxCache();
                });
            } else if (source instanceof AudioStreamSource) {
                // Streamed source: exact size from the decoded buffer.
                source.once('load', () => {
                    if (!source.buffer) return;
                    const buffer = source.buffer;
                    const size = buffer.numberOfChannels * buffer.length * 4;
                    cacheInfo.size = size;
                    this.checkMaxCache();
                });
            } else {
                // Any other source: same duration-based estimate.
                source.once('load', () => {
                    const duration = source.duration;
                    const estimatedSize = duration * 48000 * 2 * 4;
                    cacheInfo.size = estimatedSize;
                    this.checkMaxCache();
                });
            }
            this.cachePool.push(cacheInfo);
        }
    }

    /** Resume the selected BGM; no-op when blocked, disabled, or already playing. */
    resume() {
        if (this.blocking || !this.enabled || this.playing) return;
        if (this.playingBgm) {
            this.ac.resume(this.getId(this.playingBgm));
        }
        this.playing = true;
    }

    /** Pause the selected BGM; no-op when blocked or disabled. */
    pause() {
        if (this.blocking || !this.enabled) return;
        if (this.playingBgm) {
            this.ac.pause(this.getId(this.playingBgm));
        }
        this.playing = false;
    }

    /** Stop the selected BGM; no-op when blocked or disabled. */
    stop() {
        if (this.blocking || !this.enabled) return;
        if (this.playingBgm) {
            this.ac.stop(this.getId(this.playingBgm));
        }
        this.playing = false;
    }

    // NOTE(review): intentionally empty? Cached sources and route listeners
    // are not released here — confirm whether destroy() should free them.
    destroy(): void {}
}

View File

@ -0,0 +1,449 @@
import {
AudioBufferSource,
AudioElementSource,
AudioStreamSource
} from './source';
import {
ChannelVolumeEffect,
DelayEffect,
EchoEffect,
StereoEffect,
VolumeEffect
} from './effect';
import { logger } from '@motajs/common';
import { VanillaDecoder } from './decoder';
import {
AudioDecoderCreateFunc,
AudioType,
IAudioBufferSource,
IAudioChannelVolumeEffect,
IAudioDecodeData,
IAudioDecoder,
IAudioDelayEffect,
IAudioEchoEffect,
IAudioEffect,
IAudioElementSource,
IAudioRoute,
IAudioSource,
IAudioStereoEffect,
IAudioStreamSource,
IAudioVolumeEffect,
IMotaAudioContext,
ISoundPlayer
} from './types';
import { SoundPlayer } from './sound';
import { AudioRoute } from './route';
// Magic-byte signatures used to sniff the audio container type from the first
// bytes of a file. Each entry maps an AudioType to the byte prefix that
// identifies it:
//   - MP3:  "ID3"  (ID3v2 tag header)
//   - OGG:  "OggS" (Ogg page capture pattern)
//   - WAV:  "RIFF" (RIFF container header)
//   - FLAC: "fLaC" (FLAC stream marker)
//   - AAC:  0xFFF1 / 0xFFF9 (ADTS frame sync words)
const fileSignatures: [AudioType, number[]][] = [
    [AudioType.Mp3, [0x49, 0x44, 0x33]],
    [AudioType.Ogg, [0x4f, 0x67, 0x67, 0x53]],
    [AudioType.Wav, [0x52, 0x49, 0x46, 0x46]],
    [AudioType.Flac, [0x66, 0x4c, 0x61, 0x43]],
    [AudioType.Aac, [0xff, 0xf1]],
    [AudioType.Aac, [0xff, 0xf9]]
];
// Byte sequences searched for inside an Ogg container to refine the type:
// "OpusHead" marks an Opus stream; otherwise the Ogg is treated as Vorbis.
const oggHeaders: [AudioType, number[]][] = [
    [AudioType.Opus, [0x4f, 0x70, 0x75, 0x73, 0x48, 0x65, 0x61, 0x64]]
];
/**
 * Central audio context of the engine. Wraps a Web Audio `AudioContext` and
 * serves three roles: a factory for sources, effects and routes; a registry
 * of named audio routes with play/pause/stop/resume dispatch; and a decoding
 * front-end that falls back to registered decoders for formats the browser
 * cannot play natively.
 */
export class MotaAudioContext implements IMotaAudioContext {
    /** The underlying Web Audio playback context. */
    readonly ac: AudioContext;
    /** All registered audio routes, keyed by their id. */
    readonly audioRoutes: Map<string, IAudioRoute> = new Map();
    /** Master gain node; connected to the context destination. */
    readonly gain: GainNode;
    /** Detached audio element used only for canPlayType support checks. */
    private readonly testAudio: HTMLAudioElement = new Audio();
    /** Decoder factories registered for formats without native support. */
    private readonly decoders: Map<AudioType, AudioDecoderCreateFunc> =
        new Map();
    /** Volume floor in decibels; volume 0..1 maps linearly onto [minDb, 0] dB. */
    private readonly minDb = -60;

    constructor() {
        this.ac = new AudioContext();
        this.gain = this.ac.createGain();
        this.gain.connect(this.ac.destination);
    }

    /**
     * Set the master volume on a perceptual (decibel) scale.
     * `dB = minDb * (1 - volume)`, `gain = 10 ** (dB / 20)`, where
     * `minDb = -60`. A volume of exactly 0 mutes entirely.
     * @param volume Volume in the range [0, 1].
     */
    setVolume(volume: number): void {
        if (volume === 0) this.gain.gain.value = 0;
        else {
            const db = this.minDb + -this.minDb * volume;
            const gain = 10 ** (db / 20);
            this.gain.gain.value = gain;
        }
    }

    /**
     * Get the master volume in the range [0, 1]; exact inverse of
     * {@link setVolume}.
     */
    getVolume(): number {
        const gain = this.gain.gain.value;
        if (gain === 0) return 0;
        // BUGFIX: the previous implementation negated the dB value and then
        // divided by minDb directly, which returned a *negative* volume for
        // any gain in (0, 1). The inverse of setVolume is
        // dB = 20 * log10(gain); volume = 1 - dB / minDb.
        const db = Math.log10(gain) * 20;
        return 1 - db / this.minDb;
    }

    /**
     * Create a sound-effect player bound to this context.
     */
    createSoundPlayer<T extends string>(): ISoundPlayer<T> {
        return new SoundPlayer(this);
    }

    /**
     * Instantiate an arbitrary audio source class with this context.
     * @param Source Source constructor taking the context.
     */
    createSource<T extends IAudioSource>(
        Source: new (ac: IMotaAudioContext) => T
    ): T {
        return new Source(this);
    }

    /**
     * Create a streaming source that decodes (e.g. opus/ogg) while loading.
     */
    createStreamSource(): IAudioStreamSource {
        return new AudioStreamSource(this);
    }

    /**
     * Create a source backed by an HTMLAudioElement.
     */
    createElementSource(): IAudioElementSource {
        return new AudioElementSource(this);
    }

    /**
     * Create a source backed by a fully-decoded AudioBuffer.
     */
    createBufferSource(): IAudioBufferSource {
        return new AudioBufferSource(this);
    }

    /**
     * Get the node that routes should connect their final output to.
     */
    getDestination(): GainNode {
        return this.gain;
    }

    /**
     * Instantiate an arbitrary effect class with this context.
     * @param Effect Effect constructor taking the context.
     */
    createEffect<T extends IAudioEffect>(
        Effect: new (ac: IMotaAudioContext) => T
    ): T {
        return new Effect(this);
    }

    /**
     * Create a volume effect.
     * ```txt
     *             |----------|
     * Input ----> | GainNode | ----> Output
     *             |----------|
     * ```
     */
    createVolumeEffect(): IAudioVolumeEffect {
        return new VolumeEffect(this);
    }

    /**
     * Create a 3D-positional (stereo panner) effect.
     * ```txt
     *             |------------|
     * Input ----> | PannerNode | ----> Output
     *             |------------|
     * ```
     */
    createStereoEffect(): IAudioStereoEffect {
        return new StereoEffect(this);
    }

    /**
     * Create a per-channel volume effect.
     * ```txt
     *                               |----------|
     *                            -> | GainNode | \
     *             |--------------| /  |----------| -> |------------|
     * Input ----> | SplitterNode |       ......       | MergerNode | ----> Output
     *             |--------------| \  |----------| -> |------------|
     *                            -> | GainNode | /
     *                               |----------|
     * ```
     */
    createChannelVolumeEffect(): IAudioChannelVolumeEffect {
        return new ChannelVolumeEffect(this);
    }

    /**
     * Create a delay effect.
     * ```txt
     *             |-----------|
     * Input ----> | DelayNode | ----> Output
     *             |-----------|
     * ```
     */
    createDelayEffect(): IAudioDelayEffect {
        return new DelayEffect(this);
    }

    /**
     * Create an echo (feedback-delay) effect.
     * ```txt
     *                   |----------|
     *       Input ----> | GainNode | ----> Output
     *         ^         |----------|   |
     *         |                        |
     *         |        |------------|  |
     *         |------- | Delay Node | <--
     *                  |------------|
     * ```
     */
    createEchoEffect(): IAudioEchoEffect {
        return new EchoEffect(this);
    }

    /**
     * Create a playback route for the given source.
     * @param source The audio source to route.
     */
    createRoute(source: IAudioSource): IAudioRoute {
        return new AudioRoute(source, this);
    }

    /**
     * Register a route under an id. Warns on a duplicate id, but still
     * replaces the previous route (the old route is NOT destroyed here).
     * @param id Route identifier.
     * @param route The route to register.
     */
    addRoute(id: string, route: IAudioRoute): void {
        if (this.audioRoutes.has(id)) {
            logger.warn(45, id);
        }
        this.audioRoutes.set(id, route);
    }

    /**
     * Look up a registered route.
     * @param id Route identifier.
     * @returns The route, or null when no route is registered under the id.
     */
    getRoute(id: string): IAudioRoute | null {
        return this.audioRoutes.get(id) ?? null;
    }

    /**
     * Destroy and unregister a route. Safe to call for unknown ids.
     * @param id Route identifier.
     */
    removeRoute(id: string): void {
        const route = this.audioRoutes.get(id);
        if (route) {
            route.destroy();
        }
        this.audioRoutes.delete(id);
    }

    /**
     * Start playing a registered route; warns when the route is missing.
     * @param id Route identifier.
     * @param when Offset (passed through to the route) to start playing at.
     */
    play(id: string, when: number = 0): void {
        const route = this.getRoute(id);
        if (!route) {
            logger.warn(53, 'play', id);
            return;
        }
        route.play(when);
    }

    /**
     * Pause a registered route; warns when the route is missing.
     * @param id Route identifier.
     * @returns Resolves once the route has actually paused.
     */
    pause(id: string): Promise<void> {
        const route = this.getRoute(id);
        if (!route) {
            logger.warn(53, 'pause', id);
            return Promise.resolve();
        }
        return route.pause();
    }

    /**
     * Stop a registered route; warns when the route is missing.
     * @param id Route identifier.
     * @returns Resolves once the route has actually stopped.
     */
    stop(id: string): Promise<void> {
        const route = this.getRoute(id);
        if (!route) {
            logger.warn(53, 'stop', id);
            return Promise.resolve();
        }
        return route.stop();
    }

    /**
     * Resume a paused registered route; warns when the route is missing.
     * @param id Route identifier.
     */
    resume(id: string): void {
        const route = this.getRoute(id);
        if (!route) {
            logger.warn(53, 'resume', id);
            return;
        }
        route.resume();
    }

    /**
     * Set the listener position. +x points right, +y points up (away from
     * the ground), +z points out of the screen toward the user's back.
     * @param x x coordinate
     * @param y y coordinate
     * @param z z coordinate
     */
    setListenerPosition(x: number, y: number, z: number) {
        const listener = this.ac.listener;
        listener.positionX.value = x;
        listener.positionY.value = y;
        listener.positionZ.value = z;
    }

    /**
     * Set the direction the listener is facing (same axes as position).
     * @param x x component
     * @param y y component
     * @param z z component
     */
    setListenerOrientation(x: number, y: number, z: number) {
        const listener = this.ac.listener;
        listener.forwardX.value = x;
        listener.forwardY.value = y;
        listener.forwardZ.value = z;
    }

    /**
     * Set the listener's "up" vector (same axes as position).
     * @param x x component
     * @param y y component
     * @param z z component
     */
    setListenerUp(x: number, y: number, z: number) {
        const listener = this.ac.listener;
        listener.upX.value = x;
        listener.upY.value = y;
        listener.upZ.value = z;
    }

    /**
     * Whether the browser can natively play the given audio type.
     * canPlayType returns '', 'maybe' or 'probably'.
     */
    isAudioVanillaSupport(type: AudioType): boolean {
        const support = this.testAudio.canPlayType(type);
        return support === 'probably' || support === 'maybe';
    }

    /**
     * Register a decoder factory for a type. Ignored when the browser can
     * already play the type natively.
     */
    registerDecoder(type: AudioType, decoder: AudioDecoderCreateFunc): void {
        if (this.isAudioVanillaSupport(type)) return;
        this.decoders.set(type, decoder);
    }

    /**
     * Create a decoder for the type: the native pass-through decoder when
     * supported, a registered factory otherwise, or null when neither exists.
     */
    createDecoder(type: AudioType): IAudioDecoder | null {
        if (this.isAudioVanillaSupport(type)) {
            return new VanillaDecoder(this);
        } else {
            const create = this.decoders.get(type);
            if (!create) return null;
            return create(this);
        }
    }

    /**
     * Sniff the audio type from the leading bytes of the data.
     */
    getAudioTypeFromData(data: Uint8Array): AudioType {
        let audioType: AudioType = AudioType.Unknown;
        // Only the first 256 bytes are inspected for the container signature.
        const toCheck = data.slice(0, 256);
        for (const [type, value] of fileSignatures) {
            if (value.every((v, i) => toCheck[i] === v)) {
                audioType = type;
                break;
            }
        }
        if (audioType === AudioType.Ogg) {
            // Ogg may actually carry Opus: search for the "OpusHead" marker.
            for (const [key, value] of oggHeaders) {
                const has = toCheck.some((_, i) => {
                    return value.every((v, ii) => toCheck[i + ii] === v);
                });
                if (has) {
                    audioType = key;
                    break;
                }
            }
        }
        return audioType;
    }

    /** Hex dump of the first 256 bytes, used in decode-failure logs. */
    private getErrorHeaderInfo(data: Uint8Array) {
        const toCheck = data.slice(0, 256);
        return [...toCheck]
            .map(v => v.toString(16).padStart(2, '0'))
            .join(' ')
            .toUpperCase();
    }

    /**
     * Decode encoded audio bytes into raw PCM channel data using a suitable
     * registered decoder.
     * @param data Encoded audio bytes.
     * @returns Decoded data, or null when the type is unknown/undecodable.
     */
    async decodeAudio(data: Uint8Array): Promise<IAudioDecodeData | null> {
        const type = this.getAudioTypeFromData(data);
        if (type === AudioType.Unknown) {
            logger.error(25, this.getErrorHeaderInfo(data));
            return null;
        }
        const decoder = this.createDecoder(type);
        if (!decoder) {
            logger.error(25, this.getErrorHeaderInfo(data));
            return null;
        }
        await decoder.create();
        const decoded = await decoder.decodeAll(data);
        await decoder.destroy();
        return decoded;
    }

    /**
     * Copy decoded PCM data into a Web Audio AudioBuffer.
     */
    toAudioBuffer(data: IAudioDecodeData): AudioBuffer {
        const buffer = this.ac.createBuffer(
            data.channelData.length,
            data.samplesDecoded,
            data.sampleRate
        );
        for (let i = 0; i < data.channelData.length; i++) {
            buffer.copyToChannel(data.channelData[i], i);
        }
        return buffer;
    }

    /**
     * Decode encoded audio bytes straight into an AudioBuffer, using the
     * native decoder when the format is supported and a registered decoder
     * otherwise.
     * @param data Encoded audio bytes.
     * @returns The decoded buffer, or null on failure.
     */
    async decodeToAudioBuffer(data: Uint8Array): Promise<AudioBuffer | null> {
        const type = this.getAudioTypeFromData(data);
        if (type === AudioType.Unknown) {
            // CONSISTENCY FIX: use the same error code (25) as decodeAudio for
            // an unknown audio type; 53 is the missing-route warning elsewhere.
            logger.error(25, this.getErrorHeaderInfo(data));
            return null;
        }
        if (!(data.buffer instanceof ArrayBuffer)) return null;
        if (this.isAudioVanillaSupport(type)) {
            // decodeAudioData detaches the ArrayBuffer it receives; copy the
            // view's exact byte range so the caller's data stays intact and
            // offset views decode the correct bytes.
            const bytes = data.buffer.slice(
                data.byteOffset,
                data.byteOffset + data.byteLength
            );
            return this.ac.decodeAudioData(bytes);
        } else {
            const decoded = await this.decodeAudio(data);
            if (!decoded) return null;
            else return this.toAudioBuffer(decoded);
        }
    }
}

View File

@ -0,0 +1,110 @@
import { OggVorbisDecoderWebWorker } from '@wasm-audio-decoders/ogg-vorbis';
import { OggOpusDecoderWebWorker } from 'ogg-opus-decoder';
import { IAudioDecodeData, IAudioDecoder, IMotaAudioContext } from './types';

/**
 * Ogg/Vorbis decoder backed by a web-worker WASM decoder.
 * All methods are safe to call before create(): they resolve to null
 * (or no-op) until the worker exists.
 */
export class VorbisDecoder implements IAudioDecoder {
    decoder?: OggVorbisDecoderWebWorker;

    /** Spin up the decoder worker and wait until it is ready. */
    async create(): Promise<void> {
        const worker = new OggVorbisDecoderWebWorker();
        this.decoder = worker;
        await worker.ready;
    }

    /** Release the worker's resources; no-op if create() was never called. */
    async destroy(): Promise<void> {
        const worker = this.decoder;
        if (worker) return worker.free();
    }

    /** Decode one chunk of Ogg/Vorbis data. */
    async decode(data: Uint8Array): Promise<IAudioDecodeData | null> {
        const worker = this.decoder;
        if (!worker) return null;
        return worker.decode(data) as Promise<IAudioDecodeData>;
    }

    /** Decode a complete Ogg/Vorbis file in one call. */
    async decodeAll(data: Uint8Array): Promise<IAudioDecodeData | null> {
        const worker = this.decoder;
        if (!worker) return null;
        return worker.decodeFile(data) as Promise<IAudioDecodeData>;
    }

    /** Flush any samples still buffered inside the decoder. */
    async flush(): Promise<IAudioDecodeData | null> {
        const worker = this.decoder;
        if (!worker) return null;
        return worker.flush() as Promise<IAudioDecodeData>;
    }
}
/**
 * Ogg/Opus decoder backed by a web-worker WASM decoder.
 * All methods are safe to call before create(): they resolve to null
 * (or no-op) until the worker exists.
 */
export class OpusDecoder implements IAudioDecoder {
    decoder?: OggOpusDecoderWebWorker;

    /** Spin up the decoder worker (no speech enhancement) and await readiness. */
    async create(): Promise<void> {
        const worker = new OggOpusDecoderWebWorker({
            speechQualityEnhancement: 'none'
        });
        this.decoder = worker;
        await worker.ready;
    }

    /** Release the worker's resources; no-op if create() was never called. */
    async destroy(): Promise<void> {
        const worker = this.decoder;
        if (worker) return worker.free();
    }

    /** Decode one chunk of Ogg/Opus data. */
    async decode(data: Uint8Array): Promise<IAudioDecodeData | null> {
        const worker = this.decoder;
        if (!worker) return null;
        return worker.decode(data) as Promise<IAudioDecodeData>;
    }

    /** Decode a complete Ogg/Opus file in one call. */
    async decodeAll(data: Uint8Array): Promise<IAudioDecodeData | null> {
        const worker = this.decoder;
        if (!worker) return null;
        return worker.decodeFile(data) as Promise<IAudioDecodeData>;
    }

    /** Flush any samples still buffered inside the decoder. */
    async flush(): Promise<IAudioDecodeData | null> {
        const worker = this.decoder;
        if (!worker) return null;
        return worker.flush() as Promise<IAudioDecodeData>;
    }
}
/**
 * Pass-through decoder for formats the browser can already decode natively
 * via `AudioContext.decodeAudioData`. create/destroy are no-ops and flush
 * never yields data, since native decoding is not incremental.
 */
export class VanillaDecoder implements IAudioDecoder {
    constructor(readonly ac: IMotaAudioContext) {}

    async create(): Promise<void> {}

    async destroy(): Promise<void> {}

    /** Run the bytes through the native Web Audio decoder and unpack channels. */
    private async nativeDecode(
        raw: Uint8Array
    ): Promise<IAudioDecodeData | null> {
        if (!(raw.buffer instanceof ArrayBuffer)) return null;
        const decoded = await this.ac.ac.decodeAudioData(raw.buffer);
        const channels: Float32Array<ArrayBuffer>[] = [];
        for (let ch = 0; ch < decoded.numberOfChannels; ch++) {
            channels.push(decoded.getChannelData(ch));
        }
        return {
            errors: [],
            channelData: channels,
            samplesDecoded: channels[0].length,
            sampleRate: decoded.sampleRate
        };
    }

    decode(data: Uint8Array): Promise<IAudioDecodeData | null> {
        return this.nativeDecode(data);
    }

    decodeAll(data: Uint8Array): Promise<IAudioDecodeData | null> {
        return this.nativeDecode(data);
    }

    flush(): Promise<IAudioDecodeData | null> {
        return Promise.resolve(null);
    }
}

View File

@ -1,23 +1,26 @@
import { isNil } from 'lodash-es';
import { sleep } from 'mutate-animate';
import {
IAudioEffect,
IAudioInput,
IAudioStereoEffect,
IAudioChannelVolumeEffect,
IAudioDelayEffect,
IMotaAudioContext,
IAudioEchoEffect
} from './types';
import { sleep } from '@motajs/common';
export interface IAudioInput {
/** 输入节点 */
input: AudioNode;
}
export interface IAudioOutput {
/** 输出节点 */
output: AudioNode;
}
export abstract class AudioEffect implements IAudioInput, IAudioOutput {
export abstract class AudioEffect implements IAudioEffect {
/** 输出节点 */
abstract output: AudioNode;
/** 输入节点 */
abstract input: AudioNode;
constructor(public readonly ac: AudioContext) {}
readonly ac: AudioContext;
constructor(public readonly motaAC: IMotaAudioContext) {
this.ac = motaAC.ac;
}
/**
*
@ -66,13 +69,13 @@ export abstract class AudioEffect implements IAudioInput, IAudioOutput {
}
}
export class StereoEffect extends AudioEffect {
export class StereoEffect extends AudioEffect implements IAudioStereoEffect {
output: PannerNode;
input: PannerNode;
constructor(ac: AudioContext) {
constructor(ac: IMotaAudioContext) {
super(ac);
const panner = ac.createPanner();
const panner = ac.ac.createPanner();
this.input = panner;
this.output = panner;
}
@ -110,9 +113,9 @@ export class VolumeEffect extends AudioEffect {
output: GainNode;
input: GainNode;
constructor(ac: AudioContext) {
constructor(ac: IMotaAudioContext) {
super(ac);
const gain = ac.createGain();
const gain = ac.ac.createGain();
this.input = gain;
this.output = gain;
}
@ -137,21 +140,24 @@ export class VolumeEffect extends AudioEffect {
start(): void {}
}
export class ChannelVolumeEffect extends AudioEffect {
export class ChannelVolumeEffect
extends AudioEffect
implements IAudioChannelVolumeEffect
{
output: ChannelMergerNode;
input: ChannelSplitterNode;
/** 所有的音量控制节点 */
private readonly gain: GainNode[] = [];
constructor(ac: AudioContext) {
constructor(ac: IMotaAudioContext) {
super(ac);
const splitter = ac.createChannelSplitter();
const merger = ac.createChannelMerger();
const splitter = ac.ac.createChannelSplitter();
const merger = ac.ac.createChannelMerger();
this.output = merger;
this.input = splitter;
for (let i = 0; i < 6; i++) {
const gain = ac.createGain();
const gain = ac.ac.createGain();
splitter.connect(gain, i);
gain.connect(merger, 0, i);
this.gain.push(gain);
@ -182,13 +188,13 @@ export class ChannelVolumeEffect extends AudioEffect {
start(): void {}
}
export class DelayEffect extends AudioEffect {
export class DelayEffect extends AudioEffect implements IAudioDelayEffect {
output: DelayNode;
input: DelayNode;
constructor(ac: AudioContext) {
constructor(ac: IMotaAudioContext) {
super(ac);
const delay = ac.createDelay();
const delay = ac.ac.createDelay();
this.input = delay;
this.output = delay;
}
@ -213,7 +219,7 @@ export class DelayEffect extends AudioEffect {
start(): void {}
}
export class EchoEffect extends AudioEffect {
export class EchoEffect extends AudioEffect implements IAudioEchoEffect {
output: GainNode;
input: GainNode;
@ -226,10 +232,10 @@ export class EchoEffect extends AudioEffect {
/** 是否正在播放 */
private playing: boolean = false;
constructor(ac: AudioContext) {
constructor(ac: IMotaAudioContext) {
super(ac);
const delay = ac.createDelay();
const gain = ac.createGain();
const delay = ac.ac.createDelay();
const gain = ac.ac.createGain();
gain.gain.value = 0.5;
delay.delayTime.value = 0.05;
delay.connect(gain);

View File

@ -0,0 +1,9 @@
export * from './bgm';
export * from './context';
export * from './decoder';
export * from './effect';
export * from './route';
export * from './sound';
export * from './source';
export * from './support';
export * from './types';

258
packages/audio/src/route.ts Normal file
View File

@ -0,0 +1,258 @@
import EventEmitter from 'eventemitter3';
import { isNil } from 'lodash-es';
import { sleep } from '@motajs/common';
import { AudioEffect } from './effect';
import {
IAudioRoute,
AudioStatus,
IAudioSource,
IMotaAudioContext,
EAudioRouteEvent
} from './types';
/**
 * A playback route: wires an audio source through an ordered chain of
 * effects into the context's destination, and manages play/pause/stop/resume
 * with an optional "tail" delay (endTime) so fade-style effects can finish
 * before the source is actually silenced.
 */
export class AudioRoute
    extends EventEmitter<EAudioRouteEvent>
    implements IAudioRoute
{
    /** Final output node of the route: the last effect's output, or the source's. */
    output: AudioNode;
    /** Ordered effect chain the source signal is piped through. */
    readonly effectRoute: AudioEffect[] = [];
    /**
     * Tail time: after pause() or stop() is requested, actual termination is
     * delayed by this long so effects like fade-out can run.
     * NOTE(review): passed straight to sleep(); presumably milliseconds —
     * confirm against @motajs/common sleep().
     */
    endTime: number = 0;
    /** Current playback status of this route. */
    status: AudioStatus = AudioStatus.Stoped;
    /** Source offset returned by source.stop(), used to resume after a pause. */
    private pauseTime: number = 0;
    /** Playback position captured at the moment of pausing. */
    private pauseCurrentTime: number = 0;

    /** Total duration of the underlying source, in seconds. */
    get duration() {
        return this.source.duration;
    }
    /** Current playback position in seconds; frozen at the pause position while paused. */
    get currentTime() {
        if (this.status === AudioStatus.Paused) {
            return this.pauseCurrentTime;
        } else {
            return this.source.currentTime;
        }
    }
    // Seeks by restarting the source at the requested offset.
    set currentTime(time: number) {
        this.source.stop();
        this.source.play(time);
    }

    /** Set when stop() arrives while a pause is already in flight; the pause completes as a stop. */
    private shouldStop: boolean = false;

    /**
     * Monotonic token for in-flight pause/stop waits: each new request bumps
     * it, so a stale wait that wakes up after being superseded can detect the
     * mismatch and abort without touching the source.
     */
    stopIdentifier: number = 0;

    constructor(
        public readonly source: IAudioSource,
        public readonly player: IMotaAudioContext
    ) {
        super();
        this.output = source.output;
        // Keep route status in sync with the source's own lifecycle events.
        source.on('end', () => {
            if (this.status === AudioStatus.Playing) {
                this.status = AudioStatus.Stoped;
            }
        });
        source.on('play', () => {
            if (this.status !== AudioStatus.Playing) {
                this.status = AudioStatus.Playing;
            }
        });
    }

    /**
     * Set the tail time applied to pause() and stop().
     * @param time New tail duration (see {@link endTime}).
     */
    setEndTime(time: number) {
        this.endTime = time;
    }

    /**
     * Start playback: connect source -> effects -> destination, then start
     * the source. No-op when already playing.
     * @param when Offset in the source to start playing at.
     */
    async play(when: number = 0) {
        if (this.status === AudioStatus.Playing) return;
        this.link();
        // The AudioContext may be suspended until a user gesture; resume it first.
        await this.player.ac.resume();
        if (this.effectRoute.length > 0) {
            const first = this.effectRoute[0];
            this.source.connect(first);
            const last = this.effectRoute.at(-1)!;
            last.connect({ input: this.player.getDestination() });
        } else {
            this.source.connect({ input: this.player.getDestination() });
        }
        this.source.play(when);
        this.status = AudioStatus.Playing;
        this.pauseTime = 0;
        this.emit('start', this);
        this.startAllEffect();
        this.emit('play');
    }

    /**
     * Pause playback after the endTime tail elapses. If a newer play/stop
     * request supersedes this one during the tail (status change or
     * stopIdentifier bump), the pause is abandoned. If stop() was requested
     * meanwhile (shouldStop), the pause completes as a full stop instead.
     */
    async pause() {
        if (this.status !== AudioStatus.Playing) return;
        this.status = AudioStatus.Pausing;
        this.stopIdentifier++;
        const identifier = this.stopIdentifier;
        this.emit('end', this.endTime, this);
        await sleep(this.endTime);
        if (
            this.status !== AudioStatus.Pausing ||
            this.stopIdentifier !== identifier
        ) {
            return;
        }
        this.pauseCurrentTime = this.source.currentTime;
        const time = this.source.stop();
        this.pauseTime = time;
        if (this.shouldStop) {
            this.status = AudioStatus.Stoped;
            this.endAllEffect();
            this.emit('stop');
            this.shouldStop = false;
        } else {
            this.status = AudioStatus.Paused;
            this.endAllEffect();
            this.emit('pause');
        }
    }

    /**
     * Resume playback from the paused offset (or from the beginning when the
     * route was stopped).
     * NOTE(review): when called during Pausing/Stoping this only emits
     * 'start'/'resume' and returns — it neither restores the status nor bumps
     * stopIdentifier, so the pending pause/stop still completes. Confirm
     * whether that is intended.
     */
    resume() {
        if (this.status === AudioStatus.Playing) return;
        if (
            this.status === AudioStatus.Pausing ||
            this.status === AudioStatus.Stoping
        ) {
            this.emit('start', this);
            this.emit('resume');
            return;
        }
        if (this.status === AudioStatus.Paused) {
            this.play(this.pauseTime);
        } else {
            this.play(0);
        }
        this.status = AudioStatus.Playing;
        this.pauseTime = 0;
        this.emit('start', this);
        this.startAllEffect();
        this.emit('resume');
    }

    /**
     * Stop playback after the endTime tail elapses. When a pause is already
     * in flight, flags it to finish as a stop instead. A stale wait (status
     * change or stopIdentifier bump during the tail) is abandoned.
     */
    async stop() {
        if (this.status !== AudioStatus.Playing) {
            if (this.status === AudioStatus.Pausing) {
                this.shouldStop = true;
            }
            return;
        }
        this.status = AudioStatus.Stoping;
        this.stopIdentifier++;
        const identifier = this.stopIdentifier;
        this.emit('end', this.endTime, this);
        await sleep(this.endTime);
        if (
            this.status !== AudioStatus.Stoping ||
            this.stopIdentifier !== identifier
        ) {
            return;
        }
        this.source.stop();
        this.status = AudioStatus.Stoped;
        this.pauseTime = 0;
        this.endAllEffect();
        this.emit('stop');
    }

    /**
     * Insert one or more effects into the chain.
     * @param effect Effect(s) to add.
     * @param index Insertion position; appended at the end when omitted (0 = front).
     */
    addEffect(effect: AudioEffect | AudioEffect[], index?: number) {
        if (isNil(index)) {
            if (effect instanceof Array) {
                this.effectRoute.push(...effect);
            } else {
                this.effectRoute.push(effect);
            }
        } else {
            if (effect instanceof Array) {
                this.effectRoute.splice(index, 0, ...effect);
            } else {
                this.effectRoute.splice(index, 0, effect);
            }
        }
        this.setOutput();
        // Rewire live so an actively playing source picks up the new chain.
        if (this.source.playing) this.link();
        this.emit('updateEffect');
    }

    /**
     * Remove an effect from the chain; no-op when it is not present.
     * @param effect Effect to remove.
     */
    removeEffect(effect: AudioEffect) {
        const index = this.effectRoute.indexOf(effect);
        if (index === -1) return;
        this.effectRoute.splice(index, 1);
        effect.disconnect();
        this.setOutput();
        if (this.source.playing) this.link();
        this.emit('updateEffect');
    }

    /** Disconnect every effect in the chain. */
    destroy() {
        this.effectRoute.forEach(v => v.disconnect());
    }

    /** Point `output` at the chain's last effect, or the bare source when empty. */
    private setOutput() {
        const effect = this.effectRoute.at(-1);
        if (!effect) this.output = this.source.output;
        else this.output = effect.output;
    }

    /**
     * Reconnect the effect chain in order (each effect to the next).
     * Source->first and last->destination are wired in play().
     */
    private link() {
        this.effectRoute.forEach(v => v.disconnect());
        this.effectRoute.forEach((v, i) => {
            const next = this.effectRoute[i + 1];
            if (next) {
                v.connect(next);
            }
        });
    }

    /** Notify every effect that playback has started. */
    private startAllEffect() {
        this.effectRoute.forEach(v => v.start());
    }

    /** Notify every effect that playback has ended. */
    private endAllEffect() {
        this.effectRoute.forEach(v => v.end());
    }
}

View File

@ -1,15 +1,9 @@
import EventEmitter from 'eventemitter3';
import { audioPlayer, AudioPlayer } from './player';
import { logger } from '@motajs/common';
import { VolumeEffect } from './effect';
import { IAudioVolumeEffect, IMotaAudioContext } from './types';
type LocationArray = [number, number, number];
interface SoundPlayerEvent {}
export class SoundPlayer<
T extends string = SoundIds
> extends EventEmitter<SoundPlayerEvent> {
export class SoundPlayer<T extends string = SoundIds> {
/** 每个音效的唯一标识符 */
private num: number = 0;
@ -18,14 +12,13 @@ export class SoundPlayer<
/** 所有正在播放的音乐 */
readonly playing: Set<number> = new Set();
/** 音量节点 */
readonly gain: VolumeEffect;
readonly gain: IAudioVolumeEffect;
/** 是否已经启用 */
enabled: boolean = true;
constructor(public readonly player: AudioPlayer) {
super();
this.gain = player.createVolumeEffect();
constructor(public readonly ac: IMotaAudioContext) {
this.gain = ac.createVolumeEffect();
}
/**
@ -58,7 +51,7 @@ export class SoundPlayer<
* @param data Uint8Array数据
*/
async add(id: T, data: Uint8Array) {
const buffer = await this.player.decodeAudioData(data);
const buffer = await this.ac.decodeToAudioBuffer(data);
if (!buffer) {
logger.warn(51, id);
return;
@ -84,19 +77,19 @@ export class SoundPlayer<
return -1;
}
const soundNum = this.num++;
const source = this.player.createBufferSource();
const source = this.ac.createBufferSource();
source.setBuffer(buffer);
const route = this.player.createRoute(source);
const stereo = this.player.createStereoEffect();
const route = this.ac.createRoute(source);
const stereo = this.ac.createStereoEffect();
stereo.setPosition(position[0], position[1], position[2]);
stereo.setOrientation(orientation[0], orientation[1], orientation[2]);
route.addEffect([stereo, this.gain]);
this.player.addRoute(`sounds.${soundNum}`, route);
this.ac.addRoute(`sounds.${soundNum}`, route);
route.play();
// 清理垃圾
source.output.addEventListener('ended', () => {
this.playing.delete(soundNum);
this.player.removeRoute(`sounds.${soundNum}`);
this.ac.removeRoute(`sounds.${soundNum}`);
});
this.playing.add(soundNum);
return soundNum;
@ -108,10 +101,10 @@ export class SoundPlayer<
*/
stop(num: number) {
const id = `sounds.${num}`;
const route = this.player.getRoute(id);
const route = this.ac.getRoute(id);
if (route) {
route.stop();
this.player.removeRoute(id);
this.ac.removeRoute(id);
this.playing.delete(num);
}
}
@ -122,14 +115,12 @@ export class SoundPlayer<
stopAllSounds() {
this.playing.forEach(v => {
const id = `sounds.${v}`;
const route = this.player.getRoute(id);
const route = this.ac.getRoute(id);
if (route) {
route.stop();
this.player.removeRoute(id);
this.ac.removeRoute(id);
}
});
this.playing.clear();
}
}
export const soundPlayer = new SoundPlayer<SoundIds>(audioPlayer);

View File

@ -1,61 +1,22 @@
import EventEmitter from 'eventemitter3';
import { IStreamController, IStreamReader } from '../loader';
import { IAudioInput, IAudioOutput } from './effect';
import { IStreamController, IStreamReader } from '@motajs/loader';
import { logger } from '@motajs/common';
import { AudioType } from './support';
import CodecParser, { CodecFrame, MimeType, OggPage } from 'codec-parser';
import { isNil } from 'lodash-es';
import { IAudioDecodeData, AudioDecoder, checkAudioType } from './decoder';
import {
AudioType,
EAudioSourceEvent,
IAudioBufferSource,
IAudioDecodeData,
IAudioDecoder,
IAudioElementSource,
IAudioInput,
IAudioStreamSource,
IMotaAudioContext
} from './types';
import EventEmitter from 'eventemitter3';
interface AudioSourceEvent {
play: [];
end: [];
}
export abstract class AudioSource
extends EventEmitter<AudioSourceEvent>
implements IAudioOutput
{
/** 音频源的输出节点 */
abstract readonly output: AudioNode;
/** 是否正在播放 */
playing: boolean = false;
/** 获取音频时长 */
abstract get duration(): number;
/** 获取当前音频播放了多长时间 */
abstract get currentTime(): number;
constructor(public readonly ac: AudioContext) {
super();
}
/**
*
*/
abstract play(when?: number): void;
/**
*
* @returns
*/
abstract stop(): number;
/**
*
* @param target
*/
abstract connect(target: IAudioInput): void;
/**
*
* @param loop
*/
abstract setLoop(loop: boolean): void;
}
const mimeTypeMap: Record<AudioType, MimeType> = {
const mimeTypeMap: Record<AudioType, MimeType | 'unknown'> = {
[AudioType.Unknown]: 'unknown',
[AudioType.Aac]: 'audio/aac',
[AudioType.Flac]: 'audio/flac',
[AudioType.Mp3]: 'audio/mpeg',
@ -68,11 +29,16 @@ function isOggPage(data: any): data is OggPage {
return !isNil(data.isFirstPage);
}
export class AudioStreamSource extends AudioSource implements IStreamReader {
export class AudioStreamSource
extends EventEmitter<EAudioSourceEvent>
implements IAudioStreamSource, IStreamReader
{
readonly ac: AudioContext;
/** 音频源节点 */
output: AudioBufferSourceNode;
/** 音频数据 */
buffer?: AudioBuffer;
buffer: AudioBuffer | null = null;
/** 是否已经完全加载完毕 */
loaded: boolean = false;
@ -80,6 +46,8 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
buffered: number = 0;
/** 已经缓冲的采样点数量 */
bufferedSamples: number = 0;
/** 当前是否正在播放 */
playing: boolean = false;
/** 歌曲时长,加载完毕之前保持为 0 */
duration: number = 0;
/** 当前已经播放了多长时间 */
@ -91,7 +59,7 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
/** 音频的采样率,未成功解析出之前保持为 0 */
sampleRate: number = 0;
private controller?: IStreamController;
private controller: IStreamController | null = null;
private loop: boolean = false;
private target?: IAudioInput;
@ -108,9 +76,9 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
/** 音频类型 */
private audioType: AudioType | '' = '';
/** 音频解码器 */
private decoder?: AudioDecoder;
private decoder: IAudioDecoder | null = null;
/** 音频解析器 */
private parser?: CodecParser;
private parser: CodecParser | null = null;
/** 每多长时间组成一个缓存 Float32Array */
private bufferChunkSize: number = 10;
/** 缓存音频数据,每 bufferChunkSize 秒钟组成一个 Float32Array用于流式解码 */
@ -118,9 +86,10 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
private errored: boolean = false;
constructor(context: AudioContext) {
super(context);
this.output = context.createBufferSource();
constructor(readonly motaAC: IMotaAudioContext) {
super();
this.ac = motaAC.ac;
this.output = motaAC.ac.createBufferSource();
}
/**
@ -132,6 +101,24 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
this.bufferChunkSize = size;
}
free(): void {
this.stop();
this.audioData = [];
this.decoder?.destroy();
this.decoder = null;
this.parser = null;
this.audioType = '';
this.headerRecieved = false;
this.errored = false;
this.duration = 0;
this.buffered = 0;
this.bufferedSamples = 0;
this.loaded = false;
this.sampleRate = 0;
this.buffer = null;
this.output.buffer = null;
}
piped(controller: IStreamController): void {
this.controller = controller;
}
@ -141,8 +128,9 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
if (!this.headerRecieved) {
// 检查头文件获取音频类型仅检查前256个字节
const toCheck = data.slice(0, 256);
this.audioType = checkAudioType(data);
if (!this.audioType) {
const type = this.motaAC.getAudioTypeFromData(data);
this.audioType = type;
if (type === AudioType.Unknown) {
logger.error(
25,
[...toCheck]
@ -152,22 +140,23 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
);
return;
}
// 创建解码器
const Decoder = AudioDecoder.decoderMap.get(this.audioType);
if (!Decoder) {
const decoder = this.motaAC.createDecoder(type);
if (!decoder) {
this.errored = true;
logger.error(24, this.audioType);
return Promise.reject(
`Cannot decode stream source type of '${this.audioType}', since there is no registered decoder for that type.`
);
}
this.decoder = new Decoder();
this.decoder = decoder;
// 创建数据解析器
const mime = mimeTypeMap[this.audioType];
const parser = new CodecParser(mime);
this.parser = parser;
await this.decoder.create();
this.headerRecieved = true;
if (mime !== 'unknown') {
const parser = new CodecParser(mime);
this.parser = parser;
await decoder.create();
this.headerRecieved = true;
}
}
const decoder = this.decoder;
@ -209,7 +198,7 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
*/
private async decodeData(
data: Uint8Array,
decoder: AudioDecoder,
decoder: IAudioDecoder,
parser: CodecParser
) {
// 解析音频数据
@ -230,7 +219,7 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
/**
*
*/
private async decodeFlushData(decoder: AudioDecoder, parser: CodecParser) {
private async decodeFlushData(decoder: IAudioDecoder, parser: CodecParser) {
const audioData = await decoder.flush();
if (!audioData) return;
// @ts-expect-error 库类型声明错误
@ -348,7 +337,7 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
}
async start() {
delete this.buffer;
this.buffer = null;
this.headerRecieved = false;
this.audioType = '';
this.errored = false;
@ -365,13 +354,14 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
end(done: boolean, reason?: string): void {
if (done && this.buffer) {
this.loaded = true;
delete this.controller;
this.controller = null;
this.mergeBuffers();
this.duration = this.buffered;
this.audioData = [];
this.decoder?.destroy();
delete this.decoder;
delete this.parser;
this.decoder = null;
this.parser = null;
this.emit('load');
} else {
logger.warn(44, reason ?? '');
}
@ -381,14 +371,14 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
if (!this.buffer) return;
this.lastStartTime = this.ac.currentTime;
if (this.playing) this.output.stop();
this.emit('play');
this.createSourceNode(this.buffer);
this.output.start(0, when);
this.playing = true;
this.emit('play');
this.output.addEventListener('ended', () => {
this.playing = false;
this.emit('end');
if (this.loop && !this.output.loop) this.play(0);
this.emit('end');
});
}
@ -428,9 +418,16 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
}
}
export class AudioElementSource extends AudioSource {
export class AudioElementSource
extends EventEmitter<EAudioSourceEvent>
implements IAudioElementSource
{
readonly ac: AudioContext;
output: MediaElementAudioSourceNode;
/** 当前是否正在播放 */
playing: boolean = false;
/** audio 元素 */
readonly audio: HTMLAudioElement;
@ -441,11 +438,12 @@ export class AudioElementSource extends AudioSource {
return this.audio.currentTime;
}
constructor(context: AudioContext) {
super(context);
constructor(readonly motaAC: IMotaAudioContext) {
super();
this.ac = motaAC.ac;
const audio = new Audio();
audio.preload = 'none';
this.output = context.createMediaElementSource(audio);
this.output = motaAC.ac.createMediaElementSource(audio);
this.audio = audio;
audio.addEventListener('play', () => {
this.playing = true;
@ -455,6 +453,11 @@ export class AudioElementSource extends AudioSource {
this.playing = false;
this.emit('end');
});
audio.addEventListener('load', () => {
if (audio.src.length > 0) {
this.emit('load');
}
});
}
/**
@ -465,6 +468,12 @@ export class AudioElementSource extends AudioSource {
this.audio.src = url;
}
free(): void {
this.stop();
this.audio.src = '';
this.audio.load();
}
play(when: number = 0): void {
if (this.playing) return;
this.audio.currentTime = when;
@ -474,7 +483,6 @@ export class AudioElementSource extends AudioSource {
stop(): number {
this.audio.pause();
this.playing = false;
this.emit('end');
return this.audio.currentTime;
}
@ -487,14 +495,21 @@ export class AudioElementSource extends AudioSource {
}
}
export class AudioBufferSource extends AudioSource {
export class AudioBufferSource
extends EventEmitter<EAudioSourceEvent>
implements IAudioBufferSource
{
readonly ac: AudioContext;
output: AudioBufferSourceNode;
/** 音频数据 */
buffer?: AudioBuffer;
buffer: AudioBuffer | null = null;
/** 是否循环 */
private loop: boolean = false;
/** 当前是否正在播放 */
playing: boolean = false;
duration: number = 0;
get currentTime(): number {
return this.ac.currentTime - this.lastStartTime + this.lastStartWhen;
@ -506,9 +521,10 @@ export class AudioBufferSource extends AudioSource {
private lastStartTime: number = 0;
private target?: IAudioInput;
constructor(context: AudioContext) {
super(context);
this.output = context.createBufferSource();
constructor(readonly motaAC: IMotaAudioContext) {
super();
this.ac = motaAC.ac;
this.output = motaAC.ac.createBufferSource();
}
/**
@ -522,19 +538,26 @@ export class AudioBufferSource extends AudioSource {
this.buffer = buffer;
}
this.duration = this.buffer.duration;
this.emit('load');
}
free(): void {
this.stop();
this.output.buffer = null;
this.buffer = null;
}
play(when?: number): void {
if (this.playing || !this.buffer) return;
this.playing = true;
this.lastStartTime = this.ac.currentTime;
this.emit('play');
this.createSourceNode(this.buffer);
this.output.start(0, when);
this.emit('play');
this.output.addEventListener('ended', () => {
this.playing = false;
this.emit('end');
if (this.loop && !this.output.loop) this.play(0);
this.emit('end');
});
}

View File

@ -1,16 +1,9 @@
import { AudioType } from './types';
const audio = new Audio();
const supportMap = new Map<string, boolean>();
export const enum AudioType {
Mp3 = 'audio/mpeg',
Wav = 'audio/wav; codecs="1"',
Flac = 'audio/flac',
Opus = 'audio/ogg; codecs="opus"',
Ogg = 'audio/ogg; codecs="vorbis"',
Aac = 'audio/aac'
}
/**
*
* @param type

792
packages/audio/src/types.ts Normal file
View File

@ -0,0 +1,792 @@
import EventEmitter from 'eventemitter3';
import { IStreamReader } from '@motajs/loader';
export interface IAudioInput {
    /** Input node: the `AudioNode` that upstream nodes connect into. */
    readonly input: AudioNode;
}
export interface IAudioOutput {
    /** Output node: the `AudioNode` this object exposes for connecting downstream. */
    readonly output: AudioNode;
}
/**
 * Known audio MIME types (container + codec strings as accepted by
 * `HTMLMediaElement.canPlayType`). `Unknown` marks data whose format
 * could not be detected.
 */
export const enum AudioType {
    Unknown = 'unknown',
    Mp3 = 'audio/mpeg',
    Wav = 'audio/wav; codecs="1"',
    Flac = 'audio/flac',
    Opus = 'audio/ogg; codecs="opus"',
    Ogg = 'audio/ogg; codecs="vorbis"',
    Aac = 'audio/aac'
}
//#region 音频源
/** Events emitted by an {@link IAudioSource}. */
export interface EAudioSourceEvent {
    /** Fired when playback starts. */
    play: [];
    /** Fired when playback ends or is stopped. */
    end: [];
    /** Fired when the source's audio data has finished loading. */
    load: [];
}
export interface IAudioSource
    extends IAudioOutput, EventEmitter<EAudioSourceEvent> {
    /** The {@link IMotaAudioContext} this source belongs to. */
    readonly motaAC: IMotaAudioContext;
    /** The underlying Web Audio playback context. */
    readonly ac: AudioContext;
    /** The audio node produced by this source. */
    readonly output: AudioNode;
    /** Whether the source is currently playing. */
    readonly playing: boolean;
    /** Total duration of the audio. */
    readonly duration: number;
    /** Current playback position. */
    readonly currentTime: number;
    /**
     * Start playback.
     * @param when Position to start from (presumably seconds, matching
     *             `currentTime` — TODO confirm against implementations).
     */
    play(when?: number): void;
    /**
     * Stop playback.
     * @returns The playback position at the moment the source stopped.
     */
    stop(): number;
    /**
     * Connect this source's output to the given input.
     * @param target Connection target.
     */
    connect(target: IAudioInput): void;
    /**
     * Enable or disable looping.
     * @param loop Whether playback should loop.
     */
    setLoop(loop: boolean): void;
    /**
     * Release all resources held by this source.
     */
    free(): void;
}
/** An audio source fed incrementally by a stream loader ({@link IStreamReader}). */
export interface IAudioStreamSource extends IAudioSource, IStreamReader {
    /** Output node used for stream-loaded playback. */
    readonly output: AudioBufferSourceNode;
    /** Audio buffer holding the decoded samples, or `null` before any data arrives. */
    readonly buffer: AudioBuffer | null;
    /** Whether the whole stream has finished loading. */
    readonly loaded: boolean;
    /** Duration buffered so far. */
    readonly buffered: number;
    /** Number of samples buffered so far. */
    readonly bufferedSamples: number;
    /** Sample rate of the audio. */
    readonly sampleRate: number;
    /**
     * Set the streaming chunk size (default presumably 10 — the original
     * comment is garbled; confirm against the implementation).
     * @param size New chunk size.
     */
    setChunkSize(size: number): void;
}
/** An audio source backed by an HTML `<audio>` element. */
export interface IAudioElementSource extends IAudioSource {
    /** Source node wrapping the `<audio>` element. */
    readonly output: MediaElementAudioSourceNode;
    /** The underlying `<audio>` element. */
    readonly audio: HTMLAudioElement;
    /**
     * Set the media URL to play.
     * @param url Audio resource URL.
     */
    setSource(url: string): void;
}
/** An audio source backed by a fully decoded `AudioBuffer`. */
export interface IAudioBufferSource extends IAudioSource {
    /** Source node that plays the buffer. */
    readonly output: AudioBufferSourceNode;
    /** The decoded audio data, or `null` when not loaded. */
    readonly buffer: AudioBuffer | null;
    /**
     * Set the audio data. An `ArrayBuffer` is decoded first; an
     * `AudioBuffer` is used as-is.
     * @param buffer Raw encoded bytes or an already decoded buffer.
     */
    setBuffer(buffer: ArrayBuffer | AudioBuffer): Promise<void>;
}
//#endregion
//#region 音频路由
/**
 * Playback state of an {@link IAudioRoute}. `Pausing`/`Stoping` are the
 * transitional states while the route's fade-out (`endTime`) elapses.
 * NOTE(review): `Stoping`/`Stoped` are misspellings of `Stopping`/`Stopped`,
 * but renaming would break existing consumers.
 */
export const enum AudioStatus {
    Playing,
    Pausing,
    Paused,
    Stoping,
    Stoped
}
/** Events emitted by an {@link IAudioRoute}. */
export interface EAudioRouteEvent {
    /** Fired when the effect chain of the route changes. */
    updateEffect: [];
    /** Fired when playback starts. */
    play: [];
    /** Fired when playback stops. */
    stop: [];
    /** Fired when playback is paused. */
    pause: [];
    /** Fired when playback resumes. */
    resume: [];
    /** Fired at the start of playback, with the route itself. */
    start: [route: IAudioRoute];
    /** Fired at the end of playback, with the final time and the route. */
    end: [time: number, route: IAudioRoute];
}
/**
 * A playback route: an audio source followed by a chain of effects.
 */
export interface IAudioRoute
    extends IAudioOutput, EventEmitter<EAudioRouteEvent> {
    /** Ordered effect chain applied to the source. */
    readonly effectRoute: readonly IAudioEffect[];
    /**
     * Tail time: after pause/stop is requested, playback keeps running for
     * this long before it truly ends, allowing fade-in/fade-out effects.
     */
    readonly endTime: number;
    /** Current playback status. */
    readonly status: AudioStatus;
    /** Total duration of the audio. */
    readonly duration: number;
    /** Current playback position. */
    readonly currentTime: number;
    /** The audio source feeding this route. */
    readonly source: IAudioSource;
    /**
     * Set the tail time used on pause/stop.
     * @param time New tail time.
     */
    setEndTime(time: number): void;
    /**
     * Start playback.
     * @param when Position to start from.
     */
    play(when?: number): Promise<void>;
    /**
     * Pause playback. Resolves after the tail time has elapsed.
     */
    pause(): Promise<void>;
    /**
     * Resume playback after a pause.
     */
    resume(): void;
    /**
     * Stop playback. Resolves after the tail time has elapsed.
     */
    stop(): Promise<void>;
    /**
     * Insert one or more effects into the chain.
     * @param effect Effect(s) to add.
     * @param index  Insertion index; 0 inserts at the head of the chain.
     */
    addEffect(effect: IAudioEffect | IAudioEffect[], index?: number): void;
    /**
     * Remove an effect from the chain.
     * @param effect Effect to remove.
     */
    removeEffect(effect: IAudioEffect): void;
    /**
     * Destroy this route and release its resources.
     */
    destroy(): void;
}
//#endregion
//#region 音频效果
/** A node in a route's effect chain: consumes audio and produces processed audio. */
export interface IAudioEffect extends IAudioInput, IAudioOutput {
    /** The {@link IMotaAudioContext} this effect belongs to. */
    readonly motaAC: IMotaAudioContext;
    /** The underlying Web Audio playback context. */
    readonly ac: AudioContext;
    /**
     * Called when the effect is deactivated / playback ends.
     */
    end(): void;
    /**
     * Called when the effect is activated / playback starts.
     */
    start(): void;
    /**
     * Connect this effect's output to the given input.
     * @param target Connection target.
     * @param output Output channel index of this effect.
     * @param input  Input channel index of the target.
     */
    connect(target: IAudioInput, output?: number, input?: number): void;
    /**
     * Disconnect this effect. With no arguments, disconnects everything.
     * @param target Connection target to detach from.
     * @param output Output channel index of this effect.
     * @param input  Input channel index of the target.
     */
    disconnect(target?: IAudioInput, output?: number, input?: number): void;
}
/** Spatialization effect: positions the sound in 3D space relative to the listener. */
export interface IAudioStereoEffect extends IAudioEffect {
    /**
     * Set the facing direction of the sound. Axis convention: +x points
     * horizontally right, +y points up (perpendicular to the ground),
     * +z points out of the screen, away from the user.
     * @param x X component.
     * @param y Y component.
     * @param z Z component.
     */
    setOrientation(x: number, y: number, z: number): void;
    /**
     * Set the position of the sound. Same axis convention as
     * {@link IAudioStereoEffect.setOrientation}.
     * @param x X coordinate.
     * @param y Y coordinate.
     * @param z Z coordinate.
     */
    setPosition(x: number, y: number, z: number): void;
}
/** Volume (gain) effect. */
export interface IAudioVolumeEffect extends IAudioEffect {
    /** Input gain node. */
    readonly input: GainNode;
    /** Output gain node. */
    readonly output: GainNode;
    /**
     * Set the volume.
     * @param volume New volume.
     */
    setVolume(volume: number): void;
    /**
     * Get the current volume.
     */
    getVolume(): number;
}
/** Per-channel volume effect (splitter → per-channel gain → merger). */
export interface IAudioChannelVolumeEffect extends IAudioEffect {
    /**
     * Set the volume of a single channel.
     * @param channel Channel index, 0-5.
     * @param volume  New volume for that channel.
     */
    setVolume(channel: number, volume: number): void;
    /**
     * Get the volume of a single channel (index 0-5).
     * @param channel Channel index.
     */
    getVolume(channel: number): number;
}
/** Delay effect. */
export interface IAudioDelayEffect extends IAudioEffect {
    /**
     * Set the delay time (presumably seconds, per Web Audio `DelayNode` —
     * TODO confirm against the implementation).
     * @param delay New delay.
     */
    setDelay(delay: number): void;
    /**
     * Get the current delay time.
     */
    getDelay(): number;
}
/** Echo effect: a feedback loop of gain + delay. */
export interface IAudioEchoEffect extends IAudioEffect {
    /**
     * Set the feedback gain. The original (garbled) comment suggests the
     * valid range is 0-1 exclusive of 1, default 0.5 — confirm against the
     * implementation.
     * @param gain Feedback gain.
     */
    setFeedbackGain(gain: number): void;
    /**
     * Set the echo delay. The original (garbled) comment suggests a minimum
     * of 0.01 with 0.01 granularity — confirm against the implementation.
     * @param delay Echo delay.
     */
    setEchoDelay(delay: number): void;
    /**
     * Get the current feedback gain.
     */
    getFeedbackGain(): number;
    /**
     * Get the current echo delay.
     */
    getEchoDelay(): number;
}
//#endregion
//#region 音频上下文
/** Factory methods for creating audio sources and effects bound to a context. */
export interface IMotaAudioContextCreator {
    /**
     * Instantiate a custom audio source class bound to this context.
     * @param Source Source class constructor.
     */
    createSource<T extends IAudioSource>(
        Source: new (ac: IMotaAudioContext) => T
    ): T;
    /**
     * Create a stream-loaded source (e.g. for opus/ogg streaming).
     */
    createStreamSource(): IAudioStreamSource;
    /**
     * Create a source backed by an HTML `<audio>` element.
     */
    createElementSource(): IAudioElementSource;
    /**
     * Create a source backed by an `AudioBuffer`.
     */
    createBufferSource(): IAudioBufferSource;
    /**
     * Instantiate a custom audio effect class bound to this context.
     * @param Effect Effect class constructor.
     */
    createEffect<T extends IAudioEffect>(
        Effect: new (ac: IMotaAudioContext) => T
    ): T;
    /**
     * Create a volume effect.
     * ```txt
     *             |----------|
     * Input ----> | GainNode | ----> Output
     *             |----------|
     * ```
     */
    createVolumeEffect(): IAudioVolumeEffect;
    /**
     * Create a 3D spatialization effect.
     * ```txt
     *             |------------|
     * Input ----> | PannerNode | ----> Output
     *             |------------|
     * ```
     */
    createStereoEffect(): IAudioStereoEffect;
    /**
     * Create a per-channel volume effect.
     * ```txt
     *                                 |----------|
     *                              -> | GainNode | \
     *             |--------------| /  |----------|  -> |------------|
     * Input ----> | SplitterNode |        ......       | MergerNode | ----> Output
     *             |--------------| \  |----------|  -> |------------|
     *                              -> | GainNode | /
     *                                 |----------|
     * ```
     */
    createChannelVolumeEffect(): IAudioChannelVolumeEffect;
    /**
     * Create a delay effect.
     * ```txt
     *             |-----------|
     * Input ----> | DelayNode | ----> Output
     *             |-----------|
     * ```
     */
    createDelayEffect(): IAudioDelayEffect;
    /**
     * Create an echo effect (gain with a delayed feedback loop).
     * ```txt
     *             |----------|
     * Input ----> | GainNode | ----> Output
     *        ^    |----------|   |
     *        |                   |
     *        |    |------------| |
     *        |-- | Delay Node | <--
     *            |------------|
     * ```
     */
    createEchoEffect(): IAudioEchoEffect;
}
/** Factory that creates an {@link IAudioDecoder} bound to the given context. */
export type AudioDecoderCreateFunc = (
    context: IMotaAudioContext
) => IAudioDecoder;
/**
 * The game's audio context: wraps a Web Audio `AudioContext`, manages named
 * playback routes, global volume, listener pose and audio decoding.
 */
export interface IMotaAudioContext extends IMotaAudioContextCreator {
    /** The underlying Web Audio context. */
    readonly ac: AudioContext;
    /** All registered playback routes, keyed by id. */
    readonly audioRoutes: Map<string, IAudioRoute>;
    /** Master gain node. */
    readonly gain: GainNode;
    /**
     * Set the master volume.
     * `gain = 10 ** (dB / 20), where minDB = -60`
     * @param volume New volume.
     */
    setVolume(volume: number): void;
    /**
     * Get the master volume.
     * `gain = 10 ** (dB / 20), where minDB = -60`
     */
    getVolume(): number;
    /**
     * Get the destination node routes should connect to.
     */
    getDestination(): AudioNode;
    /**
     * Create a sound-effect player bound to this context.
     */
    createSoundPlayer<T extends string>(): ISoundPlayer<T>;
    /**
     * Create a playback route for the given source.
     * @param source Audio source.
     */
    createRoute(source: IAudioSource): IAudioRoute;
    /**
     * Register a route under an id.
     * @param id    Route id.
     * @param route Route to register.
     */
    addRoute(id: string, route: IAudioRoute): void;
    /**
     * Look up a route by id.
     * @param id Route id.
     */
    getRoute(id: string): IAudioRoute | null;
    /**
     * Unregister a route.
     * @param id Route id.
     */
    removeRoute(id: string): void;
    /**
     * Play the route registered under the given id.
     * @param id   Route id.
     * @param when Position to start from.
     */
    play(id: string, when?: number): void;
    /**
     * Pause the route registered under the given id.
     * @param id Route id.
     * @returns Resolves when the pause has completed.
     */
    pause(id: string): Promise<void>;
    /**
     * Stop the route registered under the given id.
     * @param id Route id.
     * @returns Resolves when the stop has completed.
     */
    stop(id: string): Promise<void>;
    /**
     * Resume the route registered under the given id.
     * @param id Route id.
     */
    resume(id: string): void;
    /**
     * Set the listener position. Axis convention: +x horizontally right,
     * +y up (perpendicular to the ground), +z out of the screen, away from
     * the user.
     * @param x X coordinate.
     * @param y Y coordinate.
     * @param z Z coordinate.
     */
    setListenerPosition(x: number, y: number, z: number): void;
    /**
     * Set the listener facing direction. Same axis convention as
     * {@link IMotaAudioContext.setListenerPosition}.
     * @param x X component.
     * @param y Y component.
     * @param z Z component.
     */
    setListenerOrientation(x: number, y: number, z: number): void;
    /**
     * Set the listener "up" vector. Same axis convention as
     * {@link IMotaAudioContext.setListenerPosition}.
     * @param x X component.
     * @param y Y component.
     * @param z Z component.
     */
    setListenerUp(x: number, y: number, z: number): void;
    /**
     * Check whether the browser can natively play the given audio type.
     * @param type Audio MIME type.
     */
    isAudioVanillaSupport(type: AudioType): boolean;
    /**
     * Register a custom decoder for an audio type.
     * @param type    Audio MIME type.
     * @param decoder Decoder factory.
     */
    registerDecoder(type: AudioType, decoder: AudioDecoderCreateFunc): void;
    /**
     * Create a decoder for the given audio type, or `null` if none is
     * registered.
     * @param type Audio MIME type.
     */
    createDecoder(type: AudioType): IAudioDecoder | null;
    /**
     * Detect the audio type from raw bytes (file header).
     * @param data Raw audio bytes.
     */
    getAudioTypeFromData(data: Uint8Array): AudioType;
    /**
     * Decode raw audio bytes using a registered decoder.
     * @param data Raw audio bytes.
     */
    decodeAudio(data: Uint8Array): Promise<IAudioDecodeData | null>;
    /**
     * Convert decoded PCM data into an `AudioBuffer`.
     * @param data Decoded audio data.
     */
    toAudioBuffer(data: IAudioDecodeData): AudioBuffer;
    /**
     * Decode raw audio bytes straight into an `AudioBuffer`.
     * @param data Raw audio bytes.
     */
    decodeToAudioBuffer(data: Uint8Array): Promise<AudioBuffer | null>;
}
//#endregion
//#region 音效播放器
/** A 3D location or direction as an `[x, y, z]` tuple. */
export type AudioLocationArray = [number, number, number];
/** Sound-effect player: registers decoded sounds by id and plays spatialized instances. */
export interface ISoundPlayer<T extends string> {
    /** The audio context this player belongs to. */
    readonly ac: IMotaAudioContext;
    /** Whether this player is currently enabled. */
    readonly enabled: boolean;
    /**
     * Enable or disable this player.
     * @param enabled Whether sounds may play.
     */
    setEnabled(enabled: boolean): void;
    /**
     * Set the player's volume.
     * @param volume New volume.
     */
    setVolume(volume: number): void;
    /**
     * Get the player's volume.
     * Fixed: was declared `void`; a getter paired with `setVolume(number)`
     * must return `number` (matches `IBGMPlayer.getVolume(): number`).
     */
    getVolume(): number;
    /**
     * Register a sound effect.
     * @param id   Sound id.
     * @param data Raw audio bytes as a `Uint8Array`.
     */
    add(id: T, data: Uint8Array): Promise<void>;
    /**
     * Play a registered sound.
     * @param id          Sound id.
     * @param position    Position, defaults to [0, 0, 0]. Axis convention per
     *                    the original (garbled) comment: x right, y forward,
     *                    z up — TODO confirm against the implementation.
     * @param orientation Facing direction, defaults to [0, 1, 0].
     * @returns A handle identifying this playback instance.
     */
    play(
        id: T,
        position?: AudioLocationArray,
        orientation?: AudioLocationArray
    ): number;
    /**
     * Stop one playback instance.
     * @param num Handle returned by {@link ISoundPlayer.play}.
     */
    stop(num: number): void;
    /**
     * Stop all currently playing sounds.
     */
    stopAllSounds(): void;
}
//#endregion
//#region bgm 播放器
/** Background-music player: manages a set of BGM routes and cross-fades between them. */
export interface IBGMPlayer<T extends string> {
    /** Whether this player is currently enabled. */
    readonly enabled: boolean;
    /** Id of the BGM currently playing, if any. */
    readonly playingBGM?: T;
    /** Whether any BGM is currently playing. */
    readonly playing: boolean;
    /** Maximum cache capacity, default 256MB. */
    readonly maxCacheSize: number;
    /**
     * Set the maximum cache capacity used for stream-loaded audio. The
     * original comment is garbled; it appears to mention 128MB/256MB
     * defaults, a clamp between 32MB and 512MB, and that roughly one track
     * at 48000Hz occupies ~33MB — TODO confirm against the implementation.
     * @param size New capacity in MB.
     */
    setMaxCacheSize(size: number): void;
    /**
     * Set the cross-fade transition time between tracks.
     * @param time Transition duration.
     */
    setTransitionTime(time: number): void;
    /**
     * Block all BGM switching until unblocked.
     */
    blockChange(): void;
    /**
     * Allow BGM switching again.
     */
    unblockChange(): void;
    /**
     * Set the player's volume.
     * @param volume New volume.
     */
    setVolume(volume: number): void;
    /**
     * Get the player's volume.
     */
    getVolume(): number;
    /**
     * Enable or disable this player.
     * @param enabled Whether BGM may play.
     */
    setEnabled(enabled: boolean): void;
    /**
     * Get the AudioRoute of a registered BGM.
     * @param id BGM id.
     */
    get(id: T): IAudioRoute | null;
    /**
     * Register a BGM from a URL.
     * @param id  BGM id.
     * @param url BGM resource URL.
     */
    addBGMFromURL(id: T, url: string): void;
    /**
     * Remove a registered BGM.
     * @param id BGM id.
     */
    removeBgm(id: T): void;
    /**
     * Play a registered BGM.
     * @param id   BGM id.
     * @param when Position to start from.
     */
    play(id: T, when?: number): void;
    /**
     * Resume the paused BGM.
     */
    resume(): void;
    /**
     * Pause the current BGM.
     */
    pause(): void;
    /**
     * Stop the current BGM.
     */
    stop(): void;
    /**
     * Destroy this player and release its resources.
     */
    destroy(): void;
}
//#endregion
//#region 解码器
/** A single error produced while decoding audio. */
export interface IAudioDecodeError {
    /** Human-readable error message. */
    readonly message: string;
}
/** Result of decoding (part of) an audio stream to PCM. */
export interface IAudioDecodeData {
    /** PCM samples for each channel. */
    readonly channelData: Float32Array<ArrayBuffer>[];
    /** Number of PCM samples decoded so far. */
    readonly samplesDecoded: number;
    /** Sample rate of the decoded audio. */
    readonly sampleRate: number;
    /** Errors encountered during decoding. */
    readonly errors: IAudioDecodeError[];
}
/** A (possibly streaming) audio decoder. */
export interface IAudioDecoder {
    /**
     * Initialize the decoder. Must be called before decoding.
     */
    create(): Promise<void>;
    /**
     * Destroy the decoder and release its resources.
     */
    destroy(): Promise<void>;
    /**
     * Decode one chunk of an audio stream (streaming mode).
     * @param data Chunk of encoded bytes.
     */
    decode(data: Uint8Array): Promise<IAudioDecodeData | null>;
    /**
     * Decode a complete audio file in one call.
     * @param data Entire encoded file.
     */
    decodeAll(data: Uint8Array): Promise<IAudioDecodeData | null>;
    /**
     * Flush any samples still buffered inside the decoder (end of stream).
     */
    flush(): Promise<IAudioDecodeData | null>;
}
//#endregion

View File

@ -24,7 +24,7 @@
"22": "Incorrect props for custom tag. Please ensure you have delivered 'item' prop and other required props.",
"23": "Cannot get reader when fetching '$1'.",
"24": "Cannot decode source type of '$1', since there is no registered decoder for that type.",
"25": "Unknown audio type. Header: '$1'",
"25": "Cannot decode audio binary, since the audio format may not be supported. File header: '$1'",
"26": "Uncaught error when fetching stream data from '$1'. Error info: $2.",
"27": "No autotile connection data, please ensure you have created autotile connection map.",
"28": "Cannot compile map render shader.",

View File

@ -530,7 +530,7 @@ export function loadDefaultResource() {
const res = LoadTask.add('byte', `byte/project/sounds/${v}`);
Mota.r(() => {
res.once('load', res => {
const { soundPlayer } = Mota.require('@user/client-modules');
const { soundPlayer } = Mota.require('@user/client-base');
soundPlayer.add(v, res.resource!);
});
});
@ -696,9 +696,8 @@ export async function loadCompressedResource() {
new FontFace(name.slice(0, -4), font)
);
} else if (usage === 'sound' && main.mode === 'play') {
const { soundPlayer } = Mota.require(
'@user/client-modules'
);
const { soundPlayer } =
Mota.require('@user/client-base');
soundPlayer.add(name as SoundIds, value as Uint8Array);
} else if (usage === 'animate') {
const ani = value as string;

View File

@ -110,12 +110,12 @@ function handleAudioSetting<T extends number | boolean>(
n: T,
_o: T
) {
const { bgmController, soundPlayer } = Mota.require('@user/client-modules');
const { bgmPlayer, soundPlayer } = Mota.require('@user/client-base');
if (key === 'bgmEnabled') {
bgmController.setEnabled(n as boolean);
bgmPlayer.setEnabled(n as boolean);
core.checkBgm();
} else if (key === 'bgmVolume') {
bgmController.setVolume((n as number) / 100);
bgmPlayer.setVolume((n as number) / 100);
} else if (key === 'soundEnabled') {
soundPlayer.setEnabled(n as boolean);
} else if (key === 'soundVolume') {

View File

@ -0,0 +1,6 @@
{
"name": "@motajs/loader",
"dependencies": {
"@motajs/common": "workspace:*"
}
}