Mirror of https://github.com/unanmed/HumanBreak.git (synced 2025-04-11 15:47:06 +08:00)
refactor: 音效 (sound effects)
This commit is contained in:
parent e6ac3197e1
commit 6153ca8b51
@ -2988,7 +2988,6 @@ control.prototype.screenFlash = function (
});
};

// todo: deprecate playBgm, pauseBgm, resumeBgm, triggerBgm
////// 播放背景音乐 //////
control.prototype.playBgm = function (bgm, startTime) {
// see src/module/fallback/audio.ts
@ -3007,32 +3006,21 @@ control.prototype.resumeBgm = function (resumeTime) {
////// 更改背景音乐的播放 //////
control.prototype.triggerBgm = function () {
// see src/module/fallback/audio.ts
return;
if (main.mode !== 'play') return;
const bgm = Mota.require('var', 'bgm');
bgm.disable = !bgm.disable;

if (!bgm.disable) this.resumeBgm();
else this.pauseBgm();
};

// todo: deprecate playSound, stopSound, getPlayingSounds
////// 播放音频 //////
control.prototype.playSound = function (sound, pitch, callback) {
sound = core.getMappedName(sound);
Mota.require('var', 'sound').play(sound, callback);
// see src/module/fallback/audio.ts
};

////// 停止所有音频 //////
control.prototype.stopSound = function (id) {
if (typeof id === 'number') Mota.require('var', 'sound').stop(id);
else Mota.require('var', 'sound').stopAll();
// see src/module/fallback/audio.ts
};

////// 获得当前正在播放的所有(指定)音效的id列表 //////
control.prototype.getPlayingSounds = function (name) {
name = core.getMappedName(name);
return Mota.require('var', 'sound').getPlaying(name);
// see src/module/fallback/audio.ts
};

////// 检查bgm状态 //////
@ -11,7 +11,8 @@ type ResourceType =
| 'material'
| 'audio'
| 'json'
| 'zip';
| 'zip'
| 'byte';
interface CompressedLoadListItem {
type: ResourceType;
name: string;
@ -147,7 +148,7 @@ export async function splitResource() {
for (const sound of data.main.sounds) {
const path = `./dist/project/sounds/${sound}`;
const stat = await fs.stat(path);
await pushItem('audio', sound, 'sound', stat, await fs.readFile(path));
await pushItem('byte', sound, 'sound', stat, await fs.readFile(path));
}

// fonts
@ -1,93 +0,0 @@
import { EventEmitter } from '../common/eventEmitter';

const ac = new AudioContext();

interface BaseNode {
node: AudioNode;
channel?: number;
}

interface AudioPlayerEvent {
play: (node: AudioBufferSourceNode) => void;
update: (audio: AudioBuffer) => void;
end: (node: AudioBufferSourceNode) => void;
}

export type AudioParamOf<T> = Record<SelectKey<T, AudioParam>, number>;

export class AudioPlayer extends EventEmitter<AudioPlayerEvent> {
static ac: AudioContext = ac;
static index: number = 0;

/** 音频的索引,这样的话可以复用来提高性能表现 */
index: number = AudioPlayer.index++;

data: ArrayBuffer;
buffer: AudioBuffer | null = null;
source?: AudioBufferSourceNode;

baseNode: BaseNode[] = [];

constructor(data: ArrayBuffer) {
super();
this.data = data;
this.update(data);
}

/**
 * 更新音频数据
 * @param data 音频的ArrayBuffer数据
 */
async update(data: ArrayBuffer) {
this.data = data;
this.buffer = await ac.decodeAudioData(data);

this.emit('update', this.buffer);
}

/**
 * 获取音频源数据节点
 */
getSource() {
this.source ??= ac.createBufferSource();
this.source.buffer = this.buffer;

return this.source;
}

/**
 * 播放音频
 */
play(when?: number, offset?: number, duration?: number) {
this.ready();
if (!this.source) return;

this.source.start(when, offset, duration);

const source = this.source;
this.source?.addEventListener('ended', ev => {
this.emit('end', source);
});
this.emit('play', source);

delete this.source;
return source;
}

/**
 * 准备音频资源连接
 */
ready() {
const source = this.getSource();
this.baseNode.forEach(v => {
source.connect(v.node, 0, v.channel);
});
}

/**
 * 获取音频输出destination
 */
getDestination() {
return ac.destination;
}
}
@ -1,222 +0,0 @@
import { has } from '@/plugin/utils';
import { AudioParamOf, AudioPlayer } from './audio';
import { ResourceController } from '../loader/controller';

// todo: 立体声,可设置音源位置

type Panner = AudioParamOf<PannerNode>;
type Listener = AudioParamOf<AudioListener>;

export class SoundEffect extends AudioPlayer {
static playIndex = 0;

/** 音量 */
static volume: number = 1;
/** 是否关闭音效 */
static disable: boolean = false;

private playing: Record<string, AudioBufferSourceNode> = {};
private _stopingAll: boolean = false;
private playMap: Map<AudioBufferSourceNode, number> = new Map();

private _stereo: boolean = false;

gain: GainNode = AudioPlayer.ac.createGain();
panner: PannerNode | null = null;

set volumn(value: number) {
this.gain.gain.value = value * SoundEffect.volume;
}
get volumn(): number {
return this.gain.gain.value / SoundEffect.volume;
}

set stereo(value: boolean) {
if (value !== this._stereo) this.initAudio(value);
this._stereo = value;
}
get stereo(): boolean {
return this._stereo;
}

constructor(data: ArrayBuffer, stereo: boolean = true) {
super(data);

this.on('end', node => {
if (this._stopingAll) return;
const index = this.playMap.get(node);
if (!index) return;
delete this.playing[index];
this.playMap.delete(node);
});
this.on('update', () => {
this.initAudio(this._stereo);
});

this._stereo = stereo;
this.initAudio(stereo);
}

/**
 * 设置音频路由线路
 * ```txt
 * 不启用立体声:source -> gain -> destination
 * 启用立体声:source -> panner -> gain -> destination
 * ```
 * @param stereo 是否启用立体声
 */
protected initAudio(stereo: boolean = true) {
const channel = this.buffer?.numberOfChannels;
const ac = AudioPlayer.ac;
if (!channel) return;
this.panner = null;
if (stereo) {
this.panner = ac.createPanner();
this.panner.connect(this.gain);
this.baseNode = [{ node: this.panner }];
} else {
this.baseNode = [{ node: this.gain }];
}
this.gain.connect(this.getDestination());
}

/**
 * 播放音频
 * @returns 音频的唯一id
 */
playSE() {
if (SoundEffect.disable || this.volumn === 0) return;
const node = this.play();
if (!node) return;
const index = SoundEffect.playIndex++;
this.playing[index] = node;
this.playMap.set(node, index);

return index;
}

/**
 * 停止所有音频
 */
stopAll() {
this._stopingAll = true;
Object.values(this.playing).forEach(v => {
v.stop();
});
this.playing = {};
this.playMap.clear();
this._stopingAll = false;
}

/**
 * 根据唯一id停止音频
 * @param index 音频唯一id
 */
stopByIndex(index: number) {
this.playing[index]?.stop();
delete this.playing[index];
}

/**
 * 设置立体声信息
 * @param source 立体声声源位置与朝向
 * @param listener 听者的位置、头顶方向、面朝方向
 */
setPanner(source?: Partial<Panner>, listener?: Partial<Listener>) {
if (!this.panner) return;
if (source) {
for (const [key, value] of Object.entries(source)) {
this.panner[key as keyof Panner].value = value;
}
}
if (listener) {
const l = AudioPlayer.ac.listener;
for (const [key, value] of Object.entries(listener)) {
l[key as keyof Listener].value = value;
}
}
}
}

export class SoundController extends ResourceController<
ArrayBuffer,
SoundEffect
> {
private seIndex: Record<string, SoundEffect> = {};

/**
 * 添加一个新的音频
 * @param uri 音频的uri,由于音频也是一种资源,因此格式为`sounds.xxx`
 * @param data 音频的ArrayBuffer信息,会被解析为AudioBuffer
 */
add(uri: string, data: ArrayBuffer) {
const se = new SoundEffect(data, true);
if (this.list[uri]) {
console.warn(`Repeated sound effect: '${uri}'.`);
}
return (this.list[uri] = se);
}

/**
 * 播放音频
 * @param sound 音效的名称
 * @returns 本次播放的音效的唯一标识符,如果音效不存在返回-1
 */
play(sound: SoundIds, end?: () => void): number {
const se = this.get(sound);
if (!se) return -1;
const index = se.playSE();
if (!has(index)) return -1;
this.seIndex[index] = se;
if (end) se.once('end', end);
se.volumn = core.musicStatus.userVolume;

return index;
}

/**
 * 停止一个音效的播放
 * @param id 音效的唯一标识符
 */
stop(id: number) {
const se = this.seIndex[id];
se.stopByIndex(id);
}

/**
 * 停止一个名称的所有音效的播放
 * @param id 音效名称
 */
stopById(id: SoundIds) {
const se = this.get(id);
se.stopAll();
}

/**
 * 停止所有音效的播放
 */
stopAll() {
Object.values(this.list).forEach(v => v.stopAll());
}

/**
 * 获取一个音效实例
 * @param sound 音效名称
 */
get(sound: SoundIds) {
return this.list[`sounds.${sound}`];
}

getPlaying(sound?: SoundIds) {
if (sound) {
const se = this.get(sound);
return Object.keys(this.seIndex).filter(
v => this.seIndex[v] === se
);
} else {
return Object.keys(this.seIndex);
}
}
}

export const sound = new SoundController();
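The routing documented in SoundEffect.initAudio above (source -> gain -> destination without stereo, source -> panner -> gain -> destination with stereo) is plain Web Audio graph wiring. A minimal standalone sketch of that same chain, using only standard Web Audio APIs; the buffer caching, reuse and event plumbing of the deleted class are omitted:

```ts
// Minimal sketch of the node chain the old SoundEffect built; standard Web Audio only.
// `data` is assumed to be an ArrayBuffer fetched by the caller.
async function playWithPanner(ac: AudioContext, data: ArrayBuffer) {
    const buffer = await ac.decodeAudioData(data);

    const source = ac.createBufferSource();
    source.buffer = buffer;

    // Stereo branch: source -> panner -> gain -> destination.
    const panner = ac.createPanner();
    const gain = ac.createGain();
    source.connect(panner);
    panner.connect(gain);
    gain.connect(ac.destination);

    source.start();
    return source;
}
```

The replacement module introduced later in this commit reaches the same kind of graph through AudioRoute effects (createStereoEffect) instead of a hard-wired panner.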
@ -14,6 +14,7 @@ interface ResourceType {
audio: HTMLAudioElement;
json: any;
zip: JSZip;
byte: Uint8Array;
}

interface ResourceMap {
@ -24,6 +25,7 @@ interface ResourceMap {
audio: AudioResource;
json: JSONResource;
zip: ZipResource;
byte: ByteResource;
}

interface CompressedLoadListItem {
@ -40,7 +42,8 @@ const types: Record<keyof ResourceType, JSZip.OutputType> = {
material: 'blob',
audio: 'arraybuffer',
json: 'string',
zip: 'arraybuffer'
zip: 'arraybuffer',
byte: 'uint8array'
};

const base = import.meta.env.DEV ? '/' : '';
@ -191,6 +194,27 @@ export class BufferResource extends Resource<ArrayBuffer> {
}
}

export class ByteResource extends Resource<Uint8Array> {
/**
 * 创建一个二进制缓冲区资源
 * @param uri 资源的URI,格式为 byte/file,例如 'byte/myBuffer.mp3'
 */
constructor(uri: string) {
super(uri, 'buffer');
}

async load(_onProgress?: ProgressFn): Promise<Uint8Array> {
const response = await fetch(this.resolveURI());
const data = await response.bytes();
this.resource = data;
return data;
}

resolveURI(): string {
return toURL(`${base}${findURL(this.uri)}`);
}
}
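ByteResource.load above relies on Response.prototype.bytes(), a relatively recent addition to the Fetch API. A hedged fallback sketch for environments that do not implement it yet; the fetchBytes helper is illustrative and not part of the codebase:

```ts
// Hypothetical helper with the same result as response.bytes(), usable where
// Response.prototype.bytes() is not implemented. The name is illustrative only.
async function fetchBytes(url: string): Promise<Uint8Array> {
    const response = await fetch(url);
    const maybeBytes = (response as { bytes?: () => Promise<Uint8Array> }).bytes;
    if (typeof maybeBytes === 'function') {
        return maybeBytes.call(response);
    }
    // Fallback: arrayBuffer() is universally available.
    return new Uint8Array(await response.arrayBuffer());
}
```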
export class JSONResource<T = any> extends Resource<T> {
/**
 * 创建一个JSON对象资源
@ -303,7 +327,8 @@ export const resourceTypeMap = {
material: MaterialResource,
audio: AudioResource,
json: JSONResource,
zip: ZipResource
zip: ZipResource,
byte: ByteResource
};

interface LoadEvent<T extends keyof ResourceType> {
@ -501,10 +526,11 @@ export function loadDefaultResource() {
});
// sound
data.main.sounds.forEach(v => {
const res = LoadTask.add('buffer', `buffer/project/sounds/${v}`);
const res = LoadTask.add('byte', `byte/project/sounds/${v}`);
Mota.r(() => {
res.once('load', res => {
Mota.require('var', 'sound').add(`sounds.${v}`, res.resource!);
const { soundPlayer } = Mota.require('module', 'Audio');
soundPlayer.add(v, res.resource!);
});
});
});
@ -691,11 +717,8 @@ export async function loadCompressedResource() {
new FontFace(name.slice(0, -4), font)
);
} else if (usage === 'sound') {
const sound = value as ArrayBuffer;
Mota.require('var', 'sound').add(
`sounds.${name}`,
sound
);
const { soundPlayer } = Mota.require('module', 'Audio');
soundPlayer.add(v, value);
} else if (usage === 'animate') {
const ani = value as string;
core.material.animates[
@ -1,4 +1,3 @@
import { SoundController, SoundEffect, sound } from './audio/sound';
import { Focus, GameUi, UiController } from './main/custom/ui';
import { GameStorage } from './main/storage';
import './main/init/';
@ -14,7 +13,6 @@ import { KeyCode } from '@/plugin/keyCodes';
import { status } from '@/plugin/ui/statusBar';
import '@/plugin';
import './package';
import { AudioPlayer } from './audio/audio';
import { CustomToolbar } from './main/custom/toolbar';
import {
Hotkey,
@ -78,7 +76,6 @@ import { MotaOffscreenCanvas2D } from './fx/canvas2d';
import { TextboxStore } from './render/index';

// ----- 类注册
Mota.register('class', 'AudioPlayer', AudioPlayer);
Mota.register('class', 'CustomToolbar', CustomToolbar);
Mota.register('class', 'Focus', Focus);
Mota.register('class', 'GameStorage', GameStorage);
@ -87,8 +84,6 @@ Mota.register('class', 'Hotkey', Hotkey);
Mota.register('class', 'Keyboard', Keyboard);
Mota.register('class', 'MotaSetting', MotaSetting);
Mota.register('class', 'SettingDisplayer', SettingDisplayer);
Mota.register('class', 'SoundController', SoundController);
Mota.register('class', 'SoundEffect', SoundEffect);
Mota.register('class', 'UiController', UiController);
Mota.register('class', 'MComponent', MComponent);
Mota.register('class', 'ResourceController', ResourceController);
@ -104,7 +99,6 @@ Mota.register('fn', 'removeAnimate', removeAnimate);
// ----- 变量注册
Mota.register('var', 'mainUi', mainUi);
Mota.register('var', 'fixedUi', fixedUi);
Mota.register('var', 'sound', sound);
Mota.register('var', 'gameKey', gameKey);
Mota.register('var', 'mainSetting', mainSetting);
Mota.register('var', 'KeyCode', KeyCode);
@ -23,7 +23,7 @@
"21": "Incorrect render prop type is delivered. key: '$1', expected type: '$2', delivered type: '$3'",
"22": "Incorrect props for custom tag. Please ensure you have delivered 'item' prop and other required props.",
"23": "Cannot get reader when fetching '$1'.",
"24": "Cannot decode stream source type of '$1', since there is no registered decoder for that type.",
"24": "Cannot decode source type of '$1', since there is no registered decoder for that type.",
"25": "Unknown audio type. Header: '$1'",
"26": "Uncaught error when fetching stream data from '$1'. Error info: $2.",
"1101": "Shadow extension needs 'floor-hero' extension as dependency.",
@ -82,6 +82,9 @@
"48": "Sample rate in stream audio must be constant.",
"49": "Repeated patch for '$1', key: '$2'.",
"50": "Unknown audio extension name: '$1'",
"51": "Cannot decode sound '$1', since audio file may not supported by 2.b.",
"52": "Cannot play sound '$1', since there is no added data named it.",
"53": "Cannot $1 audio route '$2', since there is not added route named it.",
"1001": "Item-detail extension needs 'floor-binder' and 'floor-damage' extension as dependency.",
"1101": "Cannot add new effect to point effect instance, for there's no more reserve space for it. Please increase the max count of the instance."
}
@ -11,7 +11,12 @@ interface BgmVolume {
transition: Transition;
}

interface BgmControllerEvent {}
interface BgmControllerEvent {
play: [];
pause: [];
resume: [];
stop: [];
}

export class BgmController<
T extends string = BgmIds
@ -203,6 +208,7 @@ export class BgmController<
if (!this.enabled) return;
this.player.play(this.getId(id), when);
this.playing = true;
this.emit('play');
}

/**
@ -214,6 +220,7 @@ export class BgmController<
this.player.resume(this.getId(this.playingBgm));
}
this.playing = true;
this.emit('resume');
}

/**
@ -225,6 +232,7 @@ export class BgmController<
this.player.pause(this.getId(this.playingBgm));
}
this.playing = false;
this.emit('pause');
}

/**
@ -236,6 +244,7 @@ export class BgmController<
this.player.stop(this.getId(this.playingBgm));
}
this.playing = false;
this.emit('stop');
}
}
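With BgmControllerEvent filled in, the emit('play' | 'pause' | 'resume' | 'stop') calls added above become observable from outside the controller. A small subscription sketch; the on() signature is declared here as an assumption about the EventEmitter base, and bgmController stands for the instance exported by the audio module (the fallback patch later in this commit imports it as { bgmController } from '../audio'):

```ts
// Sketch only: the on() signature is an assumed view of the EventEmitter base,
// and bgmController stands for the instance exported by the audio module.
declare const bgmController: {
    on(event: 'play' | 'pause' | 'resume' | 'stop', fn: () => void): void;
};

bgmController.on('pause', () => console.log('BGM paused'));
bgmController.on('resume', () => console.log('BGM resumed'));
```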
@ -1,8 +1,155 @@
import { logger } from '@/core/common/logger';
import { OggVorbisDecoderWebWorker } from '@wasm-audio-decoders/ogg-vorbis';
import { IAudioDecodeData, IAudioDecoder } from './source';
import { OggOpusDecoderWebWorker } from 'ogg-opus-decoder';
import { AudioType, isAudioSupport } from './support';
import type { AudioPlayer } from './player';

export class VorbisDecoder implements IAudioDecoder {
const fileSignatures: [AudioType, number[]][] = [
[AudioType.Mp3, [0x49, 0x44, 0x33]],
[AudioType.Ogg, [0x4f, 0x67, 0x67, 0x53]],
[AudioType.Wav, [52, 0x49, 0x46, 0x46]],
[AudioType.Flac, [0x66, 0x4c, 0x61, 0x43]],
[AudioType.Aac, [0xff, 0xf1]],
[AudioType.Aac, [0xff, 0xf9]]
];
const oggHeaders: [AudioType, number[]][] = [
[AudioType.Opus, [0x4f, 0x70, 0x75, 0x73, 0x48, 0x65, 0x61, 0x64]]
];

export function checkAudioType(data: Uint8Array) {
let audioType: AudioType | '' = '';
// 检查头文件获取音频类型,仅检查前256个字节
const toCheck = data.slice(0, 256);
for (const [type, value] of fileSignatures) {
if (value.every((v, i) => toCheck[i] === v)) {
audioType = type;
break;
}
}
if (audioType === AudioType.Ogg) {
// 如果是ogg的话,进一步判断是不是opus
for (const [key, value] of oggHeaders) {
const has = toCheck.some((_, i) => {
return value.every((v, ii) => toCheck[i + ii] === v);
});
if (has) {
audioType = key;
break;
}
}
}
return audioType;
}
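checkAudioType above identifies the container purely from magic bytes within the first 256 bytes: ID3 for MP3, OggS for Ogg (then an OpusHead page to tell Opus apart), and so on. A tiny self-contained illustration of the same signature comparison, using the MP3 entry from the table above:

```ts
// Self-contained illustration of the signature check used by checkAudioType:
// compare the first bytes of the data against a known magic number.
function startsWith(data: Uint8Array, signature: number[]): boolean {
    return signature.every((byte, i) => data[i] === byte);
}

const id3 = [0x49, 0x44, 0x33]; // "ID3" — the MP3 entry from the table above
const header = new Uint8Array([0x49, 0x44, 0x33, 0x04, 0x00, 0x00]);
console.log(startsWith(header, id3)); // true → would resolve to AudioType.Mp3
```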
export interface IAudioDecodeError {
/** 错误信息 */
message: string;
}

export interface IAudioDecodeData {
/** 每个声道的音频信息 */
channelData: Float32Array[];
/** 已经被解码的 PCM 采样数 */
samplesDecoded: number;
/** 音频采样率 */
sampleRate: number;
/** 解码错误信息 */
errors: IAudioDecodeError[];
}

export abstract class AudioDecoder {
static readonly decoderMap: Map<AudioType, new () => AudioDecoder> =
new Map();

/**
 * 注册一个解码器
 * @param type 要注册的解码器允许解码的类型
 * @param decoder 解码器对象
 */
static registerDecoder(type: AudioType, decoder: new () => AudioDecoder) {
if (this.decoderMap.has(type)) {
logger.warn(47, type);
return;
}
this.decoderMap.set(type, decoder);
}

/**
 * 解码音频数据
 * @param data 音频文件数据
 * @param player AudioPlayer实例
 */
static async decodeAudioData(data: Uint8Array, player: AudioPlayer) {
// 检查头文件获取音频类型,仅检查前256个字节
const toCheck = data.slice(0, 256);
const type = checkAudioType(data);
if (type === '') {
logger.error(
25,
[...toCheck]
.map(v => v.toString().padStart(2, '0'))
.join(' ')
.toUpperCase()
);
return null;
}
if (isAudioSupport(type)) {
if (data.buffer instanceof ArrayBuffer) {
return player.ac.decodeAudioData(data.buffer);
} else {
return null;
}
} else {
const Decoder = this.decoderMap.get(type);
if (!Decoder) {
return null;
} else {
const decoder = new Decoder();
await decoder.create();
const decodedData = await decoder.decode(data);
if (!decodedData) return null;
const buffer = player.ac.createBuffer(
decodedData.channelData.length,
decodedData.channelData[0].length,
decodedData.sampleRate
);
decodedData.channelData.forEach((v, i) => {
buffer.copyToChannel(v, i);
});
return buffer;
}
}
}

/**
 * 创建音频解码器
 */
abstract create(): Promise<void>;

/**
 * 摧毁这个解码器
 */
abstract destroy(): void;

/**
 * 解码流数据
 * @param data 流数据
 */
abstract decode(data: Uint8Array): Promise<IAudioDecodeData | undefined>;

/**
 * 解码整个文件
 * @param data 文件数据
 */
abstract decodeAll(data: Uint8Array): Promise<IAudioDecodeData | undefined>;

/**
 * 当音频解码完成后,会调用此函数,需要返回之前还未解析或未返回的音频数据。调用后,该解码器将不会被再次使用
 */
abstract flush(): Promise<IAudioDecodeData | undefined>;
}

export class VorbisDecoder implements AudioDecoder {
decoder?: OggVorbisDecoderWebWorker;

async create(): Promise<void> {
@ -18,12 +165,16 @@ export class VorbisDecoder implements IAudioDecoder {
return this.decoder?.decode(data);
}

async decodeAll(data: Uint8Array): Promise<IAudioDecodeData | undefined> {
return this.decoder?.decodeFile(data);
}

async flush(): Promise<IAudioDecodeData | undefined> {
return this.decoder?.flush();
}
}

export class OpusDecoder implements IAudioDecoder {
export class OpusDecoder implements AudioDecoder {
decoder?: OggOpusDecoderWebWorker;

async create(): Promise<void> {
@ -39,6 +190,10 @@ export class OpusDecoder implements IAudioDecoder {
return this.decoder?.decode(data);
}

async decodeAll(data: Uint8Array): Promise<IAudioDecodeData | undefined> {
return this.decoder?.decodeFile(data);
}

async flush(): Promise<IAudioDecodeData | undefined> {
return await this.decoder?.flush();
}
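The new abstract AudioDecoder centralises the decoder registry that previously lived on AudioStreamSource, and VorbisDecoder / OpusDecoder above plug into it. As a sketch of how another format could be wired in with the same API — the FlacDecoder class, its internals, and the import paths are assumptions, not part of this commit:

```ts
// Paths and the FlacDecoder class are assumptions for illustration only.
import { AudioDecoder, IAudioDecodeData } from './decoder';
import { AudioType } from './support';

class FlacDecoder extends AudioDecoder {
    async create(): Promise<void> {
        // e.g. spin up a WASM decoder worker
    }
    destroy(): void {
        // release the worker and any buffers
    }
    async decode(data: Uint8Array): Promise<IAudioDecodeData | undefined> {
        // decode one streamed chunk (omitted)
        return undefined;
    }
    async decodeAll(data: Uint8Array): Promise<IAudioDecodeData | undefined> {
        // decode a complete file (omitted)
        return undefined;
    }
    async flush(): Promise<IAudioDecodeData | undefined> {
        // hand back whatever is still buffered (omitted)
        return undefined;
    }
}

// Same registration call the audio module index makes for Ogg and Opus.
AudioDecoder.registerDecoder(AudioType.Flac, FlacDecoder);
```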
@ -1,11 +1,11 @@
import { loadAllBgm } from './bgm';
import { OpusDecoder, VorbisDecoder } from './decoder';
import { AudioStreamSource } from './source';
import { AudioType } from './support';
import { AudioDecoder } from './decoder';

loadAllBgm();
AudioStreamSource.registerDecoder(AudioType.Ogg, VorbisDecoder);
AudioStreamSource.registerDecoder(AudioType.Opus, OpusDecoder);
AudioDecoder.registerDecoder(AudioType.Ogg, VorbisDecoder);
AudioDecoder.registerDecoder(AudioType.Opus, OpusDecoder);

export * from './support';
export * from './effect';
@ -13,3 +13,4 @@ export * from './player';
export * from './source';
export * from './bgm';
export * from './decoder';
export * from './sound';
@ -17,6 +17,7 @@ import {
import { isNil } from 'lodash-es';
import { logger } from '@/core/common/logger';
import { sleep } from 'mutate-animate';
import { AudioDecoder } from './decoder';

interface AudioPlayerEvent {}

@ -36,6 +37,14 @@ export class AudioPlayer extends EventEmitter<AudioPlayerEvent> {
this.gain.connect(this.ac.destination);
}

/**
 * 解码音频数据
 * @param data 音频数据
 */
decodeAudioData(data: Uint8Array) {
return AudioDecoder.decodeAudioData(data, this);
}

/**
 * 设置音量
 * @param volume 音量
@ -207,7 +216,12 @@ export class AudioPlayer extends EventEmitter<AudioPlayerEvent> {
 * @param when 从音频的哪个位置开始播放,单位秒
 */
play(id: string, when: number = 0) {
this.getRoute(id)?.play(when);
const route = this.getRoute(id);
if (!route) {
logger.warn(53, 'play', id);
return;
}
route.play(when);
}

/**
@ -217,8 +231,11 @@
 */
pause(id: string) {
const route = this.getRoute(id);
if (!route) return Promise.resolve();
else return route.pause();
if (!route) {
logger.warn(53, 'pause', id);
return;
}
return route.pause();
}

/**
@ -228,8 +245,11 @@
 */
stop(id: string) {
const route = this.getRoute(id);
if (!route) return Promise.resolve();
else return route.stop();
if (!route) {
logger.warn(53, 'stop', id);
return;
}
return route.stop();
}

/**
@ -237,11 +257,16 @@
 * @param id 音频名称
 */
resume(id: string) {
this.getRoute(id)?.resume();
const route = this.getRoute(id);
if (!route) {
logger.warn(53, 'play', id);
return;
}
route.resume();
}

/**
 * 设置听者位置,x正方形水平向右,y正方形垂直于地面向上,z正方向垂直屏幕远离用户
 * 设置听者位置,x正方向水平向右,y正方向垂直于地面向上,z正方向垂直屏幕远离用户
 * @param x 位置x坐标
 * @param y 位置y坐标
 * @param z 位置z坐标
@ -254,7 +279,7 @@
}

/**
 * 设置听者朝向,x正方形水平向右,y正方形垂直于地面向上,z正方向垂直屏幕远离用户
 * 设置听者朝向,x正方向水平向右,y正方向垂直于地面向上,z正方向垂直屏幕远离用户
 * @param x 朝向x坐标
 * @param y 朝向y坐标
 * @param z 朝向z坐标
@ -267,7 +292,7 @@
}

/**
 * 设置听者头顶朝向,x正方形水平向右,y正方形垂直于地面向上,z正方向垂直屏幕远离用户
 * 设置听者头顶朝向,x正方向水平向右,y正方向垂直于地面向上,z正方向垂直屏幕远离用户
 * @param x 头顶朝向x坐标
 * @param y 头顶朝向y坐标
 * @param z 头顶朝向z坐标
@ -316,6 +341,11 @@ export class AudioRoute
/** 暂停时刻 */
private pauseTime: number = 0;

/** 音频时长,单位秒 */
get duration() {
return this.source.duration;
}

private shouldStop: boolean = false;
/**
 * 每次暂停或停止时自增,用于判断当前正在处理的情况。
@ -423,8 +453,6 @@ export class AudioRoute
this.status === AudioStatus.Pausing ||
this.status === AudioStatus.Stoping
) {
console.log(1);

this.audioStartHook?.(this);
this.emit('resume');
return;

src/module/audio/sound.ts (new file, 100 lines)
@ -0,0 +1,100 @@
import EventEmitter from 'eventemitter3';
import { audioPlayer, AudioPlayer } from './player';
import { logger } from '@/core/common/logger';

type LocationArray = [number, number, number];

interface SoundPlayerEvent {}

export class SoundPlayer<
T extends string = SoundIds
> extends EventEmitter<SoundPlayerEvent> {
/** 每个音效的唯一标识符 */
private num: number = 0;

/** 每个音效的数据 */
readonly buffer: Map<T, AudioBuffer> = new Map();
/** 所有正在播放的音乐 */
readonly playing: Set<number> = new Set();

constructor(public readonly player: AudioPlayer) {
super();
}

/**
 * 添加一个音效
 * @param id 音效名称
 * @param data 音效的Uint8Array数据
 */
async add(id: T, data: Uint8Array) {
const buffer = await this.player.decodeAudioData(data);
if (!buffer) {
logger.warn(51, id);
return;
}
this.buffer.set(id, buffer);
}

/**
 * 播放一个音效
 * @param id 音效名称
 * @param position 音频位置,[0, 0, 0]表示正中心,x轴指向水平向右,y轴指向水平向上,z轴指向竖直向上
 * @param orientation 音频朝向,[0, 1, 0]表示朝向前方
 */
play(
id: T,
position: LocationArray = [0, 0, 0],
orientation: LocationArray = [1, 0, 0]
) {
const buffer = this.buffer.get(id);
if (!buffer) {
logger.warn(52, id);
return -1;
}
const soundNum = this.num++;
const source = this.player.createBufferSource();
source.setBuffer(buffer);
const route = this.player.createRoute(source);
const stereo = this.player.createStereoEffect();
stereo.setPosition(position[0], position[1], position[2]);
stereo.setOrientation(orientation[0], orientation[1], orientation[2]);
route.addEffect(stereo);
this.player.addRoute(`sounds.${soundNum}`, route);
route.play();
source.output.addEventListener('ended', () => {
this.playing.delete(soundNum);
});
this.playing.add(soundNum);
return soundNum;
}

/**
 * 停止一个音效
 * @param num 音效的唯一 id
 */
stop(num: number) {
const id = `sounds.${num}`;
const route = this.player.getRoute(id);
if (route) {
route.stop();
this.player.removeRoute(id);
this.playing.delete(num);
}
}

/**
 * 停止播放所有音效
 */
stopAllSounds() {
this.playing.forEach(v => {
const id = `sounds.${v}`;
const route = this.player.getRoute(id);
if (route) {
route.stop();
this.player.removeRoute(id);
}
});
this.playing.clear();
}
}
export const soundPlayer = new SoundPlayer<SoundIds>(audioPlayer);
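A short usage sketch for the SoundPlayer above: decode one effect into the shared soundPlayer instance exported directly above and play it slightly to the listener's right. The sound id and URL are placeholders, not real project assets, and SoundIds is whatever the project's id union resolves to:

```ts
// Sketch: the sound id and URL are placeholders; soundPlayer is the instance
// exported directly above.
async function loadAndPlayDemo() {
    const response = await fetch('project/sounds/jump.mp3');
    const data = new Uint8Array(await response.arrayBuffer());
    await soundPlayer.add('jump.mp3' as SoundIds, data);

    // Play to the listener's right, facing forward.
    const num = soundPlayer.play('jump.mp3' as SoundIds, [1, 0, 0], [0, 1, 0]);
    if (num !== -1) {
        // Stop it manually after two seconds.
        setTimeout(() => soundPlayer.stop(num), 2000);
    }
}
```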
@ -5,6 +5,7 @@ import { logger } from '@/core/common/logger';
import { AudioType } from './support';
import CodecParser, { CodecFrame, MimeType, OggPage } from 'codec-parser';
import { isNil } from 'lodash-es';
import { IAudioDecodeData, AudioDecoder, checkAudioType } from './decoder';

interface AudioSourceEvent {
play: [];
@ -21,6 +22,9 @@ export abstract class AudioSource
/** 是否正在播放 */
playing: boolean = false;

/** 获取音频时长 */
abstract get duration(): number;

constructor(public readonly ac: AudioContext) {
super();
}
@ -49,57 +53,6 @@ export abstract class AudioSource
abstract setLoop(loop: boolean): void;
}

export interface IAudioDecodeError {
/** 错误信息 */
message: string;
}

export interface IAudioDecodeData {
/** 每个声道的音频信息 */
channelData: Float32Array[];
/** 已经被解码的 PCM 采样数 */
samplesDecoded: number;
/** 音频采样率 */
sampleRate: number;
/** 解码错误信息 */
errors: IAudioDecodeError[];
}

export interface IAudioDecoder {
/**
 * 创建音频解码器
 */
create(): Promise<void>;

/**
 * 摧毁这个解码器
 */
destroy(): void;

/**
 * 解码流数据
 * @param data 流数据
 */
decode(data: Uint8Array): Promise<IAudioDecodeData | undefined>;

/**
 * 当音频解码完成后,会调用此函数,需要返回之前还未解析或未返回的音频数据。调用后,该解码器将不会被再次使用
 */
flush(): Promise<IAudioDecodeData | undefined>;
}

const fileSignatures: [AudioType, number[]][] = [
[AudioType.Mp3, [0x49, 0x44, 0x33]],
[AudioType.Ogg, [0x4f, 0x67, 0x67, 0x53]],
[AudioType.Wav, [52, 0x49, 0x46, 0x46]],
[AudioType.Flac, [0x66, 0x4c, 0x61, 0x43]],
[AudioType.Aac, [0xff, 0xf1]],
[AudioType.Aac, [0xff, 0xf9]]
];
const oggHeaders: [AudioType, number[]][] = [
[AudioType.Opus, [0x4f, 0x70, 0x75, 0x73, 0x48, 0x65, 0x61, 0x64]]
];

const mimeTypeMap: Record<AudioType, MimeType> = {
[AudioType.Aac]: 'audio/aac',
[AudioType.Flac]: 'audio/flac',
@ -114,8 +67,6 @@ function isOggPage(data: any): data is OggPage {
}

export class AudioStreamSource extends AudioSource implements IStreamReader {
static readonly decoderMap: Map<AudioType, new () => IAudioDecoder> =
new Map();
output: AudioBufferSourceNode;

/** 音频数据 */
@ -129,6 +80,8 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
bufferedSamples: number = 0;
/** 歌曲时长,加载完毕之前保持为 0 */
duration: number = 0;
/** 当前已经播放了多长时间 */
// readonly currentTime: number = -1;
/** 在流传输阶段,至少缓冲多长时间的音频之后才开始播放,单位秒 */
bufferPlayDuration: number = 1;
/** 音频的采样率,未成功解析出之前保持为 0 */
@ -149,7 +102,7 @@
/** 音频类型 */
private audioType: AudioType | '' = '';
/** 音频解码器 */
private decoder?: IAudioDecoder;
private decoder?: AudioDecoder;
/** 音频解析器 */
private parser?: CodecParser;
/** 每多长时间组成一个缓存 Float32Array */
@ -159,19 +112,6 @@

private errored: boolean = false;

/**
 * 注册一个解码器
 * @param type 要注册的解码器允许解码的类型
 * @param decoder 解码器对象
 */
static registerDecoder(type: AudioType, decoder: new () => IAudioDecoder) {
if (this.decoderMap.has(type)) {
logger.warn(47, type);
return;
}
this.decoderMap.set(type, decoder);
}

constructor(context: AudioContext) {
super(context);
this.output = context.createBufferSource();
@ -195,24 +135,7 @@
if (!this.headerRecieved) {
// 检查头文件获取音频类型,仅检查前256个字节
const toCheck = data.slice(0, 256);
for (const [type, value] of fileSignatures) {
if (value.every((v, i) => toCheck[i] === v)) {
this.audioType = type;
break;
}
}
if (this.audioType === AudioType.Ogg) {
// 如果是ogg的话,进一步判断是不是opus
for (const [key, value] of oggHeaders) {
const has = toCheck.some((_, i) => {
return value.every((v, ii) => toCheck[i + ii] === v);
});
if (has) {
this.audioType = key;
break;
}
}
}
this.audioType = checkAudioType(data);
if (!this.audioType) {
logger.error(
25,
@ -224,7 +147,7 @@
return;
}
// 创建解码器
const Decoder = AudioStreamSource.decoderMap.get(this.audioType);
const Decoder = AudioDecoder.decoderMap.get(this.audioType);
if (!Decoder) {
this.errored = true;
logger.error(24, this.audioType);
@ -280,7 +203,7 @@
 */
private async decodeData(
data: Uint8Array,
decoder: IAudioDecoder,
decoder: AudioDecoder,
parser: CodecParser
) {
// 解析音频数据
@ -301,7 +224,7 @@
/**
 * 解码剩余数据
 */
private async decodeFlushData(decoder: IAudioDecoder, parser: CodecParser) {
private async decodeFlushData(decoder: AudioDecoder, parser: CodecParser) {
const audioData = await decoder.flush();
if (!audioData) return;
// @ts-expect-error 库类型声明错误
@ -375,7 +298,6 @@
return;
}
if (dt < this.bufferPlayDuration) return;
console.log(played, this.lastBufferSamples, this.sampleRate);
this.lastBufferSamples = this.bufferedSamples;
// 需要播放
this.mergeBuffers();
@ -439,8 +361,6 @@
this.loaded = true;
delete this.controller;
this.mergeBuffers();
// const played = this.lastBufferSamples / this.sampleRate;
// this.playAudio(played);
this.duration = this.buffered;
this.audioData = [];
this.decoder?.destroy();
@ -510,6 +430,13 @@ export class AudioElementSource extends AudioSource {
/** audio 元素 */
readonly audio: HTMLAudioElement;

get duration(): number {
return this.audio.duration;
}
get currentTime(): number {
return this.audio.currentTime;
}

constructor(context: AudioContext) {
super(context);
const audio = new Audio();
@ -564,6 +491,9 @@ export class AudioBufferSource extends AudioSource {
/** 是否循环 */
private loop: boolean = false;

duration: number = 0;
// readonly currentTime: number = -1;

/** 播放开始时刻 */
private lastStartTime: number = 0;
private target?: IAudioInput;
@ -583,6 +513,7 @@
} else {
this.buffer = buffer;
}
this.duration = this.buffer.duration;
}

play(when?: number): void {
@ -1,6 +1,8 @@
import { Patch, PatchClass } from '@/common/patch';
import { bgmController } from '../audio';
import { audioPlayer, bgmController, soundPlayer } from '../audio';
import { mainSetting } from '@/core/main/setting';
import { sleep } from 'mutate-animate';
import { isNil } from 'lodash-es';

export function patchAudio() {
const patch = new Patch(PatchClass.Control);
@ -37,4 +39,30 @@ export function patchAudio() {
if (bgmController.playing) bgmController.pause();
else bgmController.resume();
});

patch.add(
'playSound',
function (sound, _pitch, callback, position, orientation) {
const name = core.getMappedName(sound) as SoundIds;
const num = soundPlayer.play(name, position, orientation);
const route = audioPlayer.getRoute(`sounds.${num}`);
if (!route) {
callback?.();
return -1;
} else {
sleep(route.duration).then(() => callback?.());
return num;
}
}
);
patch.add('stopSound', function (id) {
if (isNil(id)) {
soundPlayer.stopAllSounds();
} else {
soundPlayer.stop(id);
}
});
patch.add('getPlayingSounds', function () {
return [...soundPlayer.playing];
});
}
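After this patch the legacy Control API keeps its shape but is serviced by soundPlayer and audioPlayer. A sketch of a call site under the new signature; the sound id and coordinates are placeholders, and the patched implementation above ignores the pitch argument:

```ts
// Placeholder id and coordinates; pitch is ignored by the patched implementation above.
const id = core.playSound(
    'jump.mp3' as SoundIds,
    undefined,
    () => console.log('sound finished'),
    [1, 0, 0], // position
    [0, 1, 0]  // orientation
);
if (id !== -1) {
    // Stop it again straight away, just to show how the returned id pairs with stopSound.
    core.stopSound(id);
}
```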
@ -1,3 +1,4 @@
import { soundPlayer } from './audio';
import { patchAll } from './fallback';
import { controller } from './weather';
import { RainWeather } from './weather/rain';
@ -9,6 +10,7 @@ Mota.register('module', 'Weather', {
WeatherController,
RainWeather
});
Mota.register('module', 'Audio', { soundPlayer });

export * from './weather';
export * from './audio';
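Registering { soundPlayer } under the 'Audio' module is what lets the resource loader fetch the player without a static import. The retrieval side, mirroring the calls already shown in loadDefaultResource / loadCompressedResource (the sound id and data here are placeholders):

```ts
// Placeholder id and data; Mota.require mirrors the loader code earlier in this commit.
declare const data: Uint8Array; // stands in for bytes produced by the resource loader
const { soundPlayer } = Mota.require('module', 'Audio');
soundPlayer.add('jump.mp3', data);
```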
src/types/control.d.ts (vendored, 11 lines changed)
@ -959,7 +959,7 @@ interface Control {
/**
 * 获得映射文件名
 */
getMappedName<K extends keyof NameMap>(name: K): NameMap[K];
getMappedName(name: string): string;

/**
 * @deprecated
@ -1063,12 +1063,16 @@
 * @param sound 音效名
 * @param pitch 音调,同时会修改播放速度,100为原速
 * @param callback 回调函数
 * @param position 音频播放位置
 * @param orientation 音频播放朝向
 * @returns 音效的唯一标识符,用于停止音效等操作
 */
playSound(
sound: SoundIds | NameMapIn<SoundIds>,
pitch?: number,
callback?: () => void
callback?: () => void,
position?: [number, number, number],
orientation?: [number, number, number]
): number;

/**
@ -1081,9 +1085,8 @@
/**
 * @deprecated
 * 获得正在播放的所有音效的id列表
 * @param name 要获得的音效名
 */
getPlayingSounds(name?: SoundIds | NameMapIn<SoundIds>): number[];
getPlayingSounds(): number[];

/**
 * @deprecated