Compare commits

..

11 Commits

Author SHA1 Message Date
a058dfda4a feat: 根据支持情况选择播放方式 2025-01-15 23:14:54 +08:00
307bf8a10d feat: All intrinsic audio effects 2025-01-15 21:53:54 +08:00
5efa60a0de fix: All bug for opus and ogg decoder 2025-01-15 21:29:29 +08:00
88c5e39f5c feat: ogg opus 解码器 2025-01-15 17:22:53 +08:00
5265b0a90e refactor: 新的音频系统 2025-01-13 22:24:40 +08:00
231a72e78c feat: 流式加载器 & fix: gl2 注释 2025-01-13 16:38:28 +08:00
AncTe
6dde0334e1
Merge pull request #3 from unanmed/dev-strawberry
feat(dev-strawberry): graphics & 图标 & winskin & 事件
2025-01-12 15:45:28 +08:00
7d8cbac246 perf: 子元素渲染性能 2025-01-12 12:03:53 +08:00
ba30d111a5 perf: 数组解构比分别赋值更快 2025-01-11 17:23:55 +08:00
babb478542 feat: ogg opus 解码库 2025-01-10 21:13:11 +08:00
27af3ff5d1 fix: audio提前加载占用流量 2025-01-09 14:55:18 +08:00
25 changed files with 3511 additions and 76 deletions

View File

@ -22,6 +22,7 @@
"ant-design-vue": "^3.2.20",
"axios": "^1.7.4",
"chart.js": "^4.4.3",
"codec-parser": "^2.5.0",
"eventemitter3": "^5.0.1",
"gl-matrix": "^3.4.3",
"gsap": "^3.12.5",
@ -29,6 +30,8 @@
"lodash-es": "^4.17.21",
"lz-string": "^1.5.0",
"mutate-animate": "^1.4.2",
"ogg-opus-decoder": "^1.6.14",
"opus-decoder": "^0.7.7",
"vue": "^3.5.13"
},
"devDependencies": {
@ -69,6 +72,8 @@
"typescript-eslint": "^8.18.2",
"unplugin-vue-components": "^0.22.12",
"vite": "^4.5.3",
"vite-plugin-dts": "^4.4.0",
"vitepress": "^1.5.0",
"vue-tsc": "^2.1.6",
"ws": "^8.18.0"
}

File diff suppressed because it is too large Load Diff

View File

@ -38,7 +38,7 @@ var data_comment_c456ea59_6018_45ef_8bcc_211a24c627dc = {
"_range": "editor.mode.checkImages(thiseval, './project/images/')",
"_directory": "./project/images/",
"_transform": (function (one) {
if (one.endsWith('.png') || one.endsWith('.jpg') || one.endsWith('.jpeg') || one.endsWith('.gif'))
if (one.endsWith('.png') || one.endsWith('.jpg') || one.endsWith('.jpeg') || one.endsWith('.gif') || one.endsWith('.webp'))
return one;
return null;
}).toString(),
@ -96,7 +96,7 @@ var data_comment_c456ea59_6018_45ef_8bcc_211a24c627dc = {
"_range": "editor.mode.checkUnique(thiseval)",
"_directory": "./project/bgms/",
"_transform": (function (one) {
if (one.endsWith('.mp3') || one.endsWith('.ogg') || one.endsWith('.wav') || one.endsWith('.m4a') || one.endsWith('.flac'))
if (one.endsWith('.mp3') || one.endsWith('.ogg') || one.endsWith('.wav') || one.endsWith('.m4a') || one.endsWith('.flac') || one.endsWith('.opus'))
return one;
return null;
}).toString(),

View File

@ -193,13 +193,14 @@ var data_a1e2fb4a_e986_4524_b0da_9b7ba7c0874d =
"zone"
],
"bgms": [
"beforeBoss.mp3",
"beforeBoss.opus",
"cave.mp3",
"escape.mp3",
"escape2.mp3",
"grass.mp3",
"mount.mp3",
"mount.opus",
"night.mp3",
"output6.ogg",
"palaceCenter.mp3",
"palaceNorth.mp3",
"palaceSouth.mp3",

View File

@ -100,7 +100,7 @@ export class ImageResource extends Resource<HTMLImageElement> {
super(uri, 'image');
}
load(onProgress?: ProgressFn): Promise<HTMLImageElement> {
load(_onProgress?: ProgressFn): Promise<HTMLImageElement> {
const img = new Image();
img.src = this.resolveURI();
this.resource = img;
@ -229,9 +229,10 @@ export class AudioResource extends Resource<HTMLAudioElement> {
super(uri, 'audio');
}
load(onProgress?: ProgressFn): Promise<HTMLAudioElement> {
load(_onProgress?: ProgressFn): Promise<HTMLAudioElement> {
const audio = new Audio();
audio.src = this.resolveURI();
audio.preload = 'none';
this.resource = audio;
return new Promise<HTMLAudioElement>(res => {
this.loaded = true;
@ -383,13 +384,12 @@ export class LoadTask<
}
this.loaded = now;
})
.catch(reason => {
.catch(() => {
LoadTask.errorTask++;
logger.error(2, this.resource.type, this.resource.uri);
});
this.emit('loadStart', this.resource);
const value = await load;
// @ts-ignore
LoadTask.loadedTaskList.add(this);
this.loaded = totalByte;
LoadTask.loadedTask++;
@ -407,7 +407,6 @@ export class LoadTask<
uri: string
): LoadTask<T> {
const task = new LoadTask(type, uri);
// @ts-ignore
this.taskList.add(task);
return task;
}
@ -541,7 +540,7 @@ export function loadDefaultResource() {
.forEach(v => {
const res = LoadTask.add('material', `material/${v}`);
res.once('load', res => {
// @ts-ignore
// @ts-expect-error 不能推导
core.material.images[
v.slice(0, -4) as SelectKey<
MaterialImages,
@ -554,7 +553,7 @@ export function loadDefaultResource() {
weathers.forEach(v => {
const res = LoadTask.add('material', `material/${v}.png`);
res.once('load', res => {
// @ts-ignore
// @ts-expect-error 需要赋值
core.animateFrame.weather[v] = res.resource;
});
});
@ -674,7 +673,6 @@ export async function loadCompressedResource() {
// material
if (materialImages.some(v => name === v + '.png')) {
// @ts-ignore
core.material.images[
name.slice(0, -4) as SelectKey<
MaterialImages,
@ -682,7 +680,7 @@ export async function loadCompressedResource() {
>
] = image;
} else if (weathers.some(v => name === v + '.png')) {
// @ts-ignore
// @ts-expect-error 需要赋值
core.animateFrame.weather[v] = image;
}
}

View File

@ -107,5 +107,5 @@ export function buildFont(
weight: number = 500,
italic: boolean = false
) {
return `${italic ? 'italic ' : ''}${weight} ${size}px ${family}`;
return `${italic ? 'italic ' : ''}${weight} ${size}px "${family}"`;
}

View File

@ -36,13 +36,9 @@ export class Container<E extends EContainerEvent = EContainerEvent>
canvas: MotaOffscreenCanvas2D,
transform: Transform
): void {
const { ctx } = canvas;
this.sortedChildren.forEach(v => {
if (v.hidden) return;
ctx.save();
v.renderContent(canvas, transform);
ctx.restore();
});
}

View File

@ -248,6 +248,7 @@ export abstract class GL2<E extends EGL2Event = EGL2Event> extends RenderItem<
case RenderMode.Arrays: {
const { mode, first, count } = param as DrawArraysParam;
gl.drawArrays(mode, first, count);
break;
}
case RenderMode.Elements: {
if (!indices) return;
@ -255,11 +256,13 @@ export abstract class GL2<E extends EGL2Event = EGL2Event> extends RenderItem<
param as DrawElementsParam;
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indices.data);
gl.drawElements(mode, count, type, offset);
break;
}
case RenderMode.ArraysInstanced: {
const { mode, first, count, instanceCount } =
param as DrawArraysInstancedParam;
gl.drawArraysInstanced(mode, first, count, instanceCount);
break;
}
case RenderMode.ElementsInstanced: {
if (!indices) return;
@ -272,6 +275,7 @@ export abstract class GL2<E extends EGL2Event = EGL2Event> extends RenderItem<
} = param as DrawElementsInstancedParam;
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indices.data);
gl.drawElementsInstanced(mode, count, type, offset, ins);
break;
}
}
}
@ -714,12 +718,8 @@ class ShaderUniform<T extends UniformType> implements IShaderUniform<T> {
) {}
set(...params: UniformSetFn[T]): void {
this.gl.vertexAttribIPointer;
// 因为ts类型推导的限制类型肯定正确但是推导不出所以这里直接 as any 屏蔽掉类型推导
const x0 = params[0] as any;
const x1 = params[1] as any;
const x2 = params[2] as any;
const x3 = params[3] as any;
const [x0, x1, x2, x3] = params as any[];
switch (this.type) {
case UniformType.Uniform1f:
this.gl.uniform1f(this.location, x0);
@ -807,10 +807,7 @@ class ShaderAttrib<T extends AttribType> implements IShaderAttrib<T> {
set(...params: AttribSetFn[T]) {
// 因为ts类型推导的限制类型肯定正确但是推导不出所以这里直接 as any 屏蔽掉类型推导
const x0 = params[0] as any;
const x1 = params[1] as any;
const x2 = params[2] as any;
const x3 = params[3] as any;
const [x0, x1, x2, x3] = params as any[];
switch (this.type) {
case AttribType.Attrib1f:
this.gl.vertexAttrib1f(this.location, x0);
@ -1035,10 +1032,10 @@ class ShaderUniformBlock implements IShaderUniformBlock {
const buffer = this.buffer;
gl.bindBuffer(gl.UNIFORM_BUFFER, buffer);
if (srcOffset !== void 0) {
// @ts-ignore
// @ts-expect-error 无法推断
gl.bufferSubData(gl.UNIFORM_BUFFER, 0, srcData, srcOffset, length);
} else {
// @ts-ignore
// @ts-expect-error 无法推断
gl.bufferSubData(gl.UNIFORM_BUFFER, 0, srcData);
}
gl.bindBufferBase(gl.UNIFORM_BUFFER, this.binding, buffer);
@ -1478,7 +1475,7 @@ export class GL2Program extends EventEmitter<ShaderProgramEvent> {
/**
* uniform uniform
* @param uniform uniform
* @param type uniform {@link Shader.UNIFORM_1f} {@link Shader.UNIFORM_4uiv}
* @param type uniform {@link GL2.UNIFORM_1f} {@link GL2.UNIFORM_4uiv}
* @returns uniform
*/
defineUniform<T extends UniformType>(
@ -1506,7 +1503,7 @@ export class GL2Program extends EventEmitter<ShaderProgramEvent> {
/**
* uniform uniform
* @param uniform uniform
* @param type uniform {@link Shader.U_MATRIX_2x2} {@link Shader.U_MATRIX_4x4}
* @param type uniform {@link GL2.U_MATRIX_2x2} {@link GL2.U_MATRIX_4x4}
* @returns uniform
*/
defineUniformMatrix(
@ -1534,7 +1531,7 @@ export class GL2Program extends EventEmitter<ShaderProgramEvent> {
/**
* attribute attribute es 300 in
* @param attrib attribute
* @param type attribute {@link Shader.Attrib1f} {@link Shader.AttribI4uiv}
* @param type attribute {@link GL2.ATTRIB_1f} {@link GL2.ATTRIB_I4uiv}
* @returns attribute
*/
defineAttribute<T extends AttribType>(

View File

@ -69,7 +69,6 @@ Mota.require('var', 'loading').once('coreInit', () => {
</layer-group>
<Textbox id="main-textbox" {...mainTextboxProps}></Textbox>
<FloorChange id="floor-change" zIndex={50}></FloorChange>
<icon icon={13} animate></icon>
</container>
);
});

View File

@ -140,8 +140,6 @@ interface IRenderVueSupport {
}
export interface ERenderItemEvent {
beforeUpdate: [item?: RenderItem];
afterUpdate: [item?: RenderItem];
beforeRender: [transform: Transform];
afterRender: [transform: Transform];
destroy: [];
@ -344,7 +342,7 @@ export abstract class RenderItem<E extends ERenderItemEvent = ERenderItemEvent>
canvas.ctx.translate(ax, ay);
this.render(canvas, tran);
}
canvas.ctx.restore();
ctx.restore();
this.emit('afterRender', transform);
}
@ -389,6 +387,7 @@ export abstract class RenderItem<E extends ERenderItemEvent = ERenderItemEvent>
* 使
*/
getAbsolutePosition(): LocArr {
if (this.type === 'absolute') return [0, 0];
const { x, y } = this.transform;
if (!this.parent) return [x, y];
else {
@ -400,13 +399,15 @@ export abstract class RenderItem<E extends ERenderItemEvent = ERenderItemEvent>
setAnchor(x: number, y: number): void {
this.anchorX = x;
this.anchorY = y;
this.update();
}
update(item: RenderItem<any> = this, force: boolean = false): void {
if ((this.needUpdate || this.hidden) && !force) return;
update(item: RenderItem<any> = this): void {
if (this.needUpdate) return;
this.needUpdate = true;
this.cacheDirty = true;
this.parent?.update(item, force);
if (this.hidden) return;
this.parent?.update(item);
}
setHD(hd: boolean): void {
@ -475,7 +476,7 @@ export abstract class RenderItem<E extends ERenderItemEvent = ERenderItemEvent>
hide() {
if (this.hidden) return;
this.hidden = true;
this.update(this, true);
this.update(this);
}
/**
@ -484,13 +485,13 @@ export abstract class RenderItem<E extends ERenderItemEvent = ERenderItemEvent>
show() {
if (!this.hidden) return;
this.hidden = false;
this.refreshAllChildren(true);
this.refreshAllChildren();
}
/**
*
*/
refreshAllChildren(force: boolean = false) {
refreshAllChildren() {
if (this.children.size > 0) {
const stack: RenderItem[] = [this];
while (stack.length > 0) {
@ -500,7 +501,7 @@ export abstract class RenderItem<E extends ERenderItemEvent = ERenderItemEvent>
item.children.forEach(v => stack.push(v));
}
}
this.update(this, force);
this.update(this);
}
/**
@ -523,6 +524,7 @@ export abstract class RenderItem<E extends ERenderItemEvent = ERenderItemEvent>
/**
*
* @returns
*/
remove(): boolean {
if (!this.parent) return false;

View File

@ -2,20 +2,10 @@ import { MotaOffscreenCanvas2D } from '@/core/fx/canvas2d';
import { Container, EContainerEvent } from '../container';
import { Sprite } from '../sprite';
import { TimingFn } from 'mutate-animate';
import {
ERenderItemEvent,
IAnimateFrame,
renderEmits,
RenderItem
} from '../item';
import { IAnimateFrame, renderEmits, RenderItem } from '../item';
import { logger } from '@/core/common/logger';
import { RenderableData, texture } from '../cache';
import {
BlockCacher,
CanvasCacheItem,
IBlockCacheable,
ICanvasCacheItem
} from './block';
import { BlockCacher, CanvasCacheItem, ICanvasCacheItem } from './block';
import { Transform } from '../transform';
import { LayerFloorBinder, LayerGroupFloorBinder } from './floor';
import { RenderAdapter } from '../adapter';
@ -148,13 +138,9 @@ export class LayerGroup
}
protected render(canvas: MotaOffscreenCanvas2D): void {
const { ctx } = canvas;
this.sortedChildren.forEach(v => {
if (v.hidden) return;
ctx.save();
v.renderContent(canvas, this.camera);
ctx.restore();
});
}
@ -362,13 +348,14 @@ export class LayerGroup
if (!this.assertType(nextValue, 'number', key)) return;
this.setBlockSize(nextValue);
return;
case 'floorId':
case 'floorId': {
if (!this.assertType(nextValue, 'number', key)) return;
const binder = this.getExtends('floor-binder');
if (binder instanceof LayerGroupFloorBinder) {
binder.bindFloor(nextValue);
}
return;
}
case 'camera':
if (!this.assertType(nextValue, Camera, key)) return;
this.camera = nextValue;
@ -1376,7 +1363,7 @@ export class Layer extends Container<ELayerEvent> {
// 删除原始位置的图块
this.putRenderData([0], 1, fx, fy);
let nowZ = fy;
const nowZ = fy;
const startTime = Date.now();
return new Promise<void>(resolve => {
this.delegateTicker(
@ -1423,7 +1410,7 @@ export class Layer extends Container<ELayerEvent> {
time: number,
relative: boolean = true
) {
let nowZ = y;
const nowZ = y;
const startTime = Date.now();
return new Promise<void>(resolve => {
this.delegateTicker(
@ -1459,7 +1446,7 @@ export class Layer extends Container<ELayerEvent> {
parentComponent?: ComponentInternalInstance | null
): void {
switch (key) {
case 'layer':
case 'layer': {
if (!this.assertType(nextValue, 'string', key)) return;
const parent = this.parent;
if (parent instanceof LayerGroup) {
@ -1471,6 +1458,7 @@ export class Layer extends Container<ELayerEvent> {
}
this.update();
return;
}
case 'cellSize':
if (!this.assertType(nextValue, 'number', key)) return;
this.setCellSize(nextValue);

View File

@ -31,21 +31,19 @@ export class MotaRenderer extends Container {
MotaRenderer.list.set(id, this);
}
update(item: RenderItem = this) {
update(_item: RenderItem = this) {
if (this.needUpdate || this.hidden) return;
this.needUpdate = true;
this.requestRenderFrame(() => {
this.refresh(item);
this.refresh();
});
}
protected refresh(item: RenderItem = this): void {
protected refresh(): void {
if (!this.needUpdate) return;
this.needUpdate = false;
this.emit('beforeUpdate', item);
this.target.clear();
this.renderContent(this.target, Transform.identity);
this.emit('afterUpdate', item);
}
/**

View File

@ -1,8 +1,8 @@
import { Ticker, TimingFn } from 'mutate-animate';
import { TimingFn } from 'mutate-animate';
import { RenderAdapter } from './adapter';
import { FloorViewport } from './preset/viewport';
import { JSX } from 'vue/jsx-runtime';
import { Component, DefineComponent, DefineSetupFnComponent } from 'vue';
import { DefineComponent, DefineSetupFnComponent } from 'vue';
export type Props<
T extends

View File

@ -22,6 +22,10 @@
"20": "Cannot create render element for tag '$1', since there's no registration for it.",
"21": "Incorrect render prop type is delivered. key: '$1', expected type: '$2', delivered type: '$3'",
"22": "Incorrect props for custom tag. Please ensure you have delivered 'item' prop and other required props.",
"23": "Cannot get reader when fetching '$1'.",
"24": "Cannot decode stream source type of '$1', since there is no registered decoder for that type.",
"25": "Unknown audio type. Header: '$1'",
"26": "Uncaught error when fetching stream data from '$1'. Error info: $2.",
"1101": "Shadow extension needs 'floor-hero' extension as dependency.",
"1201": "Floor-damage extension needs 'floor-binder' extension as dependency.",
"1301": "Portal extension need 'floor-binder' extension as dependency.",
@ -71,6 +75,11 @@
"41": "Width of text content components must be positive. receive: $1",
"42": "Repeated Textbox id: '$1'.",
"43": "Cannot set icon of '$1', since it does not exists. Please ensure you have delivered correct icon id or number.",
"44": "Unexpected end when loading stream audio, reason: '$1'",
"45": "Audio route with id of '$1' has already existed. New route will override old route.",
"46": "Cannot pipe new StreamReader object when stream is loading.",
"47": "Audio stream decoder for audio type '$1' has already existed.",
"48": "Sample rate in stream audio must be constant.",
"1001": "Item-detail extension needs 'floor-binder' and 'floor-damage' extension as dependency.",
"1101": "Cannot add new effect to point effect instance, for there's no more reserve space for it. Please increase the max count of the instance."
}

View File

@ -0,0 +1,28 @@
import { StreamLoader } from '../loader';
import { audioPlayer, AudioRoute } from './player';
import { guessTypeByExt, isAudioSupport } from './support';
/**
 * Registers one audio route per bgm listed in the project data, once the
 * engine core has initialized. Formats the browser can play natively get an
 * HTMLAudioElement-backed source; everything else is streamed and decoded
 * manually via a StreamLoader piped into a stream source.
 */
export function loadAllBgm() {
    const loading = Mota.require('var', 'loading');
    loading.once('coreInit', () => {
        const data = data_a1e2fb4a_e986_4524_b0da_9b7ba7c0874d;
        for (const bgm of data.main.bgms) {
            const type = guessTypeByExt(bgm);
            // Unknown extension: cannot pick a playback strategy, skip it.
            if (!type) continue;
            const uri = `project/bgms/${bgm}`;
            let source;
            if (isAudioSupport(type)) {
                // Natively supported: let the <audio> element handle it.
                source = audioPlayer.createElementSource();
                source.setSource(uri);
            } else {
                // Unsupported: stream the file and decode it ourselves.
                source = audioPlayer.createStreamSource();
                new StreamLoader(uri).pipe(source);
            }
            source.setLoop(true);
            audioPlayer.addRoute(`bgms.${bgm}`, new AudioRoute(source, audioPlayer));
        }
    });
}

View File

@ -0,0 +1,45 @@
import { OggVorbisDecoder } from '@wasm-audio-decoders/ogg-vorbis';
import { IAudioDecodeData, IAudioDecoder } from './source';
import { OggOpusDecoder } from 'ogg-opus-decoder';
/** Ogg/Vorbis stream decoder backed by the WASM OggVorbisDecoder. */
export class VorbisDecoder implements IAudioDecoder {
    decoder?: OggVorbisDecoder;

    /** Instantiate the WASM decoder and wait until it is ready for input. */
    async create(): Promise<void> {
        const decoder = new OggVorbisDecoder();
        this.decoder = decoder;
        await decoder.ready;
    }

    /** Free the WASM resources held by the decoder, if it was created. */
    destroy(): void {
        if (this.decoder) this.decoder.free();
    }

    /** Decode one chunk of raw stream bytes into PCM channel data. */
    async decode(data: Uint8Array): Promise<IAudioDecodeData | undefined> {
        if (!this.decoder) return undefined;
        return await this.decoder.decode(data);
    }

    /** Drain any samples still buffered inside the decoder. */
    async flush(): Promise<IAudioDecodeData | undefined> {
        if (!this.decoder) return undefined;
        return await this.decoder.flush();
    }
}
/** Ogg/Opus stream decoder backed by the WASM OggOpusDecoder. */
export class OpusDecoder implements IAudioDecoder {
    decoder?: OggOpusDecoder;

    /** Instantiate the WASM decoder and wait until it is ready for input. */
    async create(): Promise<void> {
        const decoder = new OggOpusDecoder();
        this.decoder = decoder;
        await decoder.ready;
    }

    /** Free the WASM resources held by the decoder, if it was created. */
    destroy(): void {
        if (this.decoder) this.decoder.free();
    }

    /** Decode one chunk of raw stream bytes into PCM channel data. */
    async decode(data: Uint8Array): Promise<IAudioDecodeData | undefined> {
        if (!this.decoder) return undefined;
        return await this.decoder.decode(data);
    }

    /** Drain any samples still buffered inside the decoder. */
    async flush(): Promise<IAudioDecodeData | undefined> {
        if (!this.decoder) return undefined;
        return await this.decoder.flush();
    }
}

288
src/module/audio/effect.ts Normal file
View File

@ -0,0 +1,288 @@
import { isNil } from 'lodash-es';
import { sleep } from 'mutate-animate';
export interface IAudioInput {
    /** Node that receives audio from an upstream source or effect. */
    input: AudioNode;
}

export interface IAudioOutput {
    /** Node that feeds audio to a downstream effect or destination. */
    output: AudioNode;
}
/**
 * Base class for one stage of an audio effect chain. Each effect exposes an
 * input node and an output node; chaining is done by connecting one effect's
 * output to the next effect's input.
 */
export abstract class AudioEffect implements IAudioInput, IAudioOutput {
    /** Node downstream consumers read from. */
    abstract output: AudioNode;
    /** Node upstream producers write into. */
    abstract input: AudioNode;

    constructor(public readonly ac: AudioContext) {}

    /** Notified when playback of the owning route ends. */
    abstract end(): void;

    /** Notified when playback of the owning route starts. */
    abstract start(): void;

    /**
     * Connect this effect's output to another input.
     * @param target node wrapper to connect to
     * @param output optional output channel index
     * @param input optional input channel index
     */
    connect(target: IAudioInput, output?: number, input?: number) {
        this.output.connect(target.input, output, input);
    }

    /**
     * Disconnect this effect's output. With no target, disconnects everything
     * (or just the given output channel). With a target, disconnects only the
     * matching connection, honoring the optional channel indices.
     */
    disconnect(target?: IAudioInput, output?: number, input?: number) {
        if (!target) {
            if (isNil(output)) this.output.disconnect();
            else this.output.disconnect(output);
            return;
        }
        if (isNil(output)) {
            this.output.disconnect(target.input);
            return;
        }
        if (isNil(input)) this.output.disconnect(target.input, output);
        else this.output.disconnect(target.input, output, input);
    }
}
/** Positional (3D panner) effect; input and output share one PannerNode. */
export class StereoEffect extends AudioEffect {
    output: PannerNode;
    input: PannerNode;

    constructor(ac: AudioContext) {
        super(ac);
        const node = ac.createPanner();
        this.output = node;
        this.input = node;
    }

    /**
     * Set the source facing direction. Axes: +x right, +y up (away from the
     * ground), +z out of the screen toward the listener's back.
     */
    setOrientation(x: number, y: number, z: number) {
        const { orientationX, orientationY, orientationZ } = this.output;
        orientationX.value = x;
        orientationY.value = y;
        orientationZ.value = z;
    }

    /**
     * Set the source position. Axes: +x right, +y up (away from the ground),
     * +z out of the screen toward the listener's back.
     */
    setPosition(x: number, y: number, z: number) {
        const { positionX, positionY, positionZ } = this.output;
        positionX.value = x;
        positionY.value = y;
        positionZ.value = z;
    }

    start(): void {}
    end(): void {}
}
/** Simple volume stage; input and output share one GainNode. */
export class VolumeEffect extends AudioEffect {
    output: GainNode;
    input: GainNode;

    constructor(ac: AudioContext) {
        super(ac);
        const node = ac.createGain();
        this.output = node;
        this.input = node;
    }

    /**
     * Set the linear gain of this stage.
     * @param volume linear gain value
     */
    setVolume(volume: number) {
        this.input.gain.value = volume;
    }

    /** Get the current linear gain of this stage. */
    getVolume(): number {
        return this.input.gain.value;
    }

    start(): void {}
    end(): void {}
}
/**
 * Per-channel volume control: the signal is split into individual channels,
 * each channel passes through its own GainNode, and the channels are merged
 * back together.
 */
export class ChannelVolumeEffect extends AudioEffect {
    output: ChannelMergerNode;
    input: ChannelSplitterNode;
    /** One gain node per channel, indexed by channel number. */
    private readonly gain: GainNode[] = [];

    /**
     * @param channelCount number of channels to split/control; defaults to 6,
     *        the Web Audio default for splitter/merger nodes (so omitting it
     *        preserves the previous behavior)
     */
    constructor(ac: AudioContext, channelCount: number = 6) {
        super(ac);
        const splitter = ac.createChannelSplitter(channelCount);
        const merger = ac.createChannelMerger(channelCount);
        this.output = merger;
        this.input = splitter;
        for (let i = 0; i < channelCount; i++) {
            const gain = ac.createGain();
            // Route channel i through its dedicated gain node and back into
            // the same position of the merged output.
            splitter.connect(gain, i);
            gain.connect(merger, 0, i);
            this.gain.push(gain);
        }
    }

    /**
     * Set the gain of one channel; out-of-range channels are ignored.
     * @param channel channel index (0-based)
     * @param volume linear gain value
     */
    setVolume(channel: number, volume: number) {
        if (!this.gain[channel]) return;
        this.gain[channel].gain.value = volume;
    }

    /**
     * Get the gain of one channel; returns 0 for out-of-range channels.
     * @param channel channel index (0-based)
     */
    getVolume(channel: number): number {
        if (!this.gain[channel]) return 0;
        return this.gain[channel].gain.value;
    }

    end(): void {}
    start(): void {}
}
/** Fixed delay stage; input and output share one DelayNode. */
export class DelayEffect extends AudioEffect {
    output: DelayNode;
    input: DelayNode;

    /**
     * @param maxDelay maximum delay (seconds) this node can be set to.
     *        Defaults to 1, which is the Web Audio default for
     *        `createDelay()` — so omitting it preserves previous behavior.
     *        Values passed to {@link setDelay} above this cap are silently
     *        clamped by the DelayNode.
     */
    constructor(ac: AudioContext, maxDelay: number = 1) {
        super(ac);
        const delay = ac.createDelay(maxDelay);
        this.input = delay;
        this.output = delay;
    }

    /**
     * Set the delay time.
     * @param delay delay in seconds (clamped to [0, maxDelay])
     */
    setDelay(delay: number) {
        this.output.delayTime.value = delay;
    }

    /** Get the current delay time in seconds. */
    getDelay() {
        return this.output.delayTime.value;
    }

    end(): void {}
    start(): void {}
}
/**
 * Echo effect built from a gain/delay feedback loop: the gain node feeds the
 * delay node, and the delay node feeds back into the gain node. The same gain
 * node is both the input and output of the stage, so the dry signal also
 * passes through it.
 */
export class EchoEffect extends AudioEffect {
    output: GainNode;
    input: GainNode;
    /** Delay node in the feedback loop. */
    private readonly delay: DelayNode;
    /** Feedback gain node (shared with input/output). */
    private readonly gainNode: GainNode;
    /** Current feedback gain, kept even while muted by end(). */
    private gain: number = 0.5;
    /** Whether the owning route is currently playing. */
    private playing: boolean = false;

    constructor(ac: AudioContext) {
        super(ac);
        const delay = ac.createDelay();
        const gain = ac.createGain();
        // Defaults: 50% feedback, 50 ms echo spacing.
        gain.gain.value = 0.5;
        delay.delayTime.value = 0.05;
        // Feedback loop: gain -> delay -> gain.
        delay.connect(gain);
        gain.connect(delay);
        this.delay = delay;
        this.gainNode = gain;
        this.input = gain;
        this.output = gain;
    }

    /**
     * Set the feedback gain. Values >= 1 would make the loop diverge, so they
     * fall back to 0.5; negative values clamp to 0. Applied to the node
     * immediately only while playing (end() mutes the node).
     * @param gain feedback gain in [0, 1)
     */
    setFeedbackGain(gain: number) {
        const resolved = gain >= 1 ? 0.5 : gain < 0 ? 0 : gain;
        this.gain = resolved;
        if (this.playing) this.gainNode.gain.value = resolved;
    }

    /**
     * Set the echo spacing in seconds; values below 0.01 clamp to 0.01.
     * NOTE(review): the delay node was created with `ac.createDelay()`, whose
     * default maxDelayTime is 1 s, so values above 1 are silently clamped by
     * the node — confirm whether longer echoes are intended.
     * @param delay echo spacing in seconds
     */
    setEchoDelay(delay: number) {
        const resolved = delay < 0.01 ? 0.01 : delay;
        this.delay.delayTime.value = resolved;
    }

    /** Get the configured feedback gain (even while muted). */
    getFeedbackGain() {
        return this.gain;
    }

    /** Get the current echo spacing in seconds. */
    getEchoDelay() {
        return this.delay.delayTime.value;
    }

    end(): void {
        this.playing = false;
        // Number of loop iterations until the echo decays below 0.1% (+10 for
        // margin); after that long, mute the loop unless playback restarted.
        const echoTime = Math.ceil(Math.log(0.001) / Math.log(this.gain)) + 10;
        sleep(this.delay.delayTime.value * echoTime).then(() => {
            if (!this.playing) this.gainNode.gain.value = 0;
        });
    }

    start(): void {
        this.playing = true;
        this.gainNode.gain.value = this.gain;
    }
}

14
src/module/audio/index.ts Normal file
View File

@ -0,0 +1,14 @@
import { loadAllBgm } from './bgmLoader';
import { OpusDecoder, VorbisDecoder } from './decoder';
import { AudioStreamSource } from './source';
import { AudioType } from './support';
// Module side effects: queue bgm route creation for when the core has
// initialized, and register the stream decoders AudioStreamSource uses for
// formats the browser cannot play natively.
loadAllBgm();
AudioStreamSource.registerDecoder(AudioType.Ogg, VorbisDecoder);
AudioStreamSource.registerDecoder(AudioType.Opus, OpusDecoder);
export * from './support';
export * from './effect';
export * from './player';
export * from './source';
export * from './bgmLoader';

475
src/module/audio/player.ts Normal file
View File

@ -0,0 +1,475 @@
import EventEmitter from 'eventemitter3';
import {
AudioBufferSource,
AudioElementSource,
AudioSource,
AudioStreamSource
} from './source';
import {
AudioEffect,
ChannelVolumeEffect,
DelayEffect,
EchoEffect,
IAudioOutput,
StereoEffect,
VolumeEffect
} from './effect';
import { isNil } from 'lodash-es';
import { logger } from '@/core/common/logger';
import { sleep } from 'mutate-animate';
interface AudioPlayerEvent {}
/**
 * Top-level audio facade: owns the AudioContext, a master gain node wired to
 * the context destination, and a registry of named {@link AudioRoute}s.
 * Factory helpers create sources and effects bound to this player's context.
 */
export class AudioPlayer extends EventEmitter<AudioPlayerEvent> {
    /** The Web Audio context everything in this player lives in. */
    readonly ac: AudioContext;
    /** All named audio routes, keyed by route id (e.g. `bgms.xxx`). */
    readonly audioRoutes: Map<string, AudioRoute> = new Map();
    /** Master volume node; routes ultimately feed into it. */
    readonly gain: GainNode;
    constructor() {
        super();
        this.ac = new AudioContext();
        this.gain = this.ac.createGain();
        this.gain.connect(this.ac.destination);
    }
    /**
     * Set the master volume.
     * @param volume linear gain value
     */
    setVolume(volume: number) {
        this.gain.gain.value = volume;
    }
    /**
     * Get the master volume (linear gain).
     */
    getVolume() {
        return this.gain.gain.value;
    }
    /**
     * Instantiate an audio source class bound to this player's context.
     * @param Source source class to instantiate
     */
    createSource<T extends AudioSource>(
        Source: new (ac: AudioContext) => T
    ): T {
        return new Source(this.ac);
    }
    /**
     * Create a streaming source (used for e.g. opus/ogg that the browser
     * cannot decode natively).
     */
    createStreamSource() {
        return new AudioStreamSource(this.ac);
    }
    /**
     * Create an HTMLAudioElement-backed source.
     */
    createElementSource() {
        return new AudioElementSource(this.ac);
    }
    /**
     * Create an AudioBuffer-backed source.
     */
    createBufferSource() {
        return new AudioBufferSource(this.ac);
    }
    /**
     * Node that routes should connect to as their final destination
     * (the master gain).
     */
    getDestination() {
        return this.gain;
    }
    /**
     * Instantiate an effect class bound to this player's context.
     * @param Effect effect class to instantiate
     */
    createEffect<T extends AudioEffect>(
        Effect: new (ac: AudioContext) => T
    ): T {
        return new Effect(this.ac);
    }
    /**
     * Create a volume effect stage.
     * ```txt
     *             |----------|
     * Input ----> | GainNode | ----> Output
     *             |----------|
     * ```
     */
    createVolumeEffect() {
        return new VolumeEffect(this.ac);
    }
    /**
     * Create a positional (3D panner) effect stage.
     * ```txt
     *             |------------|
     * Input ----> | PannerNode | ----> Output
     *             |------------|
     * ```
     */
    createStereoEffect() {
        return new StereoEffect(this.ac);
    }
    /**
     * Create a per-channel volume effect stage.
     * ```txt
     *                               |----------|
     *                            -> | GainNode | \
     *             |--------------| /  |----------| -> |------------|
     * Input ----> | SplitterNode | ......              | MergerNode | ----> Output
     *             |--------------| \  |----------| -> |------------|
     *                            -> | GainNode | /
     *                               |----------|
     * ```
     */
    createChannelVolumeEffect() {
        return new ChannelVolumeEffect(this.ac);
    }
    /**
     * Create a fixed-delay effect stage.
     *             |-----------|
     * Input ----> | DelayNode | ----> Output
     *             |-----------|
     */
    createDelay() {
        return new DelayEffect(this.ac);
    }
    /**
     * Create an echo (feedback delay) effect stage.
     * ```txt
     *             |----------|
     * Input ----> | GainNode | ----> Output
     *        ^    |----------|    |
     *        |                    |
     *        |    |------------|  |
     *        |-- | Delay Node | <--
     *             |------------|
     * ```
     */
    createEchoEffect() {
        return new EchoEffect(this.ac);
    }
    /**
     * Wrap a source in a new playback route owned by this player.
     * @param source the audio source to play through the route
     */
    createRoute(source: AudioSource) {
        return new AudioRoute(source, this);
    }
    /**
     * Register a route under an id; an existing route with the same id is
     * overridden (logged as warning 45).
     * @param id route id
     * @param route route to register
     */
    addRoute(id: string, route: AudioRoute) {
        if (this.audioRoutes.has(id)) {
            logger.warn(45, id);
        }
        this.audioRoutes.set(id, route);
    }
    /**
     * Look up a route by id.
     * @param id route id
     */
    getRoute(id: string) {
        return this.audioRoutes.get(id);
    }
    /**
     * Play a route from the given offset; unknown ids are ignored.
     * @param id route id
     * @param when playback offset in seconds
     */
    play(id: string, when: number = 0) {
        this.getRoute(id)?.play(when);
    }
    /**
     * Pause a route; resolves once the route has actually paused.
     * Unknown ids resolve immediately.
     * @param id route id
     */
    pause(id: string) {
        const route = this.getRoute(id);
        if (!route) return Promise.resolve();
        else return route.pause();
    }
    /**
     * Stop a route; resolves once the route has actually stopped.
     * Unknown ids resolve immediately.
     * @param id route id
     */
    stop(id: string) {
        const route = this.getRoute(id);
        if (!route) return Promise.resolve();
        else return route.stop();
    }
    /**
     * Resume a paused route; unknown ids are ignored.
     * @param id route id
     */
    resume(id: string) {
        this.getRoute(id)?.resume();
    }
    /**
     * Set the listener position. Axes: +x right, +y up (away from the
     * ground), +z out of the screen toward the listener's back.
     */
    setListenerPosition(x: number, y: number, z: number) {
        const listener = this.ac.listener;
        listener.positionX.value = x;
        listener.positionY.value = y;
        listener.positionZ.value = z;
    }
    /**
     * Set the listener's facing direction. Axes: +x right, +y up (away from
     * the ground), +z out of the screen toward the listener's back.
     */
    setListenerOrientation(x: number, y: number, z: number) {
        const listener = this.ac.listener;
        listener.forwardX.value = x;
        listener.forwardY.value = y;
        listener.forwardZ.value = z;
    }
    /**
     * Set the listener's "up" vector. Axes: +x right, +y up (away from the
     * ground), +z out of the screen toward the listener's back.
     */
    setListenerUp(x: number, y: number, z: number) {
        const listener = this.ac.listener;
        listener.upX.value = x;
        listener.upY.value = y;
        listener.upZ.value = z;
    }
}
/** Invoked right after playback of a route starts. */
type AudioStartHook = (route: AudioRoute) => void;
/** Invoked when a route is about to end; receives the configured end time. */
type AudioEndHook = (time: number, route: AudioRoute) => void;
interface AudioRouteEvent {
    updateEffect: [];
    play: [];
    stop: [];
    pause: [];
    resume: [];
}
/**
 * A playback route: one {@link AudioSource} piped through an ordered chain of
 * {@link AudioEffect}s into the player's destination. Manages play / pause /
 * resume / stop plus effect-chain wiring.
 */
export class AudioRoute
    extends EventEmitter<AudioRouteEvent>
    implements IAudioOutput
{
    /** Output node of the route: last effect's output, or the source's. */
    output: AudioNode;

    /** Ordered effect chain the source is piped through. */
    readonly effectRoute: AudioEffect[] = [];

    /**
     * Fade-out time: pause/stop wait this long (after notifying the end hook)
     * before actually stopping the source, so fades can play out.
     */
    endTime: number = 0;

    /** Whether playback is paused; a stopped route does not count as paused. */
    paused: boolean = false;

    /** Playback position (seconds) recorded when the route was paused. */
    private pauseTime: number = 0;

    /** Hook fired right after playback starts. */
    private audioStartHook?: AudioStartHook;
    /** Hook fired before playback ends; receives {@link endTime}. */
    private audioEndHook?: AudioEndHook;

    constructor(
        public readonly source: AudioSource,
        public readonly player: AudioPlayer
    ) {
        super();
        this.output = source.output;
    }

    /**
     * Set the fade-out time used by pause/stop.
     * @param time time in milliseconds passed to sleep()
     */
    setEndTime(time: number) {
        this.endTime = time;
    }

    /**
     * Set (or clear) the hook fired when playback starts.
     * @param fn hook, or undefined to clear
     */
    onStart(fn?: AudioStartHook) {
        this.audioStartHook = fn;
    }

    /**
     * Set (or clear) the hook fired before playback ends.
     * @param fn hook, or undefined to clear
     */
    onEnd(fn?: AudioEndHook) {
        this.audioEndHook = fn;
    }

    /**
     * Start playback from the given offset; no-op if already playing.
     * @param when playback offset in seconds
     */
    play(when: number = 0) {
        if (this.source.playing) return;
        this.link();
        if (this.effectRoute.length > 0) {
            const first = this.effectRoute[0];
            this.source.connect(first);
        } else {
            // No effects: wire the source straight to the destination.
            this.source.connect({ input: this.player.getDestination() });
        }
        this.source.play(when);
        this.paused = false;
        this.pauseTime = 0;
        this.audioStartHook?.(this);
        this.startAllEffect();
        this.emit('play');
    }

    /**
     * Pause playback, waiting out the fade time first if an end hook is set.
     */
    async pause() {
        if (this.paused || !this.source.playing) return;
        if (this.audioEndHook) {
            this.audioEndHook(this.endTime, this);
            await sleep(this.endTime);
        }
        const time = this.source.stop();
        this.pauseTime = time;
        this.paused = true;
        this.endAllEffect();
        this.emit('pause');
    }

    /**
     * Resume playback from the pause position (or from the start if the
     * route was stopped rather than paused).
     */
    resume() {
        if (this.source.playing) return;
        // play() already resets the pause state, fires the start hook and
        // starts all effects — previously those were repeated here, causing
        // the start hook and effect start() to run twice per resume.
        this.play(this.paused ? this.pauseTime : 0);
        this.emit('resume');
    }

    /**
     * Stop playback, waiting out the fade time first if an end hook is set.
     */
    async stop() {
        if (!this.source.playing) return;
        if (this.audioEndHook) {
            this.audioEndHook(this.endTime, this);
            await sleep(this.endTime);
        }
        this.source.stop();
        this.paused = false;
        this.pauseTime = 0;
        this.endAllEffect();
        this.emit('stop');
    }

    /**
     * Append (or insert) one or more effects into the chain and rewire.
     * @param effect effect or array of effects
     * @param index insertion index; appends when omitted
     */
    addEffect(effect: AudioEffect | AudioEffect[], index?: number) {
        if (isNil(index)) {
            if (effect instanceof Array) {
                this.effectRoute.push(...effect);
            } else {
                this.effectRoute.push(effect);
            }
        } else {
            if (effect instanceof Array) {
                this.effectRoute.splice(index, 0, ...effect);
            } else {
                this.effectRoute.splice(index, 0, effect);
            }
        }
        this.setOutput();
        if (this.source.playing) this.link();
        this.emit('updateEffect');
    }

    /**
     * Remove an effect from the chain and rewire; unknown effects ignored.
     * @param effect the effect to remove
     */
    removeEffect(effect: AudioEffect) {
        const index = this.effectRoute.indexOf(effect);
        if (index === -1) return;
        this.effectRoute.splice(index, 1);
        effect.disconnect();
        this.setOutput();
        if (this.source.playing) this.link();
        this.emit('updateEffect');
    }

    /** Keep `output` pointing at the tail of the chain (or the source). */
    private setOutput() {
        const effect = this.effectRoute.at(-1);
        if (!effect) this.output = this.source.output;
        else this.output = effect.output;
    }

    /**
     * Rewire the effect chain: each effect feeds the next, and the tail feeds
     * the player's destination.
     */
    private link() {
        this.effectRoute.forEach(v => v.disconnect());
        this.effectRoute.forEach((v, i) => {
            const next = this.effectRoute[i + 1];
            if (next) {
                v.connect(next);
            }
        });
        // BUG FIX: the tail of the chain was never connected to the
        // destination, so any route with effects played silently (the
        // no-effect path in play() does connect to the destination).
        const last = this.effectRoute.at(-1);
        if (last) {
            last.connect({ input: this.player.getDestination() });
        }
        // NOTE(review): if effects are added/removed mid-playback, the source
        // stays connected to the old first effect — confirm whether live
        // re-wiring of the source connection is needed.
    }

    /** Notify every effect that playback started. */
    private startAllEffect() {
        this.effectRoute.forEach(v => v.start());
    }

    /** Notify every effect that playback ended. */
    private endAllEffect() {
        this.effectRoute.forEach(v => v.end());
    }
}
export const audioPlayer = new AudioPlayer();

623
src/module/audio/source.ts Normal file
View File

@ -0,0 +1,623 @@
import EventEmitter from 'eventemitter3';
import { IStreamController, IStreamReader } from '../loader';
import { IAudioInput, IAudioOutput } from './effect';
import { logger } from '@/core/common/logger';
import { AudioType } from './support';
import CodecParser, { CodecFrame, MimeType, OggPage } from 'codec-parser';
import { isNil } from 'lodash-es';
interface AudioSourceEvent {
play: [];
end: [];
}
/**
 * Base class of every audio source: owns the AudioContext, tracks playing
 * state, and defines the playback/connection contract subclasses implement.
 */
export abstract class AudioSource
    extends EventEmitter<AudioSourceEvent>
    implements IAudioOutput
{
    /** Output node of this audio source. */
    abstract readonly output: AudioNode;
    /** Whether this source is currently playing. */
    playing: boolean = false;
    constructor(public readonly ac: AudioContext) {
        super();
    }
    /**
     * Start playback.
     * @param when Offset (seconds) at which playback should begin.
     */
    abstract play(when?: number): void;
    /**
     * Stop playback.
     * @returns A playback time in seconds; exact meaning is subclass-defined
     * (elapsed time or current media position).
     */
    abstract stop(): number;
    /**
     * Connect this source's output to an audio input.
     * @param target Input to connect to.
     */
    abstract connect(target: IAudioInput): void;
    /**
     * Set whether playback loops.
     * @param loop Loop flag.
     */
    abstract setLoop(loop: boolean): void;
}
export interface IAudioDecodeError {
    /** Human-readable description of the decode error. */
    message: string;
}
export interface IAudioDecodeData {
    /** PCM data for each channel. */
    channelData: Float32Array[];
    /** Number of PCM samples decoded. */
    samplesDecoded: number;
    /** Sample rate of the decoded audio. */
    sampleRate: number;
    /** Errors encountered while decoding. */
    errors: IAudioDecodeError[];
}
export interface IAudioDecoder {
    /**
     * Create and initialize the decoder; called once before any decode call.
     */
    create(): Promise<void>;
    /**
     * Destroy the decoder and release its resources.
     */
    destroy(): void;
    /**
     * Decode a chunk of encoded audio data.
     * @param data Raw encoded bytes from the stream.
     */
    decode(data: Uint8Array): Promise<IAudioDecodeData | undefined>;
    /**
     * Flush any internally buffered data; called once the stream ends.
     */
    flush(): Promise<IAudioDecodeData | undefined>;
}
/**
 * Magic-byte signatures used to sniff the container type from the first
 * bytes of a stream.
 */
const fileSignatures: [AudioType, number[]][] = [
    [AudioType.Mp3, [0x49, 0x44, 0x33]], // "ID3"
    [AudioType.Ogg, [0x4f, 0x67, 0x67, 0x53]], // "OggS"
    // "RIFF" — first byte is 0x52 ('R'); the previous decimal 52 (0x34, '4')
    // made WAV detection never match.
    [AudioType.Wav, [0x52, 0x49, 0x46, 0x46]],
    [AudioType.Flac, [0x66, 0x4c, 0x61, 0x43]], // "fLaC"
    [AudioType.Aac, [0xff, 0xf1]], // ADTS, MPEG-4
    [AudioType.Aac, [0xff, 0xf9]] // ADTS, MPEG-2
];
/** Byte patterns searched inside an Ogg container to refine the codec. */
const oggHeaders: [AudioType, number[]][] = [
    // "OpusHead"
    [AudioType.Opus, [0x4f, 0x70, 0x75, 0x73, 0x48, 0x65, 0x61, 0x64]]
];
/** Maps a detected audio type to the MIME type handed to codec-parser. */
const mimeTypeMap: Record<AudioType, MimeType> = {
    [AudioType.Aac]: 'audio/aac',
    [AudioType.Flac]: 'audio/flac',
    [AudioType.Mp3]: 'audio/mpeg',
    [AudioType.Ogg]: 'application/ogg',
    [AudioType.Opus]: 'application/ogg',
    // NOTE(review): mapping WAV to 'application/ogg' looks like a placeholder —
    // codec-parser does not document WAV support; verify WAV streams are
    // actually routed through a registered decoder.
    [AudioType.Wav]: 'application/ogg'
};
function isOggPage(data: any): data is OggPage {
return !isNil(data.isFirstPage);
}
/**
 * Audio source fed by a byte stream: sniffs the format, decodes chunks as
 * they arrive, buffers PCM data per channel, and starts playback once enough
 * audio is buffered (gapless resume via merged AudioBuffers).
 */
export class AudioStreamSource extends AudioSource implements IStreamReader {
    /** Registered stream decoder constructors, keyed by audio type. */
    static readonly decoderMap: Map<AudioType, new () => IAudioDecoder> =
        new Map();

    output: AudioBufferSourceNode;
    /** Merged audio data; rebuilt by mergeBuffers() as the stream progresses. */
    buffer?: AudioBuffer;
    /** Whether the stream has been fully loaded and decoded. */
    loaded: boolean = false;
    /** Seconds buffered so far; equals the track duration once fully loaded. */
    buffered: number = 0;
    /** Number of PCM samples buffered so far. */
    bufferedSamples: number = 0;
    /** Track duration in seconds; stays 0 until loading completes. */
    duration: number = 0;
    /** While streaming, buffer at least this many seconds before starting playback. */
    bufferPlayDuration: number = 1;
    /** Sample rate of the stream; stays 0 until successfully parsed. */
    sampleRate: number = 0;

    private controller?: IStreamController;
    private loop: boolean = false;
    private target?: IAudioInput;
    /** AudioContext time at which the last playback started. */
    private lastStartTime: number = 0;
    /** Buffered sample count at the moment playback last (re)started. */
    private lastBufferSamples: number = 0;
    /** Whether the header has been received and the decoder/parser created. */
    private headerRecieved: boolean = false;
    /** Detected audio type. */
    private audioType: AudioType | '' = '';
    /** Active audio decoder. */
    private decoder?: IAudioDecoder;
    /** Active codec parser (frame/page metadata). */
    private parser?: CodecParser;
    /** Seconds of audio stored per cached Float32Array chunk. */
    private bufferChunkSize = 10;
    /** Streaming PCM cache: per channel, a list of bufferChunkSize-second chunks. */
    private audioData: Float32Array[][] = [];
    private errored: boolean = false;

    /**
     * Register a stream decoder for an audio type.
     * @param type Audio type the decoder handles.
     * @param decoder Decoder constructor.
     */
    static registerDecoder(type: AudioType, decoder: new () => IAudioDecoder) {
        if (this.decoderMap.has(type)) {
            logger.warn(47, type);
            return;
        }
        this.decoderMap.set(type, decoder);
    }

    constructor(context: AudioContext) {
        super(context);
        this.output = context.createBufferSource();
    }

    /**
     * Set the chunk length (seconds) of the streaming PCM cache. Defaults to
     * 10. Ignored once loading has started or finished.
     * @param size Chunk length in seconds.
     */
    setChunkSize(size: number) {
        if (this.controller?.loading || this.loaded) return;
        this.bufferChunkSize = size;
    }

    piped(controller: IStreamController): void {
        this.controller = controller;
    }

    async pump(data: Uint8Array | undefined, done: boolean): Promise<void> {
        if (!data || this.errored) return;
        if (!this.headerRecieved) {
            // Sniff the audio type from the first 256 bytes only.
            const toCheck = data.slice(0, 256);
            for (const [type, value] of fileSignatures) {
                if (value.every((v, i) => toCheck[i] === v)) {
                    this.audioType = type;
                    break;
                }
            }
            if (this.audioType === AudioType.Ogg) {
                // An Ogg container may hold Opus; search for the OpusHead marker.
                for (const [key, value] of oggHeaders) {
                    const has = toCheck.some((_, i) => {
                        return value.every((v, ii) => toCheck[i + ii] === v);
                    });
                    if (has) {
                        this.audioType = key;
                        break;
                    }
                }
            }
            if (!this.audioType) {
                // Dump the unrecognized header as hex. Fix: toString(16) —
                // the previous decimal toString() produced a meaningless dump.
                logger.error(
                    25,
                    [...toCheck]
                        .map(v => v.toString(16).padStart(2, '0'))
                        .join(' ')
                        .toUpperCase()
                );
                return;
            }
            // Create the decoder for the detected type.
            const Decoder = AudioStreamSource.decoderMap.get(this.audioType);
            if (!Decoder) {
                this.errored = true;
                logger.error(24, this.audioType);
                return Promise.reject(
                    `Cannot decode stream source type of '${this.audioType}', since there is no registered decoder for that type.`
                );
            }
            this.decoder = new Decoder();
            // Create the codec parser used for frame/page metadata.
            const mime = mimeTypeMap[this.audioType];
            const parser = new CodecParser(mime);
            this.parser = parser;
            await this.decoder.create();
            this.headerRecieved = true;
        }
        const decoder = this.decoder;
        const parser = this.parser;
        if (!decoder || !parser) {
            this.errored = true;
            return Promise.reject(
                'No parser or decoder attached in this AudioStreamSource'
            );
        }
        await this.decodeData(data, decoder, parser);
        if (done) await this.decodeFlushData(decoder, parser);
        this.checkBufferedPlay();
    }

    /**
     * Latch the first observed sample rate and warn if later frames disagree.
     */
    private checkSampleRate(info: (OggPage | CodecFrame)[]) {
        for (const one of info) {
            const frame = isOggPage(one) ? one.codecFrames[0] : one;
            if (frame) {
                const rate = frame.header.sampleRate;
                if (this.sampleRate === 0) {
                    this.sampleRate = rate;
                    break;
                } else {
                    if (rate !== this.sampleRate) {
                        logger.warn(48);
                    }
                }
            }
        }
    }

    /**
     * Decode one chunk of stream data and append the resulting PCM.
     */
    private async decodeData(
        data: Uint8Array,
        decoder: IAudioDecoder,
        parser: CodecParser
    ) {
        // Decode the chunk into PCM.
        const audioData = await decoder.decode(data);
        if (!audioData) return;
        // @ts-expect-error incorrect upstream type declarations
        const audioInfo = [...parser.parseChunk(data)] as (
            | OggPage
            | CodecFrame
        )[];
        // Verify/latch the sample rate.
        this.checkSampleRate(audioInfo);
        // Append the decoded PCM to the cache.
        this.appendDecodedData(audioData, audioInfo);
    }

    /**
     * Flush decoder and parser once the stream ends, appending any remainder.
     */
    private async decodeFlushData(decoder: IAudioDecoder, parser: CodecParser) {
        const audioData = await decoder.flush();
        if (!audioData) return;
        // @ts-expect-error incorrect upstream type declarations
        const audioInfo = [...parser.flush()] as (OggPage | CodecFrame)[];
        this.checkSampleRate(audioInfo);
        this.appendDecodedData(audioData, audioInfo);
    }

    /**
     * Append decoded PCM to the per-channel chunk cache and advance the
     * buffered counters using the parsed frame metadata.
     */
    private appendDecodedData(
        data: IAudioDecodeData,
        info: (CodecFrame | OggPage)[]
    ) {
        const channels = data.channelData.length;
        if (channels === 0) return;
        if (this.audioData.length !== channels) {
            this.audioData = [];
            for (let i = 0; i < channels; i++) {
                this.audioData.push([]);
            }
        }
        // Locate the chunk index and offset where this data starts.
        const chunk = this.sampleRate * this.bufferChunkSize;
        const sampled = this.bufferedSamples;
        const pushIndex = Math.floor(sampled / chunk);
        const bufferIndex = sampled % chunk;
        const dataLength = data.channelData[0].length;
        let buffered = 0;
        let nowIndex = pushIndex;
        let toBuffer = bufferIndex;
        while (buffered < dataLength) {
            // A partially-filled chunk only has room for the remainder
            // (toBuffer === bufferIndex on the first iteration, 0 afterwards).
            const rest = toBuffer !== 0 ? chunk - bufferIndex : chunk;
            for (let i = 0; i < channels; i++) {
                const audioData = this.audioData[i];
                if (!audioData[nowIndex]) {
                    audioData.push(new Float32Array(chunk));
                }
                const toPush = data.channelData[i].slice(
                    buffered,
                    buffered + rest
                );
                audioData[nowIndex].set(toPush, toBuffer);
            }
            buffered += rest;
            nowIndex++;
            toBuffer = 0;
        }
        // Frame metadata reports duration in milliseconds.
        this.buffered +=
            info.reduce((prev, curr) => prev + curr.duration, 0) / 1000;
        this.bufferedSamples += info.reduce(
            (prev, curr) => prev + curr.samples,
            0
        );
    }

    /**
     * While streaming, (re)start playback from the last played position once
     * at least bufferPlayDuration seconds beyond it are buffered.
     */
    private checkBufferedPlay() {
        if (this.playing || this.sampleRate === 0) return;
        const played = this.lastBufferSamples / this.sampleRate;
        const dt = this.buffered - played;
        if (this.loaded) {
            this.playAudio(played);
            return;
        }
        if (dt < this.bufferPlayDuration) return;
        this.lastBufferSamples = this.bufferedSamples;
        // Enough audio buffered: merge chunks and resume where we stopped.
        this.mergeBuffers();
        if (!this.buffer) return;
        if (this.playing) this.output.stop();
        this.createSourceNode(this.buffer);
        this.output.loop = false;
        this.output.start(0, played);
        this.lastStartTime = this.ac.currentTime;
        this.playing = true;
        this.output.addEventListener('ended', () => {
            this.playing = false;
            this.checkBufferedPlay();
        });
    }

    /** Merge the cached chunked PCM data into a single AudioBuffer. */
    private mergeBuffers() {
        const buffer = this.ac.createBuffer(
            this.audioData.length,
            this.bufferedSamples,
            this.sampleRate
        );
        const chunk = this.sampleRate * this.bufferChunkSize;
        const bufferedChunks = Math.floor(this.bufferedSamples / chunk);
        const restLength = this.bufferedSamples % chunk;
        for (let i = 0; i < this.audioData.length; i++) {
            const audio = this.audioData[i];
            const data = new Float32Array(this.bufferedSamples);
            for (let j = 0; j < bufferedChunks; j++) {
                data.set(audio[j], chunk * j);
            }
            if (restLength !== 0) {
                data.set(
                    audio[bufferedChunks].slice(0, restLength),
                    chunk * bufferedChunks
                );
            }
            buffer.copyToChannel(data, i, 0);
        }
        this.buffer = buffer;
    }

    /** Reset all stream state; invoked by the controller when loading starts. */
    async start() {
        delete this.buffer;
        this.headerRecieved = false;
        this.audioType = '';
        this.errored = false;
        this.buffered = 0;
        this.sampleRate = 0;
        this.bufferedSamples = 0;
        this.duration = 0;
        this.loaded = false;
        if (this.playing) this.output.stop();
        this.playing = false;
        this.lastStartTime = this.ac.currentTime;
    }

    end(done: boolean, reason?: string): void {
        // Finalize whenever any PCM was decoded — checking `this.buffer` here
        // (as before) never finalized tracks shorter than bufferPlayDuration,
        // because checkBufferedPlay() never merged a buffer for them.
        if (done && this.audioData.length > 0) {
            this.loaded = true;
            delete this.controller;
            this.mergeBuffers();
            this.duration = this.buffered;
            this.audioData = [];
            this.decoder?.destroy();
            delete this.decoder;
            delete this.parser;
        } else {
            logger.warn(44, reason ?? '');
        }
    }

    /** Create a fresh node for the merged buffer and start it at `when` seconds. */
    private playAudio(when?: number) {
        if (!this.buffer) return;
        this.lastStartTime = this.ac.currentTime;
        if (this.playing) this.output.stop();
        this.emit('play');
        this.createSourceNode(this.buffer);
        this.output.start(0, when);
        this.playing = true;
        this.output.addEventListener('ended', () => {
            this.playing = false;
            this.emit('end');
            // Manual restart covers loop being enabled after node creation.
            if (this.loop && !this.output.loop) this.play(0);
        });
    }

    play(when?: number): void {
        if (this.playing || this.errored) return;
        if (this.loaded && this.buffer) {
            // Do not set `playing` before playAudio(): playAudio only calls
            // output.stop() when `playing` is already true, and stopping a
            // node that was never started throws InvalidStateError.
            this.playAudio(when);
        } else {
            this.controller?.start();
        }
    }

    private createSourceNode(buffer: AudioBuffer) {
        if (!this.target) return;
        const node = this.ac.createBufferSource();
        node.buffer = buffer;
        if (this.playing) this.output.stop();
        this.playing = false;
        this.output = node;
        node.connect(this.target.input);
        node.loop = this.loop;
    }

    stop(): number {
        if (this.playing) this.output.stop();
        this.playing = false;
        return this.ac.currentTime - this.lastStartTime;
    }

    connect(target: IAudioInput): void {
        this.target = target;
    }

    setLoop(loop: boolean): void {
        this.loop = loop;
    }
}
/**
 * Audio source backed by an HTMLAudioElement: the browser handles loading
 * and decoding, while playback is routed through the AudioContext via a
 * MediaElementAudioSourceNode.
 */
export class AudioElementSource extends AudioSource {
    output: MediaElementAudioSourceNode;
    /** The underlying audio element. */
    readonly audio: HTMLAudioElement;

    constructor(context: AudioContext) {
        super(context);
        const element = new Audio();
        // Defer network activity until playback is actually requested.
        element.preload = 'none';
        this.output = context.createMediaElementSource(element);
        this.audio = element;
        element.addEventListener('play', () => {
            this.playing = true;
            this.emit('play');
        });
        element.addEventListener('ended', () => {
            this.playing = false;
            this.emit('end');
        });
    }

    /**
     * Set the playback source URL.
     * @param url Audio resource address.
     */
    setSource(url: string) {
        this.audio.src = url;
    }

    play(when: number = 0): void {
        if (this.playing) return;
        this.audio.currentTime = when;
        this.audio.play();
    }

    stop(): number {
        const position = this.audio.currentTime;
        this.audio.pause();
        this.playing = false;
        this.emit('end');
        return position;
    }

    connect(target: IAudioInput): void {
        this.output.connect(target.input);
    }

    setLoop(loop: boolean): void {
        this.audio.loop = loop;
    }
}
/**
 * Audio source backed by a fully decoded AudioBuffer; a fresh
 * AudioBufferSourceNode is created for every playback (source nodes are
 * single-use).
 */
export class AudioBufferSource extends AudioSource {
    output: AudioBufferSourceNode;
    /** Decoded audio data. */
    buffer?: AudioBuffer;
    /** Whether playback should loop. */
    private loop: boolean = false;
    /** AudioContext time at which the last playback started. */
    private lastStartTime: number = 0;
    private target?: IAudioInput;

    constructor(context: AudioContext) {
        super(context);
        this.output = context.createBufferSource();
    }

    /**
     * Set this source's audio data.
     * @param buffer Either encoded bytes (decoded via decodeAudioData) or an
     * already decoded AudioBuffer.
     */
    async setBuffer(buffer: ArrayBuffer | AudioBuffer) {
        if (buffer instanceof ArrayBuffer) {
            this.buffer = await this.ac.decodeAudioData(buffer);
        } else {
            this.buffer = buffer;
        }
    }

    play(when?: number): void {
        if (this.playing || !this.buffer) return;
        this.playing = true;
        this.lastStartTime = this.ac.currentTime;
        this.emit('play');
        this.createSourceNode(this.buffer);
        this.output.start(0, when);
        this.output.addEventListener('ended', () => {
            this.playing = false;
            this.emit('end');
            // Manual restart covers loop being enabled after node creation.
            if (this.loop && !this.output.loop) this.play(0);
        });
    }

    /** Create a new single-use source node wired to the current target. */
    private createSourceNode(buffer: AudioBuffer) {
        if (!this.target) return;
        const node = this.ac.createBufferSource();
        node.buffer = buffer;
        this.output = node;
        node.connect(this.target.input);
        node.loop = this.loop;
    }

    stop(): number {
        // Guard: stop() on a node that was never started throws
        // InvalidStateError (previously unguarded).
        if (this.playing) this.output.stop();
        this.playing = false;
        return this.ac.currentTime - this.lastStartTime;
    }

    connect(target: IAudioInput): void {
        this.target = target;
    }

    setLoop(loop: boolean): void {
        this.loop = loop;
    }
}

View File

@ -0,0 +1,55 @@
// Probe element used only for canPlayType() detection; never attached to the DOM.
const audio = new Audio();
// Cache of canPlayType() verdicts, keyed by MIME type string.
const supportMap = new Map<string, boolean>();
/** MIME type strings (with codec hints) used for playback-support detection. */
export const enum AudioType {
    Mp3 = 'audio/mpeg',
    Wav = 'audio/wav; codecs="1"',
    Flac = 'audio/flac',
    Opus = 'audio/ogg; codecs="opus"',
    Ogg = 'audio/ogg; codecs="vorbis"',
    Aac = 'audio/aac'
}
/**
 * Check whether the browser can natively play the given audio type.
 * Results are cached, so repeated queries are cheap.
 * @param type Audio MIME type to check.
 */
export function isAudioSupport(type: AudioType): boolean {
    const cached = supportMap.get(type);
    if (cached !== undefined) return cached;
    // canPlayType() answers '', 'maybe' or 'probably'.
    const verdict = audio.canPlayType(type);
    const canPlay = verdict === 'maybe' || verdict === 'probably';
    supportMap.set(type, canPlay);
    return canPlay;
}
/** Maps lowercase file extensions to their audio MIME type. */
const typeMap = new Map<string, AudioType>([
    ['ogg', AudioType.Ogg],
    ['mp3', AudioType.Mp3],
    ['wav', AudioType.Wav],
    ['flac', AudioType.Flac],
    ['opus', AudioType.Opus],
    ['aac', AudioType.Aac]
]);
/**
 * Guess the audio type of a file from its extension.
 * @param file File name or path.
 * @returns The matching AudioType, or '' when the extension is missing or unknown.
 */
export function guessTypeByExt(file: string): AudioType | '' {
    const match = /\.([a-zA-Z\d]+)$/.exec(file);
    if (!match) return '';
    return typeMap.get(match[1].toLocaleLowerCase()) ?? '';
}
// Warm the support cache eagerly so later playback-path queries are
// synchronous cache hits. (Leftover debug console.log(supportMap) removed.)
isAudioSupport(AudioType.Ogg);
isAudioSupport(AudioType.Mp3);
isAudioSupport(AudioType.Wav);
isAudioSupport(AudioType.Flac);
isAudioSupport(AudioType.Opus);
isAudioSupport(AudioType.Aac);

View File

@ -7,3 +7,7 @@ Mota.register('module', 'Weather', {
WeatherController,
RainWeather
});
export * from './weather';
export * from './audio';
export * from './loader';

View File

@ -0,0 +1 @@
export * from './stream';

125
src/module/loader/stream.ts Normal file
View File

@ -0,0 +1,125 @@
import { logger } from '@/core/common/logger';
import EventEmitter from 'eventemitter3';
export interface IStreamController<T = void> {
    /** Whether the stream transfer is currently in progress. */
    readonly loading: boolean;
    /**
     * Start the stream transfer.
     */
    start(): Promise<T>;
    /**
     * Cancel the stream transfer.
     * @param reason Reason for the cancellation.
     */
    cancel(reason?: string): void;
}
export interface IStreamReader<T = any> {
    /**
     * Receive one chunk of streamed data.
     * @param data Chunk bytes; undefined once the stream has finished.
     * @param done Whether the stream has finished.
     */
    pump(
        data: Uint8Array | undefined,
        done: boolean,
        response: Response
    ): Promise<void>;
    /**
     * Called when this reader is piped into a controller.
     * @param controller The controller this reader is now attached to.
     */
    piped(controller: IStreamController<T>): void;
    /**
     * Called once when the transfer starts, before the first pump().
     * @param stream The readable byte stream of the response.
     * @param controller The controlling loader.
     */
    start(
        stream: ReadableStream,
        controller: IStreamController<T>,
        response: Response
    ): Promise<void>;
    /**
     * Called when the transfer ends.
     * @param done false indicates the transfer was cancelled or failed.
     * @param reason Reason for an unsuccessful end.
     */
    end(done: boolean, reason?: string): void;
}
/** Events emitted by StreamLoader. */
interface StreamLoaderEvent {
    data: [data: Uint8Array | undefined, done: boolean];
}
/**
 * Streams a URL via fetch() and pumps each received chunk to every piped
 * IStreamReader.
 */
export class StreamLoader
    extends EventEmitter<StreamLoaderEvent>
    implements IStreamController<void>
{
    /** Readers that receive every chunk of this stream. */
    private target: Set<IStreamReader> = new Set();
    /** Readable stream of the in-flight response. */
    private stream?: ReadableStream;
    /** Reader locked onto the stream; kept so cancel() works while locked. */
    private reader?: ReadableStreamDefaultReader<Uint8Array>;
    loading: boolean = false;

    constructor(public readonly url: string) {
        super();
    }

    /**
     * Pipe this stream into a reader. Must be called before start().
     * @param reader Target that will receive the streamed chunks.
     */
    pipe(reader: IStreamReader) {
        if (this.loading) {
            logger.warn(46);
            return;
        }
        this.target.add(reader);
        reader.piped(this);
        return this;
    }

    async start() {
        if (this.loading) return;
        this.loading = true;
        const response = await window.fetch(this.url);
        const stream = response.body;
        if (!stream) {
            logger.error(23);
            // Previously `loading` stayed true forever on this branch.
            this.loading = false;
            return;
        }
        this.stream = stream;
        // Lock a reader onto the stream we already null-checked (the old
        // `response.body?.getReader()` left `reader` possibly undefined).
        const reader = stream.getReader();
        this.reader = reader;
        const targets = [...this.target];
        try {
            await Promise.all(targets.map(v => v.start(stream, this, response)));
            // Pump chunks to every reader until the stream is exhausted.
            while (true) {
                const { value, done } = await reader.read();
                await Promise.all(targets.map(v => v.pump(value, done, response)));
                if (done) break;
            }
            this.loading = false;
            targets.forEach(v => v.end(true));
        } catch (e) {
            this.loading = false;
            targets.forEach(v => v.end(false, String(e)));
            logger.error(26, this.url, String(e));
        }
    }

    cancel(reason?: string) {
        if (!this.stream) return;
        // A locked stream throws on cancel(); go through the reader instead.
        if (this.reader) this.reader.cancel(reason);
        else this.stream.cancel(reason);
        this.loading = false;
        this.target.forEach(v => v.end(false, reason));
    }
}

View File

@ -210,13 +210,14 @@ type SoundIds =
| 'zone.mp3'
type BgmIds =
| 'beforeBoss.mp3'
| 'beforeBoss.opus'
| 'cave.mp3'
| 'escape.mp3'
| 'escape2.mp3'
| 'grass.mp3'
| 'mount.mp3'
| 'mount.opus'
| 'night.mp3'
| 'output6.ogg'
| 'palaceCenter.mp3'
| 'palaceNorth.mp3'
| 'palaceSouth.mp3'