From 5265b0a90ede122060ec8e3ff93b5e9c96025d10 Mon Sep 17 00:00:00 2001
From: unanmed <1319491857@qq.com>
Date: Mon, 13 Jan 2025 22:24:40 +0800
Subject: [PATCH] =?UTF-8?q?refactor:=20=E6=96=B0=E7=9A=84=E9=9F=B3?=
 =?UTF-8?q?=E9=A2=91=E7=B3=BB=E7=BB=9F?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/data/logger.json        |   3 +
 src/module/audio/effect.ts  | 266 ++++++++++++++++++++++++
 src/module/audio/index.ts   |   4 +
 src/module/audio/player.ts  | 404 ++++++++++++++++++++++++++++++++++++
 src/module/audio/source.ts  | 234 +++++++++++++++++++++
 src/module/audio/support.ts |  46 ++++
 src/module/index.ts         |   4 +
 src/module/loader/index.ts  |   1 +
 src/module/loader/stream.ts |  12 ++
 9 files changed, 974 insertions(+)
 create mode 100644 src/module/audio/effect.ts
 create mode 100644 src/module/audio/index.ts
 create mode 100644 src/module/audio/player.ts
 create mode 100644 src/module/audio/source.ts
 create mode 100644 src/module/audio/support.ts
 create mode 100644 src/module/loader/index.ts

diff --git a/src/data/logger.json b/src/data/logger.json
index 4561353..51d6b17 100644
--- a/src/data/logger.json
+++ b/src/data/logger.json
@@ -22,6 +22,7 @@
     "20": "Cannot create render element for tag '$1', since there's no registration for it.",
     "21": "Incorrect render prop type is delivered. key: '$1', expected type: '$2', delivered type: '$3'",
     "22": "Incorrect props for custom tag. Please ensure you have delivered 'item' prop and other required props.",
+    "23": "Cannot get reader when fetching '$1'.",
     "1101": "Shadow extension needs 'floor-hero' extension as dependency.",
     "1201": "Floor-damage extension needs 'floor-binder' extension as dependency.",
     "1301": "Portal extension need 'floor-binder' extension as dependency.",
@@ -71,6 +72,8 @@
     "41": "Width of text content components must be positive. receive: $1",
     "42": "Repeated Textbox id: '$1'.",
     "43": "Cannot set icon of '$1', since it does not exists. Please ensure you have delivered correct icon id or number.",
+    "44": "Unexpected end when loading stream audio. Reason: '$1'.",
+    "45": "Audio route with id '$1' already exists. The new route will override the old one.",
     "1001": "Item-detail extension needs 'floor-binder' and 'floor-damage' extension as dependency.",
     "1101": "Cannot add new effect to point effect instance, for there's no more reserve space for it. Please increase the max count of the instance."
 }
diff --git a/src/module/audio/effect.ts b/src/module/audio/effect.ts
new file mode 100644
index 0000000..25281f0
--- /dev/null
+++ b/src/module/audio/effect.ts
@@ -0,0 +1,266 @@
+import { isNil } from 'lodash-es';
+import { sleep } from 'mutate-animate';
+
+export interface IAudioInput {
+    /** 输入节点 */
+    input: AudioNode;
+}
+
+export interface IAudioOutput {
+    /** 输出节点 */
+    output: AudioNode;
+}
+
+export abstract class AudioEffect implements IAudioInput, IAudioOutput {
+    /** 输出节点 */
+    abstract output: AudioNode;
+    /** 输入节点 */
+    abstract input: AudioNode;
+
+    constructor(public readonly ac: AudioContext) {}
+
+    /**
+     * 当音频播放结束时触发,可以用于节点结束后处理
+     */
+    abstract end(): void;
+
+    /**
+     * 当音频开始播放时触发,可以用于节点初始化
+     */
+    abstract start(): void;
+
+    /**
+     * 连接至其他效果器
+     * @param target 目标输入
+     * @param output 当前效果器输出通道
+     * @param input 目标效果器的输入通道
+     */
+    connect(target: IAudioInput, output?: number, input?: number) {
+        this.output.connect(target.input, output, input);
+    }
+
+    /**
+     * 与其他效果器取消连接
+     * @param target 目标输入
+     * @param output 当前效果器输出通道
+     * @param input 目标效果器的输入通道
+     */
+    disconnect(target?: IAudioInput, output?: number, input?: number) {
+        if (!target) {
+            if (!isNil(output)) {
+                this.output.disconnect(output);
+            } else {
+                this.output.disconnect();
+            }
+        } else {
+            if (!isNil(output)) {
+                if (!isNil(input)) {
+                    this.output.disconnect(target.input, output, input);
+                } else {
+                    this.output.disconnect(target.input, output);
+                }
+            } else {
+                this.output.disconnect(target.input);
+            }
+        }
+    }
+}
+
+export class StereoEffect extends AudioEffect {
+    output: PannerNode;
+    input: PannerNode;
+
+    constructor(ac: AudioContext) {
+        super(ac);
+        const panner = ac.createPanner();
+        this.input = panner;
+        this.output = panner;
+    }
+
+    /**
+     * 设置音频朝向,x正方向水平向右,y正方向垂直于地面向上,z正方向垂直屏幕远离用户
+     * @param x 朝向x坐标
+     * @param y 朝向y坐标
+     * @param z 朝向z坐标
+     */
+    setOrientation(x: number, y: number, z: number) {
+        this.input.orientationX.value = x;
+        this.input.orientationY.value = y;
+        this.input.orientationZ.value = z;
+    }
+
+    /**
+     * 设置音频位置,x正方向水平向右,y正方向垂直于地面向上,z正方向垂直屏幕远离用户
+     * @param x 位置x坐标
+     * @param y 位置y坐标
+     * @param z 位置z坐标
+     */
+    setPosition(x: number, y: number, z: number) {
+        this.input.positionX.value = x;
+        this.input.positionY.value = y;
+        this.input.positionZ.value = z;
+    }
+
+    end(): void {}
+
+    start(): void {}
+}
+
+export class VolumeEffect extends AudioEffect {
+    output: GainNode;
+    input: GainNode;
+
+    constructor(ac: AudioContext) {
+        super(ac);
+        const gain = ac.createGain();
+        this.input = gain;
+        this.output = gain;
+    }
+
+    /**
+     * 设置音量大小
+     * @param volume 音量大小
+     */
+    setVolume(volume: number) {
+        this.input.gain.value = volume;
+    }
+
+    /**
+     * 获取音量大小
+     */
+    getVolume(): number {
+        return this.input.gain.value;
+    }
+
+    end(): void {}
+
+    start(): void {}
+}
+
+export class ChannelVolumeEffect extends AudioEffect {
+    output: ChannelMergerNode;
+    input: ChannelSplitterNode;
+
+    /** 所有的音量控制节点 */
+    private readonly gain: GainNode[] = [];
+
+    constructor(ac: AudioContext) {
+        super(ac);
+        const splitter = ac.createChannelSplitter();
+        const merger = ac.createChannelMerger();
+        this.output = merger;
+        this.input = splitter;
+        for (let i = 0; i < 6; i++) {
+            const gain = ac.createGain();
+            splitter.connect(gain, i);
+            gain.connect(merger, 0, i);
+            this.gain.push(gain);
+        }
+    }
+
+    /**
+     * 设置某个声道的音量大小
+     * @param channel 要设置的声道
+     * @param volume 这个声道的音量大小
+     */
+    setVolume(channel: number, volume: number) {
+        const gain = this.gain[channel];
+        if (!gain) return;
+        gain.gain.value = volume;
+    }
+
+    /**
+     * 获取某个声道的音量大小
+     * @param channel 要获取的声道
+     */
+    getVolume(channel: number): number {
+        const gain = this.gain[channel];
+        if (!gain) return 0;
+        return gain.gain.value;
+    }
+
+    end(): void {}
+
+    start(): void {}
+}
+
+export class DelayEffect extends AudioEffect {
+    output: DelayNode;
+    input: DelayNode;
+
+    constructor(ac: AudioContext) {
+        super(ac);
+        const delay = ac.createDelay();
+        this.input = delay;
+        this.output = delay;
+    }
+
+    /**
+     * 设置延迟时长
+     * @param delay 延迟时长,单位秒
+     */
+    setDelay(delay: number) {
+        this.input.delayTime.value = delay;
+    }
+
+    /**
+     * 获取延迟时长
+     */
+    getDelay() {
+        return this.input.delayTime.value;
+    }
+
+    end(): void {}
+
+    start(): void {}
+}
+
+export class EchoEffect extends AudioEffect {
+    output: DelayNode;
+    input: DelayNode;
+
+    /** 延迟节点 */
+    private readonly delay: DelayNode;
+    /** 反馈增益节点 */
+    private readonly gainNode: GainNode;
+    /** 当前增益 */
+    private gain: number = 0.5;
+    /** 是否正在播放 */
+    private playing: boolean = false;
+
+    constructor(ac: AudioContext) {
+        super(ac);
+        const delay = ac.createDelay();
+        this.input = delay;
+        this.output = delay;
+        const gain = ac.createGain();
+        gain.gain.value = 0.5;
+        delay.delayTime.value = 0.05;
+        delay.connect(gain);
+        gain.connect(delay);
+        this.delay = delay;
+        this.gainNode = gain;
+    }
+
+    /**
+     * 设置回声反馈增益大小
+     * @param gain 增益大小,范围 0-1,大于等于1的视为0.5,小于0的视为0
+     */
+    setFeedbackGain(gain: number) {
+        const resolved = gain >= 1 ? 0.5 : gain < 0 ? 0 : gain;
+        this.gain = resolved;
+        if (this.playing) this.gainNode.gain.value = resolved;
+    }
+
+    /**
+     * 设置回声间隔时长
+     * @param delay 回声时长,范围 0.01-Infinity,小于0.01的视为0.01
+     */
+    setEchoDelay(delay: number) {
+        const resolved = delay < 0.01 ? 0.01 : delay;
+        this.delay.delayTime.value = resolved;
+    }
+
+    /**
+     * 获取反馈节点增益
+     */
+    getFeedbackGain() {
+        return this.gain;
+    }
+
+    /**
+     * 获取回声间隔时长
+     */
+    getEchoDelay() {
+        return this.delay.delayTime.value;
+    }
+
+    end(): void {
+        this.playing = false;
+        const echoTime = Math.ceil(Math.log(0.001) / Math.log(this.gain)) + 10;
+        sleep(this.delay.delayTime.value * echoTime).then(() => {
+            if (!this.playing) this.gainNode.gain.value = 0;
+        });
+    }
+
+    start(): void {
+        this.playing = true;
+        this.gainNode.gain.value = this.gain;
+    }
+}
diff --git a/src/module/audio/index.ts b/src/module/audio/index.ts
new file mode 100644
index 0000000..1798591
--- /dev/null
+++ b/src/module/audio/index.ts
@@ -0,0 +1,4 @@
+export * from './support';
+export * from './effect';
+export * from './player';
+export * from './source';
diff --git a/src/module/audio/player.ts b/src/module/audio/player.ts
new file mode 100644
index 0000000..7468a1f
--- /dev/null
+++ b/src/module/audio/player.ts
@@ -0,0 +1,404 @@
+import EventEmitter from 'eventemitter3';
+import {
+    AudioBufferSource,
+    AudioElementSource,
+    AudioSource,
+    AudioStreamSource
+} from './source';
+import {
+    AudioEffect,
+    ChannelVolumeEffect,
+    EchoEffect,
+    IAudioOutput,
+    StereoEffect,
+    VolumeEffect
+} from './effect';
+import { isNil } from 'lodash-es';
+import { logger } from '@/core/common/logger';
+import { sleep } from 'mutate-animate';
+
+interface AudioPlayerEvent {}
+
+export class AudioPlayer extends EventEmitter<AudioPlayerEvent> {
+    /** 音频播放上下文 */
+    readonly ac: AudioContext;
+
+    /** 所有的音频播放路由 */
+    readonly audioRoutes: Map<string, AudioRoute> = new Map();
+    /** 音量节点 */
+    readonly gain: GainNode;
+
+    constructor() {
+        super();
+        this.ac = new AudioContext();
+        this.gain = this.ac.createGain();
+        this.gain.connect(this.ac.destination);
+    }
+
+    /**
+     * 设置音量
+     * @param volume 音量
+     */
+    setVolume(volume: number) {
+        this.gain.gain.value = volume;
+    }
+
+    /**
+     * 获取音量
+     */
+    getVolume() {
+        return this.gain.gain.value;
+    }
+
+    /**
+     * 创建一个音频源
+     * @param Source 音频源类
+     */
+    createSource<T extends AudioSource>(
+        Source: new (ac: AudioContext) => T
+    ): T {
+        return new Source(this.ac);
+    }
+
+    /**
+     * 创建一个兼容流式音频源,可以与流式加载相结合,主要用于处理 opus ogg 不兼容的情况
+     */
+    createStreamSource() {
+        return new AudioStreamSource(this.ac);
+    }
+
+    /**
+     * 创建一个通过 audio 元素播放的音频源
+     */
+    createElementSource() {
+        return new AudioElementSource(this.ac);
+    }
+
+    /**
+     * 创建一个通过 AudioBuffer 播放的音频源
+     */
+    createBufferSource() {
+        return new AudioBufferSource(this.ac);
+    }
+
+    /**
+     * 获取音频目的地
+     */
+    getDestination() {
+        return this.gain;
+    }
+
+    /**
+     * 创建一个音频效果器
+     * @param Effect 效果器类
+     */
+    createEffect<T extends AudioEffect>(
+        Effect: new (ac: AudioContext) => T
+    ): T {
+        return new Effect(this.ac);
+    }
+
+    /**
+     * 创建一个修改音量的效果器
+     */
+    createVolumeEffect() {
+        return new VolumeEffect(this.ac);
+    }
+
+    /**
+     * 创建一个立体声效果器
+     */
+    createStereoEffect() {
+        return new StereoEffect(this.ac);
+    }
+
+    /**
+     * 创建一个修改单个声道音量的效果器
+     */
+    createChannelVolumeEffect() {
+        return new ChannelVolumeEffect(this.ac);
+    }
+
+    /**
+     * 创建一个回声效果器
+     */
+    createEchoEffect() {
+        return new EchoEffect(this.ac);
+    }
+
+    /**
+     * 创建一个音频播放路由
+     * @param source 音频源
+     */
+    createRoute(source: AudioSource) {
+        return new AudioRoute(source, this);
+    }
+
+    /**
+     * 添加一个音频播放路由,可以直接被播放
+     * @param id 这个音频播放路由的名称
+     * @param route 音频播放路由对象
+     */
+    addRoute(id: string, route: AudioRoute) {
+        if (this.audioRoutes.has(id)) {
+            logger.warn(45, id);
+        }
+        this.audioRoutes.set(id, route);
+    }
+
+    /**
+     * 根据名称获取音频播放路由对象
+     * @param id 音频播放路由的名称
+     */
+    getRoute(id: string) {
+        return this.audioRoutes.get(id);
+    }
+
+    /**
+     * 播放音频
+     * @param id 音频名称
+     * @param when 从音频的哪个位置开始播放,单位秒
+     */
+    play(id: string, when?: number) {
+        this.getRoute(id)?.play(when);
+    }
+
+    /**
+     * 设置听者位置,x正方向水平向右,y正方向垂直于地面向上,z正方向垂直屏幕远离用户
+     * @param x 位置x坐标
+     * @param y 位置y坐标
+     * @param z 位置z坐标
+     */
+    setListenerPosition(x: number, y: number, z: number) {
+        const listener = this.ac.listener;
+        listener.positionX.value = x;
+        listener.positionY.value = y;
+        listener.positionZ.value = z;
+    }
+
+    /**
+     * 设置听者朝向,x正方向水平向右,y正方向垂直于地面向上,z正方向垂直屏幕远离用户
+     * @param x 朝向x坐标
+     * @param y 朝向y坐标
+     * @param z 朝向z坐标
+     */
+    setListenerOrientation(x: number, y: number, z: number) {
+        const listener = this.ac.listener;
+        listener.forwardX.value = x;
+        listener.forwardY.value = y;
+        listener.forwardZ.value = z;
+    }
+
+    /**
+     * 设置听者头顶朝向,x正方向水平向右,y正方向垂直于地面向上,z正方向垂直屏幕远离用户
+     * @param x 头顶朝向x坐标
+     * @param y 头顶朝向y坐标
+     * @param z 头顶朝向z坐标
+     */
+    setListenerUp(x: number, y: number, z: number) {
+        const listener = this.ac.listener;
+        listener.upX.value = x;
+        listener.upY.value = y;
+        listener.upZ.value = z;
+    }
+}
+
+type AudioStartHook = (route: AudioRoute) => void;
+type AudioEndHook = (time: number, route: AudioRoute) => void;
+
+interface AudioRouteEvent {
+    updateEffect: [];
+    play: [];
+    stop: [];
+    pause: [];
+    resume: [];
+}
+
+export class AudioRoute
+    extends EventEmitter<AudioRouteEvent>
+    implements IAudioOutput
+{
+    output: AudioNode;
+
+    /** 效果器路由图 */
+    readonly effectRoute: AudioEffect[] = [];
+
+    /** 结束时长,当音频暂停或停止时,会经过这么长时间之后才真正终止播放,期间可以做音频淡入淡出等效果 */
+    endTime: number = 0;
+
+    /** 是否已暂停,注意停止播放是不算暂停的 */
+    paused: boolean = false;
+    /** 暂停时刻 */
+    private pauseTime: number = 0;
+
+    private audioStartHook?: AudioStartHook;
+    private audioEndHook?: AudioEndHook;
+
+    constructor(
+        public readonly source: AudioSource,
+        public readonly player: AudioPlayer
+    ) {
+        super();
+        this.output = source.output;
+    }
+
+    /**
+     * 设置结束时间,暂停或停止时,会经过这么长时间才终止音频的播放,这期间可以做音频淡出的效果。
+     * @param time 暂停或停止时,经过多长时间之后才会结束音频的播放
+     */
+    setEndTime(time: number) {
+        this.endTime = time;
+    }
+
+    /**
+     * 当音频播放时执行的函数,可以用于音频淡入效果
+     * @param fn 音频开始播放时执行的函数
+     */
+    onStart(fn?: AudioStartHook) {
+        this.audioStartHook = fn;
+    }
+
+    /**
+     * 当音频暂停或停止时执行的函数,可以用于音频淡出效果
+     * @param fn 音频在暂停或停止时执行的函数,不填时表示取消这个钩子。
+     * 包含两个参数,第一个参数是结束时长,第二个参数是当前音频播放路由对象
+     */
+    onEnd(fn?: AudioEndHook) {
+        this.audioEndHook = fn;
+    }
+
+    /**
+     * 开始播放这个音频
+     * @param when 从音频的什么时候开始播放,单位秒
+     */
+    play(when?: number) {
+        if (this.source.playing) return;
+        this.link();
+        if (this.effectRoute.length > 0) {
+            const first = this.effectRoute[0];
+            this.source.connect(first);
+        } else {
+            this.source.connect({ input: this.player.getDestination() });
+        }
+        this.source.play(when);
+        this.paused = false;
+        this.pauseTime = 0;
+        this.audioStartHook?.(this);
+        this.startAllEffect();
+        this.emit('play');
+    }
+
+    /**
+     * 暂停音频播放
+     */
+    async pause() {
+        if (this.paused || !this.source.playing) return;
+        if (this.audioEndHook) {
+            this.audioEndHook(this.endTime, this);
+            await sleep(this.endTime);
+        }
+        const time = this.source.stop();
+        this.pauseTime = time;
+        this.paused = true;
+        this.endAllEffect();
+        this.emit('pause');
+    }
+
+    /**
+     * 继续音频播放
+     */
+    resume() {
+        if (this.source.playing) return;
+        if (this.paused) {
+            this.play(this.pauseTime);
+        } else {
+            this.play(0);
+        }
+        // play() 内部会重置暂停状态并触发开始钩子与效果器,这里只需发送 resume 事件
+        this.emit('resume');
+    }
+
+    /**
+     * 停止音频播放
+     */
+    async stop() {
+        if (!this.source.playing) return;
+        if (this.audioEndHook) {
+            this.audioEndHook(this.endTime, this);
+            await sleep(this.endTime);
+        }
+        this.source.stop();
+        this.paused = false;
+        this.pauseTime = 0;
+        this.endAllEffect();
+        this.emit('stop');
+    }
+
+    /**
+     * 添加效果器
+     * @param effect 要添加的效果,可以是数组,表示一次添加多个
+     * @param index 从哪个位置开始添加,如果大于数组长度,那么加到末尾,如果小于0,那么将会从后面往前数。默认添加到末尾
+     */
+    addEffect(effect: AudioEffect | AudioEffect[], index?: number) {
+        if (isNil(index)) {
+            if (effect instanceof Array) {
+                this.effectRoute.push(...effect);
+            } else {
+                this.effectRoute.push(effect);
+            }
+        } else {
+            if (effect instanceof Array) {
+                this.effectRoute.splice(index, 0, ...effect);
+            } else {
+                this.effectRoute.splice(index, 0, effect);
+            }
+        }
+        this.setOutput();
+        if (this.source.playing) this.link();
+        this.emit('updateEffect');
+    }
+
+    /**
+     * 移除一个效果器
+     * @param effect 要移除的效果
+     */
+    removeEffect(effect: AudioEffect) {
+        const index = this.effectRoute.indexOf(effect);
+        if (index === -1) return;
+        this.effectRoute.splice(index, 1);
+        effect.disconnect();
+        this.setOutput();
+        if (this.source.playing) this.link();
+        this.emit('updateEffect');
+    }
+
+    private setOutput() {
+        const effect = this.effectRoute.at(-1);
+        if (!effect) this.output = this.source.output;
+        else this.output = effect.output;
+    }
+
+    /**
+     * 连接音频路由图
+     */
+    private link() {
+        this.effectRoute.forEach(v => v.disconnect());
+        this.effectRoute.forEach((v, i) => {
+            const next = this.effectRoute[i + 1];
+            if (next) {
+                v.connect(next);
+            } else {
+                // 最后一个效果器连接至播放器的目的地节点,否则经过效果器的声音无法输出
+                v.connect({ input: this.player.getDestination() });
+            }
+        });
+    }
+
+    private startAllEffect() {
+        this.effectRoute.forEach(v => v.start());
+    }
+
+    private endAllEffect() {
+        this.effectRoute.forEach(v => v.end());
+    }
+}
diff --git a/src/module/audio/source.ts b/src/module/audio/source.ts
new file mode 100644
index 0000000..d347a16
--- /dev/null
+++ b/src/module/audio/source.ts
@@ -0,0 +1,234 @@
+import EventEmitter from 'eventemitter3';
+import { IStreamController, IStreamReader } from '../loader';
+import { IAudioInput, IAudioOutput } from './effect';
+import { logger } from '@/core/common/logger';
+
+interface AudioSourceEvent {
+    play: [];
+    end: [];
+}
+
+export abstract class AudioSource
+    extends EventEmitter<AudioSourceEvent>
+    implements IAudioOutput
+{
+    /** 音频源的输出节点 */
+    abstract readonly output: AudioNode;
+
+    /** 是否正在播放 */
+    playing: boolean = false;
+
+    constructor(public readonly ac: AudioContext) {
+        super();
+    }
+
+    /**
+     * 开始播放这个音频源
+     */
+    abstract play(when?: number): void;
+
+    /**
+     * 停止播放这个音频源
+     * @returns 音频暂停的时刻
+     */
+    abstract stop(): number;
+
+    /**
+     * 连接到音频路由图上,每次调用播放的时候都会执行一次
+     * @param target 连接至的目标
+     */
+    abstract connect(target: IAudioInput): void;
+
+    /**
+     * 设置是否循环播放
+     * @param loop 是否循环
+     */
+    abstract setLoop(loop: boolean): void;
+}
+
+export class AudioStreamSource extends AudioSource implements IStreamReader {
+    output: AudioBufferSourceNode;
+
+    /** 音频数据 */
+    buffer?: AudioBuffer;
+
+    /** 是否已经完全加载完毕 */
+    loaded: boolean = false;
+
+    /** 已接收到的音频数据分块 */
+    private chunks: Uint8Array[] = [];
+
+    private controller?: IStreamController;
+    private loop: boolean = false;
+
+    /** 开始播放时刻 */
+    private lastStartTime: number = 0;
+
+    constructor(context: AudioContext) {
+        super(context);
+        this.output = context.createBufferSource();
+    }
+
+    piped(controller: IStreamController): void {
+        this.controller = controller;
+    }
+
+    pump(data: Uint8Array | undefined): void {
+        if (!data) return;
+        // 最简实现:先缓存所有分块,待加载完毕后统一解码
+        this.chunks.push(data);
+    }
+
+    start(): void {
+        this.chunks = [];
+        delete this.buffer;
+    }
+
+    end(done: boolean, reason?: string): void {
+        if (done) {
+            // 将所有分块合并为一个 ArrayBuffer 后解码
+            const length = this.chunks.reduce((n, chunk) => n + chunk.length, 0);
+            const arrayBuffer = new ArrayBuffer(length);
+            const data = new Uint8Array(arrayBuffer);
+            let offset = 0;
+            this.chunks.forEach(chunk => {
+                data.set(chunk, offset);
+                offset += chunk.length;
+            });
+            this.chunks = [];
+            this.ac.decodeAudioData(arrayBuffer).then(buffer => {
+                this.buffer = buffer;
+                this.loaded = true;
+            });
+            delete this.controller;
+        } else {
+            logger.warn(44, reason ?? '');
+        }
+    }
+
+    play(when?: number): void {
+        if (this.playing) return;
+        if (this.loaded && this.buffer) {
+            this.playing = true;
+            this.lastStartTime = this.ac.currentTime;
+            this.emit('play');
+            this.output.start(when);
+            this.output.addEventListener('ended', () => {
+                this.playing = false;
+                this.emit('end');
+                if (this.loop) this.play(0);
+            });
+        } else {
+            this.controller?.start();
+        }
+    }
+
+    stop(): number {
+        this.output.stop();
+        return this.ac.currentTime - this.lastStartTime;
+    }
+
+    connect(target: IAudioInput): void {
+        if (!this.buffer) return;
+        const node = this.ac.createBufferSource();
+        node.buffer = this.buffer;
+        this.output = node;
+        node.connect(target.input);
+        node.loop = this.loop;
+    }
+
+    setLoop(loop: boolean): void {
+        this.loop = loop;
+    }
+}
+
+export class AudioElementSource extends AudioSource {
+    output: MediaElementAudioSourceNode;
+
+    /** audio 元素 */
+    readonly audio: HTMLAudioElement;
+
+    constructor(context: AudioContext) {
+        super(context);
+        const audio = new Audio();
+        audio.preload = 'none';
+        this.output = context.createMediaElementSource(audio);
+        this.audio = audio;
+        audio.addEventListener('play', () => {
+            this.playing = true;
+            this.emit('play');
+        });
+        audio.addEventListener('ended', () => {
+            this.playing = false;
+            this.emit('end');
+        });
+    }
+
+    /**
+     * 设置音频源的路径
+     * @param url 音频路径
+     */
+    setSource(url: string) {
+        this.audio.src = url;
+    }
+
+    play(when?: number): void {
+        if (this.playing) return;
+        this.audio.currentTime = when ?? 0;
+        this.audio.play();
+    }
+
+    stop(): number {
+        this.audio.pause();
+        this.playing = false;
+        this.emit('end');
+        return this.audio.currentTime;
+    }
+
+    connect(target: IAudioInput): void {
+        this.output.connect(target.input);
+    }
+
+    setLoop(loop: boolean): void {
+        this.audio.loop = loop;
+    }
+}
+
+export class AudioBufferSource extends AudioSource {
+    output: AudioBufferSourceNode;
+
+    /** 音频数据 */
+    buffer?: AudioBuffer;
+    /** 是否循环 */
+    private loop: boolean = false;
+
+    /** 播放开始时刻 */
+    private lastStartTime: number = 0;
+
+    constructor(context: AudioContext) {
+        super(context);
+        this.output = context.createBufferSource();
+    }
+
+    /**
+     * 设置音频源数据
+     * @param buffer 音频源,可以是未解析的 ArrayBuffer,也可以是已解析的 AudioBuffer
+     */
+    async setBuffer(buffer: ArrayBuffer | AudioBuffer) {
+        if (buffer instanceof ArrayBuffer) {
+            this.buffer = await this.ac.decodeAudioData(buffer);
+        } else {
+            this.buffer = buffer;
+        }
+    }
+
+    play(when?: number): void {
+        if (this.playing) return;
+        this.playing = true;
+        this.lastStartTime = this.ac.currentTime;
+        this.emit('play');
+        this.output.start(when);
+        this.output.addEventListener('ended', () => {
+            this.playing = false;
+            this.emit('end');
+            if (this.loop) this.play(0);
+        });
+    }
+
+    stop(): number {
+        this.output.stop();
+        return this.ac.currentTime - this.lastStartTime;
+    }
+
+    connect(target: IAudioInput): void {
+        if (!this.buffer) return;
+        const node = this.ac.createBufferSource();
+        node.buffer = this.buffer;
+        this.output = node;
+        node.connect(target.input);
+        node.loop = this.loop;
+    }
+
+    setLoop(loop: boolean): void {
+        this.loop = loop;
+    }
+}
diff --git a/src/module/audio/support.ts b/src/module/audio/support.ts
new file mode 100644
index 0000000..7489ee8
--- /dev/null
+++ b/src/module/audio/support.ts
@@ -0,0 +1,46 @@
+const audio = new Audio();
+
+const supportMap = new Map<string, boolean>();
+
+/**
+ * 检查一种音频类型是否能被播放
+ * @param type 音频类型
+ */
+export function isAudioSupport(type: string): boolean {
+    if (supportMap.has(type)) return supportMap.get(type)!;
+    else {
+        const support = audio.canPlayType(type);
+        const canPlay = support === 'maybe' || support === 'probably';
+        supportMap.set(type, canPlay);
+        return canPlay;
+    }
+}
+
+const typeMap = new Map([
+    ['ogg', 'audio/ogg; codecs="vorbis"'],
+    ['mp3', 'audio/mpeg'],
+    ['wav', 'audio/wav; codecs="1"'],
+    ['flac', 'audio/flac'],
+    ['opus', 'audio/ogg; codecs="opus"'],
+    ['aac', 'audio/aac']
+]);
+
+/**
+ * 根据文件扩展名猜测其音频类型
+ * @param file 文件名
+ */
+export function guessTypeByExt(file: string) {
+    const ext = /\.[a-zA-Z\d]+$/.exec(file);
+    if (!ext?.[0]) return '';
+    const type = ext[0].slice(1);
+    return typeMap.get(type.toLocaleLowerCase()) ?? '';
+}
+
+isAudioSupport('audio/ogg; codecs="vorbis"');
+isAudioSupport('audio/mpeg');
+isAudioSupport('audio/wav; codecs="1"');
+isAudioSupport('audio/flac');
+isAudioSupport('audio/ogg; codecs="opus"');
+isAudioSupport('audio/aac');
diff --git a/src/module/index.ts b/src/module/index.ts
index a4a8d52..dfb6154 100644
--- a/src/module/index.ts
+++ b/src/module/index.ts
@@ -7,3 +7,7 @@ Mota.register('module', 'Weather', {
     WeatherController,
     RainWeather
 });
+
+export * from './weather';
+export * from './audio';
+export * from './loader';
diff --git a/src/module/loader/index.ts b/src/module/loader/index.ts
new file mode 100644
index 0000000..3b1a34e
--- /dev/null
+++ b/src/module/loader/index.ts
@@ -0,0 +1 @@
+export * from './stream';
diff --git a/src/module/loader/stream.ts b/src/module/loader/stream.ts
index d137798..9657c86 100644
--- a/src/module/loader/stream.ts
+++ b/src/module/loader/stream.ts
@@ -22,6 +22,12 @@ export interface IStreamReader {
      */
     pump(data: Uint8Array | undefined, done: boolean): void;
 
+    /**
+     * 当前对象被传递给加载流时执行的函数
+     * @param controller 传输流控制对象
+     */
+    piped(controller: IStreamController): void;
+
     /**
      * 开始流传输
      * @param stream 传输流对象
@@ -50,6 +56,8 @@ export class StreamLoader
     /** 读取流对象 */
     private stream?: ReadableStream;
 
+    private loading: boolean = false;
+
     constructor(public readonly url: string) {
         super();
     }
@@ -64,6 +72,8 @@
     }
 
     async start() {
+        if (this.loading) return;
+        this.loading = true;
         const response = await window.fetch(this.url);
         const stream = response.body;
         if (!stream) {
@@ -82,12 +92,14 @@
             if (done) break;
         }
 
+        this.loading = false;
         this.target.forEach(v => v.end(true));
     }
 
     cancel(reason?: string) {
         if (!this.stream) return;
         this.stream.cancel(reason);
+        this.loading = false;
         this.target.forEach(v => v.end(false, reason));
     }
 }
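
The snippet below is a minimal usage sketch of the API introduced by this patch, showing how the pieces are meant to fit together: an AudioPlayer owns the AudioContext and master gain, a source is decoded into an AudioBufferSource, and an AudioRoute chains effects between the source and the player destination. It is not part of the diff; the import path, the file URL and the route id are illustrative placeholders, and it assumes AudioRoute.endTime is expressed in milliseconds, matching the sleep() call in pause()/stop().

import { AudioPlayer } from './module/audio'; // import path assumed

const player = new AudioPlayer();

async function playBgm(url: string) {
    // Decode the whole file up front with the buffer-backed source.
    const source = player.createBufferSource();
    const response = await fetch(url);
    await source.setBuffer(await response.arrayBuffer());
    source.setLoop(true);

    // Build a route: source -> volume -> echo -> player destination.
    const route = player.createRoute(source);
    const volume = player.createVolumeEffect();
    const echo = player.createEchoEffect();
    volume.setVolume(0.8);
    echo.setFeedbackGain(0.4);
    echo.setEchoDelay(0.2);
    route.addEffect([volume, echo]);

    // Give pause()/stop() 500 ms before the source actually stops; the hooks
    // here simply cut and restore the volume, but could run a gradual fade.
    route.setEndTime(500);
    route.onEnd(() => volume.setVolume(0));
    route.onStart(() => volume.setVolume(0.8));

    player.addRoute('bgm:main', route); // route id is illustrative
    player.play('bgm:main');
}

playBgm('bgm/theme.mp3'); // URL is illustrative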