Compare commits

...

3 Commits

11 changed files with 346 additions and 118 deletions

View File

@@ -38,7 +38,7 @@ var data_comment_c456ea59_6018_45ef_8bcc_211a24c627dc = {
     "_range": "editor.mode.checkImages(thiseval, './project/images/')",
     "_directory": "./project/images/",
     "_transform": (function (one) {
-        if (one.endsWith('.png') || one.endsWith('.jpg') || one.endsWith('.jpeg') || one.endsWith('.gif'))
+        if (one.endsWith('.png') || one.endsWith('.jpg') || one.endsWith('.jpeg') || one.endsWith('.gif') || one.endsWith('.webp'))
             return one;
         return null;
     }).toString(),
@@ -96,7 +96,7 @@ var data_comment_c456ea59_6018_45ef_8bcc_211a24c627dc = {
     "_range": "editor.mode.checkUnique(thiseval)",
     "_directory": "./project/bgms/",
     "_transform": (function (one) {
-        if (one.endsWith('.mp3') || one.endsWith('.ogg') || one.endsWith('.wav') || one.endsWith('.m4a') || one.endsWith('.flac'))
+        if (one.endsWith('.mp3') || one.endsWith('.ogg') || one.endsWith('.wav') || one.endsWith('.m4a') || one.endsWith('.flac') || one.endsWith('.opus'))
             return one;
         return null;
     }).toString(),

View File

@@ -193,13 +193,14 @@ var data_a1e2fb4a_e986_4524_b0da_9b7ba7c0874d =
         "zone"
     ],
     "bgms": [
-        "beforeBoss.mp3",
+        "beforeBoss.opus",
         "cave.mp3",
         "escape.mp3",
         "escape2.mp3",
         "grass.mp3",
-        "mount.mp3",
+        "mount.opus",
         "night.mp3",
+        "output6.ogg",
         "palaceCenter.mp3",
         "palaceNorth.mp3",
         "palaceSouth.mp3",

View File

@@ -0,0 +1,28 @@
+import { StreamLoader } from '../loader';
+import { audioPlayer, AudioRoute } from './player';
+import { guessTypeByExt, isAudioSupport } from './support';
+
+export function loadAllBgm() {
+    const loading = Mota.require('var', 'loading');
+    loading.once('coreInit', () => {
+        const data = data_a1e2fb4a_e986_4524_b0da_9b7ba7c0874d;
+        for (const bgm of data.main.bgms) {
+            const type = guessTypeByExt(bgm);
+            if (!type) continue;
+            if (isAudioSupport(type)) {
+                const source = audioPlayer.createElementSource();
+                source.setSource(`project/bgms/${bgm}`);
+                source.setLoop(true);
+                const route = new AudioRoute(source, audioPlayer);
+                audioPlayer.addRoute(`bgms.${bgm}`, route);
+            } else {
+                const source = audioPlayer.createStreamSource();
+                const stream = new StreamLoader(`project/bgms/${bgm}`);
+                stream.pipe(source);
+                source.setLoop(true);
+                const route = new AudioRoute(source, audioPlayer);
+                audioPlayer.addRoute(`bgms.${bgm}`, route);
+            }
+        }
+    });
+}
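Review note: a minimal playback sketch (not part of this change) showing how a route registered by loadAllBgm() would then be driven through the shared player. The `bgms.${bgm}` id format follows the addRoute() calls above, and the control methods are the ones added to AudioPlayer in player.ts further down.

```ts
import { audioPlayer } from './player';

// Route ids follow the `bgms.${bgm}` naming used in loadAllBgm().
audioPlayer.play('bgms.cave.mp3');  // starts at 0 s by default
audioPlayer.pause('bgms.cave.mp3'); // pause()/stop() return Promises in the new API
audioPlayer.resume('bgms.cave.mp3');
audioPlayer.stop('bgms.cave.mp3');
```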

View File

@@ -0,0 +1,45 @@
+import { OggVorbisDecoder } from '@wasm-audio-decoders/ogg-vorbis';
+import { IAudioDecodeData, IAudioDecoder } from './source';
+import { OggOpusDecoder } from 'ogg-opus-decoder';
+
+export class VorbisDecoder implements IAudioDecoder {
+    decoder?: OggVorbisDecoder;
+
+    async create(): Promise<void> {
+        this.decoder = new OggVorbisDecoder();
+        await this.decoder.ready;
+    }
+
+    destroy(): void {
+        this.decoder?.free();
+    }
+
+    async decode(data: Uint8Array): Promise<IAudioDecodeData | undefined> {
+        return this.decoder?.decode(data);
+    }
+
+    async flush(): Promise<IAudioDecodeData | undefined> {
+        return await this.decoder?.flush();
+    }
+}
+
+export class OpusDecoder implements IAudioDecoder {
+    decoder?: OggOpusDecoder;
+
+    async create(): Promise<void> {
+        this.decoder = new OggOpusDecoder();
+        await this.decoder.ready;
+    }
+
+    destroy(): void {
+        this.decoder?.free();
+    }
+
+    async decode(data: Uint8Array): Promise<IAudioDecodeData | undefined> {
+        return this.decoder?.decode(data);
+    }
+
+    async flush(): Promise<IAudioDecodeData | undefined> {
+        return await this.decoder?.flush();
+    }
+}
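Review note: both wrappers follow the IAudioDecoder lifecycle that AudioStreamSource drives while streaming. A rough sketch of that flow; `decodeAll` and `chunks` are hypothetical stand-ins for the byte chunks a StreamLoader would deliver, only the methods defined in this file are used.

```ts
import { OpusDecoder } from './decoder';

async function decodeAll(chunks: Uint8Array[]) {
    const decoder = new OpusDecoder();
    await decoder.create();          // instantiate the wasm decoder and wait for ready
    for (const chunk of chunks) {
        const decoded = await decoder.decode(chunk); // may be undefined
        if (decoded) {
            // decoded.channelData holds one Float32Array per channel
        }
    }
    await decoder.flush();           // drain samples still held inside the decoder
    decoder.destroy();               // free the wasm instance
}
```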

View File

@@ -83,7 +83,11 @@ export class StereoEffect extends AudioEffect {
      * @param y y coordinate
      * @param z z coordinate
      */
-    setOrientation(x: number, y: number, z: number) {}
+    setOrientation(x: number, y: number, z: number) {
+        this.output.orientationX.value = x;
+        this.output.orientationY.value = y;
+        this.output.orientationZ.value = z;
+    }

     /**
      * The positive x axis points horizontally to the right, the positive y axis points vertically up from the ground, and the positive z axis points out of the screen away from the user
@@ -91,7 +95,11 @@
      * @param y y coordinate
      * @param z z coordinate
      */
-    setPosition(x: number, y: number, z: number) {}
+    setPosition(x: number, y: number, z: number) {
+        this.output.positionX.value = x;
+        this.output.positionY.value = y;
+        this.output.positionZ.value = z;
+    }

     end(): void {}
@@ -113,12 +121,16 @@ export class VolumeEffect extends AudioEffect {
      *
      * @param volume
      */
-    setVolume(volume: number) {}
+    setVolume(volume: number) {
+        this.output.gain.value = volume;
+    }

     /**
      *
      */
-    getVolume(): number {}
+    getVolume(): number {
+        return this.output.gain.value;
+    }

     end(): void {}
@@ -148,16 +160,22 @@ export class ChannelVolumeEffect extends AudioEffect {
     /**
      *
-     * @param channel
+     * @param channel 0-5
      * @param volume
      */
-    setVolume(channel: number, volume: number) {}
+    setVolume(channel: number, volume: number) {
+        if (!this.gain[channel]) return;
+        this.gain[channel].gain.value = volume;
+    }

     /**
-     *
+     * 0-5
      * @param channel
      */
-    getVolume(channel: number): number {}
+    getVolume(channel: number): number {
+        if (!this.gain[channel]) return 0;
+        return this.gain[channel].gain.value;
+    }

     end(): void {}
@@ -179,12 +197,16 @@ export class DelayEffect extends AudioEffect {
      *
      * @param delay
      */
-    setDelay(delay: number) {}
+    setDelay(delay: number) {
+        this.output.delayTime.value = delay;
+    }

     /**
      *
      */
-    getDelay() {}
+    getDelay() {
+        return this.output.delayTime.value;
+    }

     end(): void {}
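Review note: the stereo setters now write straight to the underlying PannerNode (`this.output`). A minimal usage sketch; the constructor signature here is assumed by analogy with `new DelayEffect(this.ac)` and `new ChannelVolumeEffect(this.ac)` in player.ts, not taken from this diff.

```ts
import { StereoEffect } from './effect';

const ac = new AudioContext();
const stereo = new StereoEffect(ac); // constructor signature assumed, see note above
stereo.setPosition(1, 0, 0);         // place the source to the listener's right
stereo.setOrientation(0, 0, -1);     // point the source back toward the listener
```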

View File

@ -1,4 +1,14 @@
import { loadAllBgm } from './bgmLoader';
import { OpusDecoder, VorbisDecoder } from './decoder';
import { AudioStreamSource } from './source';
import { AudioType } from './support';
loadAllBgm();
AudioStreamSource.registerDecoder(AudioType.Ogg, VorbisDecoder);
AudioStreamSource.registerDecoder(AudioType.Opus, OpusDecoder);
export * from './support'; export * from './support';
export * from './effect'; export * from './effect';
export * from './player'; export * from './player';
export * from './source'; export * from './source';
export * from './bgmLoader';
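Review note: registerDecoder() now takes a constructor, so any class implementing IAudioDecoder can be plugged in the same way for another AudioType. A hypothetical example, not part of this diff (NullAacDecoder is illustrative only and decodes nothing):

```ts
import { AudioStreamSource, IAudioDecodeData, IAudioDecoder } from './source';
import { AudioType } from './support';

class NullAacDecoder implements IAudioDecoder {
    async create(): Promise<void> {}
    destroy(): void {}
    async decode(_data: Uint8Array): Promise<IAudioDecodeData | undefined> {
        return undefined; // a real decoder would return PCM channel data here
    }
    async flush(): Promise<IAudioDecodeData | undefined> {
        return undefined;
    }
}

AudioStreamSource.registerDecoder(AudioType.Aac, NullAacDecoder);
```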

View File

@@ -8,6 +8,7 @@ import {
 import {
     AudioEffect,
     ChannelVolumeEffect,
+    DelayEffect,
     EchoEffect,
     IAudioOutput,
     StereoEffect,
@@ -138,6 +139,16 @@ export class AudioPlayer extends EventEmitter<AudioPlayerEvent> {
         return new ChannelVolumeEffect(this.ac);
     }

+    /**
+     *
+     * |-----------|
+     * Input ----> | DelayNode | ----> Output
+     * |-----------|
+     */
+    createDelay() {
+        return new DelayEffect(this.ac);
+    }
+
     /**
      *
      * ```txt
@@ -187,10 +198,40 @@ export class AudioPlayer extends EventEmitter<AudioPlayerEvent> {
      * @param id
      * @param when
      */
-    play(id: string, when?: number) {
+    play(id: string, when: number = 0) {
         this.getRoute(id)?.play(when);
     }

+    /**
+     *
+     * @param id
+     * @returns
+     */
+    pause(id: string) {
+        const route = this.getRoute(id);
+        if (!route) return Promise.resolve();
+        else return route.pause();
+    }
+
+    /**
+     *
+     * @param id
+     * @returns
+     */
+    stop(id: string) {
+        const route = this.getRoute(id);
+        if (!route) return Promise.resolve();
+        else return route.stop();
+    }
+
+    /**
+     *
+     * @param id
+     */
+    resume(id: string) {
+        this.getRoute(id)?.resume();
+    }
+
     /**
      * The positive x axis points horizontally to the right, the positive y axis points vertically up from the ground, and the positive z axis points out of the screen away from the user
      * @param x x coordinate
@@ -299,7 +340,7 @@ export class AudioRoute
      *
      * @param when
      */
-    play(when?: number) {
+    play(when: number = 0) {
         if (this.source.playing) return;
         this.link();
         if (this.effectRoute.length > 0) {
@@ -430,3 +471,5 @@ export class AudioRoute
         this.effectRoute.forEach(v => v.end());
     }
 }
+
+export const audioPlayer = new AudioPlayer();
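Review note: the new createDelay() mirrors the other effect factories and pairs with the DelayEffect setters implemented in effect.ts. A small sketch; how an effect gets attached to an AudioRoute is outside this diff, so only the factory and setters are shown.

```ts
import { audioPlayer } from './player';

const delay = audioPlayer.createDelay();
delay.setDelay(0.25);          // DelayNode.delayTime is expressed in seconds
console.log(delay.getDelay()); // 0.25
```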

View File

@@ -80,23 +80,25 @@ export interface IAudioDecoder {
      *
      * @param data
      */
-    decode(data: Uint8Array): Promise<IAudioDecodeData>;
+    decode(data: Uint8Array): Promise<IAudioDecodeData | undefined>;

     /**
      * 使
      */
-    flush(): Promise<IAudioDecodeData>;
+    flush(): Promise<IAudioDecodeData | undefined>;
 }

-const fileSignatures: Map<string, AudioType> = new Map([
-    ['49 44 33', AudioType.Mp3],
-    ['4F 67 67 53', AudioType.Ogg],
-    ['52 49 46 46', AudioType.Wav],
-    ['66 4C 61 43', AudioType.Flac],
-    ['4F 70 75 73', AudioType.Opus],
-    ['FF F1', AudioType.Aac],
-    ['FF F9', AudioType.Aac]
-]);
+const fileSignatures: [AudioType, number[]][] = [
+    [AudioType.Mp3, [0x49, 0x44, 0x33]],
+    [AudioType.Ogg, [0x4f, 0x67, 0x67, 0x53]],
+    [AudioType.Wav, [52, 0x49, 0x46, 0x46]],
+    [AudioType.Flac, [0x66, 0x4c, 0x61, 0x43]],
+    [AudioType.Aac, [0xff, 0xf1]],
+    [AudioType.Aac, [0xff, 0xf9]]
+];
+const oggHeaders: [AudioType, number[]][] = [
+    [AudioType.Opus, [0x4f, 0x70, 0x75, 0x73, 0x48, 0x65, 0x61, 0x64]]
+];

 const mimeTypeMap: Record<AudioType, MimeType> = {
     [AudioType.Aac]: 'audio/aac',
@@ -112,7 +114,8 @@ function isOggPage(data: any): data is OggPage {
 }

 export class AudioStreamSource extends AudioSource implements IStreamReader {
-    static readonly decoderMap: Map<AudioType, IAudioDecoder> = new Map();
+    static readonly decoderMap: Map<AudioType, new () => IAudioDecoder> =
+        new Map();
     output: AudioBufferSourceNode;

     /** Audio data */
@@ -138,6 +141,8 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
     /** The moment playback last started */
     private lastStartTime: number = 0;
+    /** Buffered sample count at the previous playback */
+    private lastBufferSamples: number = 0;

     /** Whether the file header has been received */
     private headerRecieved: boolean = false;
@@ -152,12 +157,14 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
     /** Buffered audio data; every bufferChunkSize seconds forms one Float32Array, used for streamed decoding */
     private audioData: Float32Array[][] = [];

+    private errored: boolean = false;
+
     /**
      *
      * @param type
      * @param decoder
      */
-    static registerDecoder(type: AudioType, decoder: IAudioDecoder) {
+    static registerDecoder(type: AudioType, decoder: new () => IAudioDecoder) {
         if (this.decoderMap.has(type)) {
             logger.warn(47, type);
             return;
@@ -184,42 +191,60 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
     }

     async pump(data: Uint8Array | undefined, done: boolean): Promise<void> {
-        if (!data) return;
+        if (!data || this.errored) return;
         if (!this.headerRecieved) {
-            // Check the file header to determine the audio type
-            const toCheck = [...data.slice(0, 16)];
-            const hexArray = toCheck.map(v => v.toString(16).padStart(2, '0'));
-            const hex = hexArray.join(' ');
-            for (const [key, value] of fileSignatures) {
-                if (hex.startsWith(key)) {
-                    this.audioType = value;
+            // Check the file header to determine the audio type; only the first 256 bytes are inspected
+            const toCheck = data.slice(0, 256);
+            for (const [type, value] of fileSignatures) {
+                if (value.every((v, i) => toCheck[i] === v)) {
+                    this.audioType = type;
                     break;
                 }
             }
+            if (this.audioType === AudioType.Ogg) {
+                // If it is ogg, check further whether it is actually opus
+                for (const [key, value] of oggHeaders) {
+                    const has = toCheck.some((_, i) => {
+                        return value.every((v, ii) => toCheck[i + ii] === v);
+                    });
+                    if (has) {
+                        this.audioType = key;
+                        break;
+                    }
+                }
+            }
             if (!this.audioType) {
-                logger.error(25, hex);
+                logger.error(
+                    25,
+                    [...toCheck]
+                        .map(v => v.toString().padStart(2, '0'))
+                        .join(' ')
+                        .toUpperCase()
+                );
                 return;
             }
             // Create the decoder
-            const decoder = AudioStreamSource.decoderMap.get(this.audioType);
-            this.decoder = decoder;
-            if (!decoder) {
+            const Decoder = AudioStreamSource.decoderMap.get(this.audioType);
+            if (!Decoder) {
+                this.errored = true;
                 logger.error(24, this.audioType);
                 return Promise.reject(
                     `Cannot decode stream source type of '${this.audioType}', since there is no registered decoder for that type.`
                 );
             }
+            this.decoder = new Decoder();
             // Create the data parser
             const mime = mimeTypeMap[this.audioType];
             const parser = new CodecParser(mime);
             this.parser = parser;
-            await decoder.create();
+            await this.decoder.create();
             this.headerRecieved = true;
         }
         const decoder = this.decoder;
         const parser = this.parser;
         if (!decoder || !parser) {
+            this.errored = true;
             return Promise.reject(
                 'No parser or decoder attached in this AudioStreamSource'
             );
@@ -234,13 +259,13 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
      *
      */
     private checkSampleRate(info: (OggPage | CodecFrame)[]) {
-        const first = info[0];
-        if (first) {
-            const frame = isOggPage(first) ? first.codecFrames[0] : first;
+        for (const one of info) {
+            const frame = isOggPage(one) ? one.codecFrames[0] : one;
             if (frame) {
                 const rate = frame.header.sampleRate;
                 if (this.sampleRate === 0) {
                     this.sampleRate = rate;
+                    break;
                 } else {
                     if (rate !== this.sampleRate) {
                         logger.warn(48);
@@ -260,6 +285,7 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
     ) {
         // Decode the audio data
         const audioData = await decoder.decode(data);
+        if (!audioData) return;
         // @ts-expect-error Incorrect type declarations in the library
         const audioInfo = [...parser.parseChunk(data)] as (
             | OggPage
@@ -277,6 +303,7 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
      */
     private async decodeFlushData(decoder: IAudioDecoder, parser: CodecParser) {
         const audioData = await decoder.flush();
+        if (!audioData) return;
         // @ts-expect-error Incorrect type declarations in the library
         const audioInfo = [...parser.flush()] as (OggPage | CodecFrame)[];
@@ -303,23 +330,33 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
         const chunk = this.sampleRate * this.bufferChunkSize;
         const sampled = this.bufferedSamples;
         const pushIndex = Math.floor(sampled / chunk);
-        const bufferIndex = sampled % (this.sampleRate * chunk);
+        const bufferIndex = sampled % chunk;
         const dataLength = data.channelData[0].length;
-        const restLength = chunk - bufferIndex;
-        // Put the data into the buffer
+        let buffered = 0;
+        let nowIndex = pushIndex;
+        let toBuffer = bufferIndex;
+        while (buffered < dataLength) {
+            const rest = toBuffer !== 0 ? chunk - bufferIndex : chunk;
             for (let i = 0; i < channels; i++) {
                 const audioData = this.audioData[i];
-                if (!audioData[pushIndex]) {
-                    audioData.push(new Float32Array(chunk * this.sampleRate));
+                if (!audioData[nowIndex]) {
+                    audioData.push(new Float32Array(chunk));
                 }
-                audioData[pushIndex].set(data.channelData[i], bufferIndex);
-                if (restLength < dataLength) {
-                    const nextData = new Float32Array(chunk * this.sampleRate);
-                    nextData.set(data.channelData[i].slice(restLength), 0);
-                    audioData.push(nextData);
+                const toPush = data.channelData[i].slice(
+                    buffered,
+                    buffered + rest
+                );
+                audioData[nowIndex].set(toPush, toBuffer);
             }
+            buffered += rest;
+            nowIndex++;
+            toBuffer = 0;
         }
-        this.buffered += info.reduce((prev, curr) => prev + curr.duration, 0);
+        this.buffered +=
+            info.reduce((prev, curr) => prev + curr.duration, 0) / 1000;
         this.bufferedSamples += info.reduce(
             (prev, curr) => prev + curr.samples,
             0
@@ -330,71 +367,112 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
      *
      */
     private checkBufferedPlay() {
-        if (this.playing || this.loaded) return;
-        const played = this.ac.currentTime - this.lastStartTime;
+        if (this.playing || this.sampleRate === 0) return;
+        const played = this.lastBufferSamples / this.sampleRate;
         const dt = this.buffered - played;
+        if (this.loaded) {
+            this.playAudio(played);
+            return;
+        }
         if (dt < this.bufferPlayDuration) return;
+        console.log(played, this.lastBufferSamples, this.sampleRate);
+        this.lastBufferSamples = this.bufferedSamples;
         // Playback needs to start
+        this.mergeBuffers();
+        if (!this.buffer) return;
+        if (this.playing) this.output.stop();
+        this.createSourceNode(this.buffer);
+        this.output.loop = false;
+        this.output.start(0, played);
+        this.lastStartTime = this.ac.currentTime;
+        this.playing = true;
+        this.output.addEventListener('ended', () => {
+            this.playing = false;
+            this.checkBufferedPlay();
+        });
+    }
+
+    private mergeBuffers() {
         const buffer = this.ac.createBuffer(
             this.audioData.length,
             this.bufferedSamples,
             this.sampleRate
         );
+        this.buffer = buffer;
         const chunk = this.sampleRate * this.bufferChunkSize;
-        const bufferedChunks = Math.floor(this.buffered / chunk);
-        const restLength = this.buffered % chunk;
+        const bufferedChunks = Math.floor(this.bufferedSamples / chunk);
+        const restLength = this.bufferedSamples % chunk;
         for (let i = 0; i < this.audioData.length; i++) {
             const audio = this.audioData[i];
             const data = new Float32Array(this.bufferedSamples);
             for (let j = 0; j < bufferedChunks; j++) {
                 data.set(audio[j], chunk * j);
             }
-            if (restLength !== 0) data.set(audio[bufferedChunks], 0);
-            buffer.copyToChannel(data, i, 0);
-        }
-        this.createSourceNode(buffer);
-        this.output.start(played);
-        this.lastStartTime = this.ac.currentTime;
-        this.output.addEventListener('ended', () => {
-            this.checkBufferedPlay();
-        });
+            if (restLength !== 0) {
+                data.set(
+                    audio[bufferedChunks].slice(0, restLength),
+                    chunk * bufferedChunks
+                );
             }
-    private mergeBuffers() {}
+            buffer.copyToChannel(data, i, 0);
+        }
+        this.buffer = buffer;
+    }

     async start() {
         delete this.buffer;
         this.headerRecieved = false;
         this.audioType = '';
+        this.errored = false;
+        this.buffered = 0;
+        this.sampleRate = 0;
+        this.bufferedSamples = 0;
+        this.duration = 0;
+        this.loaded = false;
+        if (this.playing) this.output.stop();
+        this.playing = false;
+        this.lastStartTime = this.ac.currentTime;
     }

     end(done: boolean, reason?: string): void {
-        if (done) {
+        if (done && this.buffer) {
             this.loaded = true;
             delete this.controller;
             this.mergeBuffers();
-            const played = this.ac.currentTime - this.lastStartTime;
-            this.output.stop();
-            this.play(played);
+            // const played = this.lastBufferSamples / this.sampleRate;
+            // this.playAudio(played);
+            this.duration = this.buffered;
+            this.audioData = [];
+            this.decoder?.destroy();
+            delete this.decoder;
+            delete this.parser;
         } else {
             logger.warn(44, reason ?? '');
         }
     }

-    play(when?: number): void {
-        if (this.playing) return;
-        if (this.loaded && this.buffer) {
-            this.playing = true;
+    private playAudio(when?: number) {
+        if (!this.buffer) return;
         this.lastStartTime = this.ac.currentTime;
+        if (this.playing) this.output.stop();
         this.emit('play');
         this.createSourceNode(this.buffer);
-        this.output.start(when);
+        this.output.start(0, when);
+        this.playing = true;
+        console.log(when);
         this.output.addEventListener('ended', () => {
             this.playing = false;
             this.emit('end');
             if (this.loop && !this.output.loop) this.play(0);
         });
+    }
+
+    play(when?: number): void {
+        if (this.playing || this.errored) return;
+        if (this.loaded && this.buffer) {
+            this.playing = true;
+            this.playAudio(when);
         } else {
             this.controller?.start();
         }
@@ -404,13 +482,16 @@ export class AudioStreamSource extends AudioSource implements IStreamReader {
         if (!this.target) return;
         const node = this.ac.createBufferSource();
         node.buffer = buffer;
+        if (this.playing) this.output.stop();
+        this.playing = false;
         this.output = node;
         node.connect(this.target.input);
         node.loop = this.loop;
     }

     stop(): number {
-        this.output.stop();
+        if (this.playing) this.output.stop();
+        this.playing = false;
         return this.ac.currentTime - this.lastStartTime;
     }
@@ -453,7 +534,7 @@ export class AudioElementSource extends AudioSource {
         this.audio.src = url;
     }

-    play(when: number): void {
+    play(when: number = 0): void {
         if (this.playing) return;
         this.audio.currentTime = when;
         this.audio.play();
@@ -510,7 +591,7 @@ export class AudioBufferSource extends AudioSource {
         this.lastStartTime = this.ac.currentTime;
         this.emit('play');
         this.createSourceNode(this.buffer);
-        this.output.start(when);
+        this.output.start(0, when);
         this.output.addEventListener('ended', () => {
             this.playing = false;
             this.emit('end');
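Review note: a worked example of the corrected chunk bookkeeping in the streaming buffer. The sampleRate and bufferChunkSize values are assumed for illustration, not taken from the diff.

```ts
const sampleRate = 48000;       // Hz, assumed
const bufferChunkSize = 10;     // seconds per chunk, assumed
const chunk = sampleRate * bufferChunkSize;    // 480000 samples per chunk

// Old code allocated Float32Array(chunk * sampleRate) per chunk and wrapped
// with `sampled % (sampleRate * chunk)`; the new code allocates
// Float32Array(chunk) and wraps with `sampled % chunk`.
const sampled = 1_000_000;                     // samples already buffered
const pushIndex = Math.floor(sampled / chunk); // 2 → write into the third chunk
const bufferIndex = sampled % chunk;           // 40000 → offset inside that chunk
```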

View File

@@ -25,21 +25,21 @@ export function isAudioSupport(type: AudioType): boolean {
     }
 }

-const typeMap = new Map<string, string>([
-    ['ogg', 'audio/ogg; codecs="vorbis"'],
-    ['mp3', 'audio/mpeg'],
-    ['wav', 'audio/wav; codecs="1"'],
-    ['flac', 'audio/flac'],
-    ['opus', 'audio/ogg; codecs="opus"'],
-    ['aac', 'audio/aac']
+const typeMap = new Map<string, AudioType>([
+    ['ogg', AudioType.Ogg],
+    ['mp3', AudioType.Mp3],
+    ['wav', AudioType.Wav],
+    ['flac', AudioType.Flac],
+    ['opus', AudioType.Opus],
+    ['aac', AudioType.Aac]
 ]);

 /**
  *
  * @param file
  */
-export function guessTypeByExt(file: string) {
-    const ext = /\.[a-zA-Z]$/.exec(file);
+export function guessTypeByExt(file: string): AudioType | '' {
+    const ext = /\.[a-zA-Z\d]+$/.exec(file);
     if (!ext?.[0]) return '';
     const type = ext[0].slice(1);
     return typeMap.get(type.toLocaleLowerCase()) ?? '';
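Review note: the regex change is what lets guessTypeByExt() recognize real file names. The old pattern `/\.[a-zA-Z]$/` only matched a dot followed by a single trailing letter, so every name in project/bgms/ fell through to ''. A quick illustration using entries from the map above:

```ts
import { guessTypeByExt } from './support';

guessTypeByExt('cave.mp3');        // AudioType.Mp3 (old regex returned '')
guessTypeByExt('beforeBoss.opus'); // AudioType.Opus
guessTypeByExt('output6.ogg');     // AudioType.Ogg
```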

View File

@@ -82,6 +82,7 @@ export class StreamLoader
             return;
         }
         this.target.add(reader);
+        reader.piped(this);
         return this;
     }
@@ -98,25 +99,21 @@ export class StreamLoader
         this.stream = stream;
         const reader = response.body?.getReader();
         const targets = [...this.target];
-        try {
-            await Promise.all(
-                targets.map(v => v.start(stream, this, response))
-            );
+        // try {
+        await Promise.all(targets.map(v => v.start(stream, this, response)));

         // Start the stream transfer
         while (true) {
             const { value, done } = await reader.read();
-            await Promise.all(
-                targets.map(v => v.pump(value, done, response))
-            );
+            await Promise.all(targets.map(v => v.pump(value, done, response)));
             if (done) break;
         }
         this.loading = false;
         targets.forEach(v => v.end(true));
-        } catch (e) {
-            logger.error(26, this.url, String(e));
-        }
+        // } catch (e) {
+        //     logger.error(26, this.url, String(e));
+        // }
     }

     cancel(reason?: string) {

View File

@@ -210,13 +210,14 @@ type SoundIds =
     | 'zone.mp3'

 type BgmIds =
-    | 'beforeBoss.mp3'
+    | 'beforeBoss.opus'
     | 'cave.mp3'
     | 'escape.mp3'
     | 'escape2.mp3'
     | 'grass.mp3'
-    | 'mount.mp3'
+    | 'mount.opus'
     | 'night.mp3'
+    | 'output6.ogg'
     | 'palaceCenter.mp3'
     | 'palaceNorth.mp3'
     | 'palaceSouth.mp3'