// js/audio/audio_audio.js
|
|
import { appState } from "../state.js";
|
|
import {
|
|
updateAudioEditorUI,
|
|
updatePlayheadVisual,
|
|
resetPlayheadVisual,
|
|
} from "./audio_ui.js";
|
|
import {
|
|
initializeAudioContext,
|
|
getAudioContext,
|
|
getMainGainNode,
|
|
} from "../audio.js";
|
|
import {
|
|
startSongPatternPlaybackOnTransport,
|
|
stopSongPatternPlaybackOnTransport,
|
|
} from "../pattern/pattern_audio.js";
|
|
|
|
import { getPixelsPerSecond } from "../utils.js";
|
|
// 🔊 ADIÇÃO: usar a MESMA instância do Tone que o projeto usa
|
|
import * as Tone from "https://esm.sh/tone";
|
|
|
|
// --- Scheduler settings ---
const LOOKAHEAD_INTERVAL_MS = 25.0; // how often the scheduler tick fires
const SCHEDULE_AHEAD_TIME_SEC = 0.5; // 500ms — lookahead window for scheduling clip starts

// --- Internal engine state ---
let audioCtx = null; // native AudioContext (Tone's rawContext once initialized)
let isPlaying = false; // true while editor playback is running
let schedulerIntervalId = null; // setInterval handle driving _schedulerTick
let animationFrameId = null; // requestAnimationFrame handle driving _animationLoop

// Time synchronization
let startTime = 0; // AudioContext time captured when playback (re)started
// (seek/logical playhead times live in appState.audio)

// Loop settings (mirrored from appState.global by updateTransportLoop)
let isLoopActive = false;
let loopStartTimeSec = 0;
let loopEndTimeSec = 8;

// Runtime state
const runtimeClipState = new Map(); // clipId -> { isScheduled } for the current pass
// NOTE: we now store Tone.Player instances instead of raw BufferSources.
const scheduledNodes = new Map(); // eventId -> { player, clipId }
let nextEventId = 0; // monotonically increasing id for scheduledNodes keys

// Callbacks registered via registerCallbacks().
const callbacks = {
  onClipScheduled: null,
  onClipPlayed: null,
};
|
|
|
|
// --- Funções Auxiliares de Tempo (sem alterações) ---
|
|
/**
 * Reads the project BPM from the #bpm-input element, defaulting to 120.
 *
 * Optional chaining guards against the input not being in the DOM yet —
 * previously this threw a TypeError when the element was missing; other
 * call sites in this file already use the `?.` form.
 *
 * @returns {number} beats per minute (always a finite positive number).
 */
function _getBpm() {
  const bpmInput = document.getElementById("bpm-input");
  return parseFloat(bpmInput?.value) || 120;
}
|
|
/** Seconds per beat at the current project BPM. */
function _getSecondsPerBeat() {
  const beatsPerMinute = _getBpm();
  return 60.0 / beatsPerMinute;
}
|
|
/** Converts a position expressed in beats to seconds. */
function _convertBeatToSeconds(beat) {
  const secondsPerBeat = _getSecondsPerBeat();
  return beat * secondsPerBeat;
}
|
|
/** Converts a position expressed in seconds to beats. */
function _convertSecondsToBeat(seconds) {
  const secondsPerBeat = _getSecondsPerBeat();
  return seconds / secondsPerBeat;
}
|
|
|
|
// Guarantees a single context — Tone's rawContext.
function _initContext() {
  if (audioCtx) {
    // A Tone wrapper was stored earlier: unwrap it to the native AudioContext.
    if (audioCtx.rawContext) audioCtx = audioCtx.rawContext;
    return;
  }
  initializeAudioContext();
  // Always keep the native AudioContext, never Tone's wrapper object.
  audioCtx = Tone.getContext().rawContext;
}
|
|
|
|
// Helper: normalizes an AudioBuffer into a ToneAudioBuffer (same context).
function _toToneBuffer(buffer) {
  if (!buffer) return null;

  // Already a Tone.ToneAudioBuffer (has the internal _buffer slot).
  if (buffer._buffer) return buffer;

  // Wrap the raw AudioBuffer (already on Tone's rawContext) without copying.
  const toneBuffer = new Tone.ToneAudioBuffer();
  toneBuffer._buffer = buffer;
  return toneBuffer;
}
|
|
|
|
// --- Lógica Principal do Scheduler (mantida) ---
|
|
|
|
/**
 * Schedules a single clip for playback, synced to the Tone.Transport.
 *
 * @param {object} clip - Clip from appState.audio.clips; must carry a decoded `buffer`.
 * @param {number} absolutePlayTime - Target start time on the AudioContext clock (seconds).
 * @param {number} [durationSec] - How long to play; falls back to the clip/buffer duration.
 * @param {number} [overrideOffsetSec] - Offset inside the buffer, overriding the clip's own
 *   offset (used when resuming mid-clip after a seek or a loop wrap).
 */
function _scheduleClip(clip, absolutePlayTime, durationSec, overrideOffsetSec) {
  if (!clip.buffer) {
    console.warn(`Clip ${clip.id} não possui áudio buffer carregado.`);
    return;
  }

  const toneBuf = _toToneBuffer(clip.buffer);
  if (!toneBuf) return;

  // Reuse the clip's own gain/pan nodes when they are already Tone nodes;
  // otherwise build fresh ones from the clip's stored volume/pan values.
  const gain =
    clip.gainNode instanceof Tone.Gain
      ? clip.gainNode
      : new Tone.Gain(clip.volume ?? 1);
  const pan =
    clip.pannerNode instanceof Tone.Panner
      ? clip.pannerNode
      : new Tone.Panner(clip.pan ?? 0);

  // Rewire gain -> pan -> main output; disconnect first so repeated
  // scheduling of the same clip doesn't accumulate duplicate routes.
  try { gain.disconnect(); } catch {}
  try { pan.disconnect(); } catch {}
  gain.connect(pan).connect(getMainGainNode());

  // .sync() ties the player's start/stop to the Tone.Transport clock.
  const player = new Tone.Player(toneBuf).sync().connect(gain);

  // clip.pitch is in semitones; convert to a playback-rate multiplier.
  const rate = clip.pitch && clip.pitch !== 0 ? Math.pow(2, clip.pitch / 12) : 1;
  player.playbackRate = rate;

  // Translate the AudioContext-clock time into Transport time (seconds).
  const occurrenceInTransportSec =
    absolutePlayTime - startTime + (appState.audio.audioEditorSeekTime || 0);

  const baseOffset = clip.offsetInSeconds ?? clip.offset ?? 0;
  const offset = overrideOffsetSec ?? baseOffset;
  const dur = durationSec ?? clip.durationInSeconds ?? toneBuf.duration;

  // Clamp to non-negative values — Tone throws a RangeError on negatives.
  const safeOccurrence = Math.max(0, occurrenceInTransportSec);
  const safeOffset = Math.max(0, offset);
  const safeDur = dur == null ? undefined : Math.max(0, dur);

  // Use the Transport's CURRENT position (already updated by the caller).
  const transportNow = Tone.Transport.seconds;

  // Small epsilon so a start landing "right now" is never missed.
  const EPS = 0.005;
  const startAt = Math.max(safeOccurrence, transportNow + EPS);

  player.start(startAt, safeOffset, safeDur);

  const eventId = nextEventId++;
  scheduledNodes.set(eventId, { player, clipId: clip.id });

  if (callbacks.onClipScheduled) callbacks.onClipScheduled(clip);

  // When the player stops (buffer end or Transport stop), release bookkeeping
  // and dispose of the player so it doesn't leak.
  player.onstop = () => {
    _handleClipEnd(eventId, clip.id);
    try { player.unsync(); } catch {}
    try { player.dispose(); } catch {}
  };
}
|
|
|
|
/**
 * Clears bookkeeping for a finished clip occurrence and notifies the
 * registered onClipPlayed callback, if any.
 */
function _handleClipEnd(eventId, clipId) {
  scheduledNodes.delete(eventId);
  runtimeClipState.delete(clipId);

  if (!callbacks.onClipPlayed) return;

  // Loose equality on purpose: clip ids may arrive as strings or numbers.
  const clip = appState.audio.clips.find((c) => c.id == clipId);
  if (clip) callbacks.onClipPlayed(clip);
}
|
|
|
|
/**
 * Lookahead scheduler: every tick, finds clips whose start time falls inside
 * the next SCHEDULE_AHEAD_TIME_SEC window and hands them to _scheduleClip.
 */
function _schedulerTick() {
  // Bail out unless the engine is actively playing with a live context.
  if (!isPlaying || !audioCtx) return;

  const seekSec = appState.audio.audioEditorSeekTime || 0;
  const logicalNowSec = audioCtx.currentTime - startTime + seekSec;

  const windowStartSec = logicalNowSec;
  const windowEndSec = logicalNowSec + SCHEDULE_AHEAD_TIME_SEC;

  for (const clip of appState.audio.clips) {
    const runtime = runtimeClipState.get(clip.id) || { isScheduled: false };
    if (runtime.isScheduled) continue;
    if (!clip?.buffer) continue;

    const clipStartSec = clip.startTimeInSeconds;
    const clipDurationSec = clip.durationInSeconds ?? clip.buffer?.duration;
    if (typeof clipStartSec === "undefined") continue;
    if (typeof clipDurationSec === "undefined") continue;

    // In loop mode: never pull starts from outside the loop window into it.
    // Clips straddling loopStart are handled by the overlap scheduling, and
    // scheduling them here would play wrong and could kill the rest.
    if (isLoopActive) {
      if (loopEndTimeSec - loopStartTimeSec <= 0) continue;
      if (clipStartSec < loopStartTimeSec || clipStartSec >= loopEndTimeSec) {
        continue;
      }
    }

    const insideWindow =
      clipStartSec >= windowStartSec && clipStartSec < windowEndSec;
    if (!insideWindow) continue;

    const absolutePlayTime =
      startTime + (clipStartSec - (appState.audio.audioEditorSeekTime || 0));

    _scheduleClip(clip, absolutePlayTime, clipDurationSec);

    runtime.isScheduled = true;
    runtimeClipState.set(clip.id, runtime);
  }
}
|
|
|
|
/**
 * If the playhead sits inside a clip (the clip started earlier and has not
 * finished yet), start its Player "now" with the proper internal offset.
 * Covers: seeking mid-clip, loop restarts, and play-without-stop cases.
 */
function _scheduleOverlappingClipsAtTime(playheadSec) {
  if (!audioCtx) return;

  const playhead = Number(playheadSec);
  if (!isFinite(playhead) || playhead < 0) return;

  for (const clip of appState.audio.clips) {
    if (!clip?.buffer) continue;

    const clipStart = Number(clip.startTimeInSeconds);
    if (!isFinite(clipStart)) continue;

    const clipDur =
      Number(clip.durationInSeconds) ||
      clip.buffer?.duration ||
      0;
    if (!(clipDur > 0)) continue;

    const clipEnd = clipStart + clipDur;

    // Only clips that began before the playhead and would still be sounding.
    if (!(clipStart < playhead && clipEnd > playhead)) continue;

    // Skip clips already scheduled in this pass.
    const runtime = runtimeClipState.get(clip.id) || { isScheduled: false };
    if (runtime.isScheduled) continue;

    const baseOffset = clip.offsetInSeconds ?? clip.offset ?? 0;

    // How far into the clip the playhead currently is.
    const elapsed = playhead - clipStart;

    const offset = Math.max(0, baseOffset + elapsed);
    const remaining = Math.max(0, clipEnd - playhead);

    // Fire immediately in AudioContext terms, but synced to the Transport
    // inside _scheduleClip().
    _scheduleClip(clip, audioCtx.currentTime, remaining, offset);

    runtime.isScheduled = true;
    runtimeClipState.set(clip.id, runtime);
  }
}
|
|
|
|
/**
 * On a loop wrap, restarts clips that straddle the loop start (began before
 * it and would still be sounding at it), trimmed so they never spill past
 * the loop end.
 */
function _scheduleOverlappingClipsAtLoopStart(loopStartSec, loopEndSec) {
  const loopLen = loopEndSec - loopStartSec;
  if (loopLen <= 0) return;

  for (const clip of appState.audio.clips) {
    if (!clip?.buffer) continue;

    const clipStart = Number(clip.startTimeInSeconds) || 0;
    const clipDur =
      Number(clip.durationInSeconds) ||
      clip.buffer?.duration ||
      0;
    if (clipDur <= 0) continue;

    const clipEnd = clipStart + clipDur;

    // Only clips crossing the loop start.
    if (!(clipStart < loopStartSec && clipEnd > loopStartSec)) continue;

    // Internal offset = clip offset + (loopStart - clipStart).
    const baseOffset = clip.offsetInSeconds ?? clip.offset ?? 0;
    const offset = Math.max(0, baseOffset + (loopStartSec - clipStart));

    // Remaining duration, capped at the loop length.
    const remainingToClipEnd = clipEnd - loopStartSec;
    const remainingToLoopEnd = loopEndSec - loopStartSec;
    const dur = Math.max(0, Math.min(remainingToClipEnd, remainingToLoopEnd));

    // At this instant of the wrap, startTime was reset to "now" and the seek
    // time became loopStart, so absolutePlayTime = startTime fires exactly
    // on the wrap.
    _scheduleClip(clip, startTime, dur, offset);

    // Mark as scheduled so the next tick does not double-book it.
    runtimeClipState.set(clip.id, { isScheduled: true });
  }
}
|
|
|
|
|
|
/**
 * Per-frame loop: advances the logical playhead, handles the loop wrap,
 * auto-stops at the end of the song (when not looping), and updates the
 * playhead visual. Re-schedules itself via requestAnimationFrame.
 */
function _animationLoop() {
  if (!isPlaying) {
    animationFrameId = null;
    return;
  }

  const now = audioCtx.currentTime;
  let newLogicalTime =
    now - startTime + (appState.audio.audioEditorSeekTime || 0);

  if (isLoopActive) {
    if (newLogicalTime >= loopEndTimeSec) {
      // Jump exactly back to the loop start (no remainder/jitter).
      newLogicalTime = loopStartTimeSec;

      // Realign the scheduler's internal clock.
      startTime = now;
      appState.audio.audioEditorSeekTime = newLogicalTime;

      // Clear players/state so everything can be re-scheduled cleanly.
      runtimeClipState.clear();
      scheduledNodes.forEach(({ player }) => {
        try { player.unsync(); } catch {}
        try { player.stop(); } catch {}
        try { player.dispose(); } catch {}
      });
      scheduledNodes.clear();

      // Restart patterns and the Transport reliably.
      try { stopSongPatternPlaybackOnTransport(); } catch {}

      try {
        const bpm = parseFloat(document.getElementById("bpm-input")?.value) || 120;

        Tone.Transport.stop();
        Tone.Transport.bpm.value = bpm;

        // Position exactly at loopStart.
        Tone.Transport.seconds = newLogicalTime;

        Tone.Transport.start();
      } catch {}

      try { startSongPatternPlaybackOnTransport(); } catch {}

      // Restart clips that straddle loopStart (more precise than AtTime).
      _scheduleOverlappingClipsAtLoopStart(loopStartTimeSec, loopEndTimeSec);

      // Schedule the next starts without waiting for the next interval tick.
      _schedulerTick();
    }
  }

  appState.audio.audioEditorLogicalTime = newLogicalTime;

  // End of the song when not looping: stop once the last clip has finished.
  if (!isLoopActive) {
    let maxTime = 0;
    appState.audio.clips.forEach((clip) => {
      const clipStartTime = clip.startTimeInSeconds || 0;
      const clipDuration = clip.durationInSeconds || 0;
      const endTime = clipStartTime + clipDuration;
      if (endTime > maxTime) maxTime = endTime;
    });

    if (maxTime > 0 && appState.audio.audioEditorLogicalTime >= maxTime) {
      stopAudioEditorPlayback(true);
      resetPlayheadVisual();
      return;
    }
  }

  const pixelsPerSecond = getPixelsPerSecond();
  const newPositionPx = appState.audio.audioEditorLogicalTime * pixelsPerSecond;
  updatePlayheadVisual(newPositionPx);

  animationFrameId = requestAnimationFrame(_animationLoop);
}
|
|
|
|
// --- API Pública ---
|
|
|
|
/**
 * Mirrors the global loop settings into the engine and invalidates every
 * already-scheduled player so the next ticks re-schedule with the new loop.
 */
export function updateTransportLoop() {
  isLoopActive = appState.global.isLoopActive;
  loopStartTimeSec = appState.global.loopStartTime;
  loopEndTimeSec = appState.global.loopEndTime;

  runtimeClipState.clear();

  // Stop and discard every player that was already scheduled.
  for (const { player } of scheduledNodes.values()) {
    try { player.unsync(); } catch {}
    try { player.stop(); } catch {}
    try { player.dispose(); } catch {}
  }
  scheduledNodes.clear();
}
|
|
|
|
/**
 * Starts (or resumes) audio-editor playback.
 *
 * @param {number} [seekTime] - Optional position in seconds to start from;
 *   falls back to appState.audio.audioEditorSeekTime when absent/NaN.
 */
export async function startAudioEditorPlayback(seekTime) {
  if (isPlaying) return;
  _initContext();

  // Ensure Tone's context is running (the user gesture already happened).
  await Tone.start();

  // Unwrap to the native AudioContext and resume it if it was suspended.
  const raw = audioCtx?.rawContext || audioCtx;
  if (raw?.state === "suspended" && typeof raw.resume === "function") {
    await raw.resume();
  }
  audioCtx = raw;

  // Align our own clock with the AudioContext.
  startTime = audioCtx.currentTime;
  isPlaying = true;
  appState.global.isAudioEditorPlaying = true;

  // 1. Determine the start time: explicit argument wins over stored seek.
  let timeToStart =
    seekTime !== null && seekTime !== undefined && !isNaN(seekTime)
      ? seekTime
      : appState.audio.audioEditorSeekTime || 0;

  // 2. Clamp the value (part of the RangeError fix — Tone rejects negatives).
  timeToStart = Math.max(0, timeToStart);

  // 3. Update global state (so the playhead jumps).
  appState.audio.audioEditorSeekTime = timeToStart;

  // 4. Align the Tone.Transport with the synchronized time.
  try {
    Tone.Transport.seconds = timeToStart;
  } catch {}

  updateTransportLoop();

  console.log("%cIniciando Playback...", "color: #3498db;");

  // Start the Transport (this fires the .sync()'d Players).
  try {
    startSongPatternPlaybackOnTransport();
    // Keep the Transport at the project BPM (affects scheduleRepeat("16n")).
    const bpm = parseFloat(document.getElementById("bpm-input")?.value) || 120;
    Tone.Transport.bpm.value = bpm;
    Tone.Transport.start();
    // If we started in the middle of a clip, begin it with the right offset.
    _scheduleOverlappingClipsAtTime(timeToStart);
  } catch {}

  // Keep our own scheduler/animator running alongside the Transport.
  _schedulerTick();
  schedulerIntervalId = setInterval(_schedulerTick, LOOKAHEAD_INTERVAL_MS);
  animationFrameId = requestAnimationFrame(_animationLoop);
  updateAudioEditorUI();
  const playBtn = document.getElementById("audio-editor-play-btn");
  if (playBtn) playBtn.className = "fa-solid fa-pause";
}
|
|
|
|
/**
 * Stops playback, remembering the current position as the next seek time.
 *
 * @param {boolean} [rewind=false] - When true, resets the position (and the
 *   playhead visual) back to zero instead of remembering it.
 */
export function stopAudioEditorPlayback(rewind = false) {
  if (!isPlaying) return;

  isPlaying = false;
  appState.global.isAudioEditorPlaying = false;

  console.log(`%cParando Playback... (Rewind: ${rewind})`, "color: #d9534f;");

  // Halt the Transport so every synced Player stops with it.
  try {
    stopSongPatternPlaybackOnTransport();
    Tone.Transport.stop();
  } catch {}

  clearInterval(schedulerIntervalId);
  schedulerIntervalId = null;
  cancelAnimationFrame(animationFrameId);
  animationFrameId = null;

  // Remember where we stopped so a later play resumes from here…
  appState.audio.audioEditorSeekTime =
    appState.audio.audioEditorLogicalTime || 0;
  appState.audio.audioEditorLogicalTime = 0;

  // …unless a rewind was requested, in which case go back to zero.
  if (rewind) {
    appState.audio.audioEditorSeekTime = 0;
    try { Tone.Transport.seconds = 0; } catch {}
  }

  // Tear down every scheduled player.
  scheduledNodes.forEach(({ player }) => {
    try { player.unsync(); } catch {}
    try { player.stop(); } catch {}
    try { player.dispose(); } catch {}
  });
  scheduledNodes.clear();
  runtimeClipState.clear();

  updateAudioEditorUI();
  const playBtn = document.getElementById("audio-editor-play-btn");
  if (playBtn) playBtn.className = "fa-solid fa-play";

  if (rewind) resetPlayheadVisual();
}
|
|
|
|
/** If playback is running, pauses and immediately restarts it. */
export function restartAudioEditorIfPlaying() {
  if (!isPlaying) return;
  stopAudioEditorPlayback(false); // pause without rewinding
  startAudioEditorPlayback(); // resume from the stored seek time
}
|
|
|
|
/**
 * Moves the playhead to a new position (seconds), restarting playback from
 * there when it was running.
 */
export function seekAudioEditor(newTime) {
  const wasPlaying = isPlaying;
  if (wasPlaying) stopAudioEditorPlayback(false); // pause

  // Clamp: negative positions are invalid.
  const clampedTime = Math.max(0, newTime);

  appState.audio.audioEditorSeekTime = clampedTime;
  appState.audio.audioEditorLogicalTime = clampedTime;

  try {
    Tone.Transport.seconds = clampedTime;
    const bpm = parseFloat(document.getElementById("bpm-input")?.value) || 120;
    Tone.Transport.bpm.value = bpm;
  } catch {}

  const pixelsPerSecond = getPixelsPerSecond();
  updatePlayheadVisual(clampedTime * pixelsPerSecond);

  if (wasPlaying) startAudioEditorPlayback();
}
|
|
|
|
/**
 * Registers engine event callbacks.
 *
 * Robustness fixes: tolerates a null/undefined argument (previously threw a
 * TypeError) and only accepts actual functions, so a bad value can never be
 * stored and crash later when invoked by the scheduler.
 *
 * @param {{ onClipScheduled?: (clip: object) => void,
 *           onClipPlayed?: (clip: object) => void }} [newCallbacks]
 */
export function registerCallbacks(newCallbacks) {
  if (!newCallbacks) return;
  if (typeof newCallbacks.onClipScheduled === "function") {
    callbacks.onClipScheduled = newCallbacks.onClipScheduled;
  }
  if (typeof newCallbacks.onClipPlayed === "function") {
    callbacks.onClipPlayed = newCallbacks.onClipPlayed;
  }
}
|