// js/audio/audio_audio.js

import { appState } from "../state.js";
import { updateAudioEditorUI, updatePlayheadVisual, resetPlayheadVisual } from "./audio_ui.js";
import { initializeAudioContext, getAudioContext, getMainGainNode } from "../audio.js";
import { getPixelsPerSecond } from "../utils.js";
// 🔊 ADDITION: use the SAME Tone instance the rest of the project uses
import * as Tone from "https://esm.sh/tone";

// --- Scheduler settings ---
const LOOKAHEAD_INTERVAL_MS = 25.0;
const SCHEDULE_AHEAD_TIME_SEC = 0.5; // 500ms
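
// How the two constants interact (worked example with the values above):
// the tick runs every 25 ms and books everything that starts inside the
// next 500 ms, so a clip start is scheduled roughly 475-500 ms before it
// must sound, and already-scheduled audio survives main-thread stalls
// shorter than that margin.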

// --- Internal engine state ---
let audioCtx = null;
let isPlaying = false;
let schedulerIntervalId = null;
let animationFrameId = null;

// Time synchronization
let startTime = 0;
// (seek/logical time live in appState.audio)

// Loop settings
let isLoopActive = false;
let loopStartTimeSec = 0;
let loopEndTimeSec = 8;

// Runtime state
const runtimeClipState = new Map();
// ⚠️ we now store a Tone.Player instead of a BufferSource
const scheduledNodes = new Map(); // eventId -> { player, clipId }
let nextEventId = 0;

const callbacks = {
  onClipScheduled: null,
  onClipPlayed: null,
};

// --- Time helper functions (unchanged) ---
function _getBpm() {
  const bpmInput = document.getElementById("bpm-input");
  // optional chaining guards against a missing #bpm-input element
  return parseFloat(bpmInput?.value) || 120;
}
function _getSecondsPerBeat() { return 60.0 / _getBpm(); }
function _convertBeatToSeconds(beat) { return beat * _getSecondsPerBeat(); }
function _convertSecondsToBeat(seconds) { return seconds / _getSecondsPerBeat(); }
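
// Worked example (at the 120 BPM fallback): _getSecondsPerBeat() === 0.5,
// so _convertBeatToSeconds(4) === 2.0 and _convertSecondsToBeat(1.5) === 3.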

// ensure a single context: Tone's rawContext
function _initContext() {
  if (!audioCtx) {
    initializeAudioContext();
    audioCtx = getAudioContext(); // should be Tone's rawContext
  }
}

// helper: normalize AudioBuffer → ToneAudioBuffer (same context)
function _toToneBuffer(buffer) {
  if (!buffer) return null;
  if (buffer._buffer) return buffer; // already a Tone.ToneAudioBuffer
  const tab = new Tone.ToneAudioBuffer();
  tab._buffer = buffer; // inject the AudioBuffer (already on Tone's rawContext)
  return tab;
}
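
// Note: reading and writing `_buffer` above reaches into a private
// ToneAudioBuffer field and may break across Tone.js versions. If the
// installed Tone exposes the public AudioBuffer overloads (assumed here,
// not verified against this project's pinned version), the helper could
// shrink to a sketch like:
//
//   function _toToneBuffer(buffer) {
//     if (!buffer) return null;
//     if (buffer instanceof Tone.ToneAudioBuffer) return buffer;
//     return new Tone.ToneAudioBuffer(buffer); // assumes AudioBuffer overload
//   }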

// --- Main scheduler logic (kept) ---

function _scheduleClip(clip, absolutePlayTime, durationSec) {
  if (!clip.buffer) {
    console.warn(`Clip ${clip.id} has no audio buffer loaded.`);
    return;
  }

  // we use a .sync()'ed Player wired into the same Tone graph
  const toneBuf = _toToneBuffer(clip.buffer);
  if (!toneBuf) return;

  // per-clip gain/pan chain (reuse nodes already in state; otherwise create them here)
  const gain = clip.gainNode instanceof Tone.Gain ? clip.gainNode : new Tone.Gain(clip.volume ?? 1);
  const pan = clip.pannerNode instanceof Tone.Panner ? clip.pannerNode : new Tone.Panner(clip.pan ?? 0);

  // connect to the main destination (a ToneAudioNode)
  try {
    gain.disconnect(); // avoid duplicate connections left over from earlier runs
  } catch {}
  try {
    pan.disconnect();
  } catch {}
  gain.connect(pan).connect(getMainGainNode());

  // player synced to the Transport
  const player = new Tone.Player(toneBuf).sync().connect(gain);

  // apply pitch as playback rate (semitones → rate)
  const rate = (clip.pitch && clip.pitch !== 0) ? Math.pow(2, clip.pitch / 12) : 1;
  player.playbackRate = rate;
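
  // Worked examples of the semitone → rate mapping:
  //   pitch = +12 → 2^(12/12) = 2.0  (one octave up)
  //   pitch = -12 → 2^(-12/12) = 0.5 (one octave down)
  //   pitch = +7  → 2^(7/12) ≈ 1.498 (a fifth up)
  // Since this is plain resampling, duration stretches or shrinks with pitch.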

  // compute the "when" in Transport time:
  // absolutePlayTime is in audioCtx.currentTime; the logical "zero" is the moment play started:
  // logical = (now - startTime) + seek => occurrence = (absolutePlayTime - startTime) + seek
  const occurrenceInTransportSec = (absolutePlayTime - startTime) + (appState.audio.audioEditorSeekTime || 0);

  const offset = clip.offsetInSeconds ?? clip.offset ?? 0;
  const dur = durationSec ?? toneBuf.duration;
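
  // Worked example (illustrative numbers): playback began at
  // audioCtx.currentTime = 10 s (startTime = 10) after a seek to 3 s, and a
  // clip must sound at absolute time 12.5 s. Its Transport position is then
  // (12.5 - 10) + 3 = 5.5 s, the same logical time _schedulerTick used when
  // it matched the clip against its schedule window.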

  // schedule it
  player.start(occurrenceInTransportSec, offset, dur);

  const eventId = nextEventId++;
  scheduledNodes.set(eventId, { player, clipId: clip.id });

  if (callbacks.onClipScheduled) {
    callbacks.onClipScheduled(clip);
  }

  // when it stops naturally, clean up the runtime state
  player.onstop = () => {
    _handleClipEnd(eventId, clip.id);
    try { player.unsync(); } catch {}
    try { player.dispose(); } catch {}
  };
}

function _handleClipEnd(eventId, clipId) {
  scheduledNodes.delete(eventId);
  runtimeClipState.delete(clipId);

  if (callbacks.onClipPlayed) {
    // note the loose ==, which tolerates number vs. string clip ids
    const clip = appState.audio.clips.find(c => c.id == clipId);
    if (clip) callbacks.onClipPlayed(clip);
  }
}

function _schedulerTick() {
  if (!isPlaying || !audioCtx) return;

  const now = audioCtx.currentTime;
  const logicalTime = (now - startTime) + (appState.audio.audioEditorSeekTime || 0);
  const scheduleWindowStartSec = logicalTime;
  const scheduleWindowEndSec = logicalTime + SCHEDULE_AHEAD_TIME_SEC;

  for (const clip of appState.audio.clips) {
    const clipRuntime = runtimeClipState.get(clip.id) || { isScheduled: false };
    if (clipRuntime.isScheduled) continue;
    if (!clip.buffer) continue;

    const clipStartTimeSec = clip.startTimeInSeconds;
    const clipDurationSec = clip.durationInSeconds;
    if (typeof clipStartTimeSec === 'undefined' || typeof clipDurationSec === 'undefined') continue;

    let occurrenceStartTimeSec = clipStartTimeSec;

    if (isLoopActive) {
      const loopDuration = loopEndTimeSec - loopStartTimeSec;
      if (loopDuration <= 0) continue;
      // fold a clip that starts before the loop region into it
      if (occurrenceStartTimeSec < loopStartTimeSec && logicalTime >= loopStartTimeSec) {
        const offsetFromLoopStart = (occurrenceStartTimeSec - loopStartTimeSec) % loopDuration;
        occurrenceStartTimeSec = loopStartTimeSec + (offsetFromLoopStart < 0 ? offsetFromLoopStart + loopDuration : offsetFromLoopStart);
      }
      // if the occurrence is already in the past, advance to its next repetition
      if (occurrenceStartTimeSec < logicalTime) {
        const loopsMissed = Math.floor((logicalTime - occurrenceStartTimeSec) / loopDuration) + 1;
        occurrenceStartTimeSec += loopsMissed * loopDuration;
      }
    }
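
    // Worked example of the fold-in branch (illustrative): loop region
    // 4-8 s (loopDuration = 4) and a clip whose timeline start is 2 s.
    // Then offsetFromLoopStart = (2 - 4) % 4 = -2 (JS % keeps the sign of
    // the dividend), so the clip is folded to 4 + (-2 + 4) = 6 s, inside
    // the loop region.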

    if (
      occurrenceStartTimeSec >= scheduleWindowStartSec &&
      occurrenceStartTimeSec < scheduleWindowEndSec
    ) {
      const absolutePlayTime = startTime + (occurrenceStartTimeSec - (appState.audio.audioEditorSeekTime || 0));
      _scheduleClip(clip, absolutePlayTime, clipDurationSec);
      clipRuntime.isScheduled = true;
      runtimeClipState.set(clip.id, clipRuntime);
    }
  }
}

// --- Animation loop (kept) ---
function _animationLoop() {
  if (!isPlaying) {
    animationFrameId = null;
    return;
  }
  const now = audioCtx.currentTime;
  let newLogicalTime = (now - startTime) + (appState.audio.audioEditorSeekTime || 0);

  if (isLoopActive) {
    if (newLogicalTime >= loopEndTimeSec) {
      const loopDuration = loopEndTimeSec - loopStartTimeSec;
      newLogicalTime = loopStartTimeSec + ((newLogicalTime - loopStartTimeSec) % loopDuration);
      // re-anchor the clock so subsequent ticks measure from the wrap point
      startTime = now;
      appState.audio.audioEditorSeekTime = newLogicalTime;
    }
  }

  appState.audio.audioEditorLogicalTime = newLogicalTime;

  if (!isLoopActive) {
    let maxTime = 0;
    appState.audio.clips.forEach(clip => {
      const clipStartTime = clip.startTimeInSeconds || 0;
      const clipDuration = clip.durationInSeconds || 0;
      const endTime = clipStartTime + clipDuration;
      if (endTime > maxTime) maxTime = endTime;
    });
    if (maxTime > 0 && appState.audio.audioEditorLogicalTime >= maxTime) {
      stopAudioEditorPlayback(true); // rewind at the end
      resetPlayheadVisual();
      return;
    }
  }
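
  // Time → pixels is a plain linear mapping; e.g. at a zoom of 100 px per
  // second, a logical time of 2.5 s places the playhead 250 px from the
  // timeline origin (illustrative numbers; the real factor comes from
  // getPixelsPerSecond()).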
  const pixelsPerSecond = getPixelsPerSecond();
  const newPositionPx = appState.audio.audioEditorLogicalTime * pixelsPerSecond;
  updatePlayheadVisual(newPositionPx);
  animationFrameId = requestAnimationFrame(_animationLoop);
}

// --- Public API ---

export function updateTransportLoop() {
  isLoopActive = appState.global.isLoopActive;
  loopStartTimeSec = appState.global.loopStartTime;
  loopEndTimeSec = appState.global.loopEndTime;

  runtimeClipState.clear();

  // stop and discard already-scheduled players
  scheduledNodes.forEach(({ player }) => {
    try { player.unsync(); } catch {}
    try { player.stop(); } catch {}
    try { player.dispose(); } catch {}
  });
  scheduledNodes.clear();
}
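
// The unsync → stop → dispose teardown above also appears verbatim in
// stopAudioEditorPlayback; extracting it would keep both call sites in
// lockstep (sketch, with a hypothetical helper name):
//
//   function _disposeScheduledPlayers() {
//     scheduledNodes.forEach(({ player }) => {
//       try { player.unsync(); } catch {}
//       try { player.stop(); } catch {}
//       try { player.dispose(); } catch {}
//     });
//     scheduledNodes.clear();
//   }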

export async function startAudioEditorPlayback(seekTime) { // 1. accepts 'seekTime' as a parameter
  if (isPlaying) return;
  _initContext();

  // ensure Tone's context is running (the user gesture already happened earlier)
  await Tone.start();
  if (audioCtx.state === 'suspended') {
    await audioCtx.resume();
  }

  isPlaying = true;
  appState.global.isAudioEditorPlaying = true;

  // anchor our own clock (kept for the local scheduler)
  startTime = audioCtx.currentTime;

  // --- Fix for bugs 1 & 2: keep local state and the Tone.Transport in sync ---
  // 1. Determine the start time: use the received 'seekTime' (from the global
  //    action) when it is a valid number (>= 0); otherwise fall back to the
  //    current local seek time.
  const timeToStart = (seekTime !== null && seekTime !== undefined && !isNaN(seekTime))
    ? seekTime
    : (appState.audio.audioEditorSeekTime || 0); // state-owned fallback

  // 2. Update the shared state (so the playhead jumps).
  appState.audio.audioEditorSeekTime = timeToStart;

  // 3. Align the Tone.Transport to that synchronized time.
  try {
    Tone.Transport.seconds = timeToStart;
  } catch {}
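
  // A slightly tighter guard (sketch; equivalent for the usual inputs):
  // Number.isFinite(seekTime) rejects null, undefined and NaN in one test,
  // and additionally rejects ±Infinity:
  //
  //   const timeToStart = Number.isFinite(seekTime)
  //     ? seekTime
  //     : (appState.audio.audioEditorSeekTime || 0);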

  updateTransportLoop();

  console.log("%cStarting playback...", "color: #3498db;");

  // start the Transport (so the .sync()'ed Players fire)
  try {
    Tone.Transport.start();
  } catch {}

  // keep the local scheduler and animator running
  _schedulerTick();
  schedulerIntervalId = setInterval(_schedulerTick, LOOKAHEAD_INTERVAL_MS);
  animationFrameId = requestAnimationFrame(_animationLoop);
  updateAudioEditorUI();
  const playBtn = document.getElementById("audio-editor-play-btn");
  if (playBtn) playBtn.className = "fa-solid fa-pause";
}

export function stopAudioEditorPlayback(rewind = false) {
  if (!isPlaying) return;

  isPlaying = false;
  appState.global.isAudioEditorPlaying = false;

  console.log(`%cStopping playback... (rewind: ${rewind})`, "color: #d9534f;");

  // stop the Transport (for the .sync()'ed Players)
  try { Tone.Transport.stop(); } catch {}

  clearInterval(schedulerIntervalId);
  schedulerIntervalId = null;
  cancelAnimationFrame(animationFrameId);
  animationFrameId = null;

  // remember where we paused so the next start resumes from here
  appState.audio.audioEditorSeekTime = appState.audio.audioEditorLogicalTime || 0;
  appState.audio.audioEditorLogicalTime = 0;
  if (rewind) {
    appState.audio.audioEditorSeekTime = 0;
    try { Tone.Transport.seconds = 0; } catch {}
  }

  // stop and discard already-scheduled players
  scheduledNodes.forEach(({ player }) => {
    try { player.unsync(); } catch {}
    try { player.stop(); } catch {}
    try { player.dispose(); } catch {}
  });
  scheduledNodes.clear();
  runtimeClipState.clear();

  updateAudioEditorUI();
  const playBtn = document.getElementById("audio-editor-play-btn");
  if (playBtn) playBtn.className = 'fa-solid fa-play';

  if (rewind) {
    resetPlayheadVisual();
  }
}

export function restartAudioEditorIfPlaying() {
  if (isPlaying) {
    stopAudioEditorPlayback(false); // pause
    startAudioEditorPlayback();
  }
}

export function seekAudioEditor(newTime) {
  const wasPlaying = isPlaying;
  if (wasPlaying) {
    stopAudioEditorPlayback(false); // pause
  }

  appState.audio.audioEditorSeekTime = newTime;
  appState.audio.audioEditorLogicalTime = newTime;

  try { Tone.Transport.seconds = newTime; } catch {}

  const pixelsPerSecond = getPixelsPerSecond();
  const newPositionPx = newTime * pixelsPerSecond;
  updatePlayheadVisual(newPositionPx);

  if (wasPlaying) {
    startAudioEditorPlayback();
  }
}

export function registerCallbacks(newCallbacks) {
  if (newCallbacks.onClipScheduled) {
    callbacks.onClipScheduled = newCallbacks.onClipScheduled;
  }
  if (newCallbacks.onClipPlayed) {
    callbacks.onClipPlayed = newCallbacks.onClipPlayed;
  }
}
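
// Example wiring from a caller (illustrative: the element and handler
// bodies are hypothetical, not part of this module):
//
//   import {
//     registerCallbacks,
//     startAudioEditorPlayback,
//     seekAudioEditor,
//   } from "./audio/audio_audio.js";
//
//   registerCallbacks({
//     onClipScheduled: (clip) => console.log("scheduled", clip.id),
//     onClipPlayed: (clip) => console.log("finished", clip.id),
//   });
//
//   // must run from a user gesture so the AudioContext is allowed to start
//   playButton.addEventListener("click", () => startAudioEditorPlayback(0));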