mmpSearch/assets/js/creations/audio/audio_audio.js

565 lines
16 KiB
JavaScript
Executable File

// js/audio/audio_audio.js
import { appState } from "../state.js";
import {
updateAudioEditorUI,
updatePlayheadVisual,
resetPlayheadVisual,
} from "./audio_ui.js";
import {
initializeAudioContext,
getAudioContext,
getMainGainNode,
} from "../audio.js";
import {
startSongPatternPlaybackOnTransport,
stopSongPatternPlaybackOnTransport,
} from "../pattern/pattern_audio.js";
import { getPixelsPerSecond } from "../utils.js";
// 🔊 ADIÇÃO: usar a MESMA instância do Tone que o projeto usa
import * as Tone from "https://esm.sh/tone";
// --- Scheduler configuration ---
const LOOKAHEAD_INTERVAL_MS = 25.0; // how often the lookahead scheduler ticks
const SCHEDULE_AHEAD_TIME_SEC = 0.5; // 500ms scheduling window
// --- Internal engine state ---
let audioCtx = null; // shared AudioContext (expected to be Tone's rawContext)
let isPlaying = false;
let schedulerIntervalId = null; // setInterval handle for _schedulerTick
let animationFrameId = null; // requestAnimationFrame handle for _animationLoop
// Time synchronization: AudioContext time at which playback last (re)started.
let startTime = 0;
// (seek/logical times live in appState.audio)
// Loop configuration (mirrored from appState.global by updateTransportLoop)
let isLoopActive = false;
let loopStartTimeSec = 0;
let loopEndTimeSec = 8;
// Runtime state per clip: clipId -> { isScheduled }
const runtimeClipState = new Map();
// NOTE: we now store Tone.Player instances instead of raw BufferSources.
const scheduledNodes = new Map(); // eventId -> { player, clipId }
let nextEventId = 0;
// Optional hooks registered via registerCallbacks().
const callbacks = {
  onClipScheduled: null,
  onClipPlayed: null,
};
// --- Time helper functions ---
/**
 * Reads the project BPM from the "bpm-input" element.
 *
 * Fix: guard against the element being absent — the original dereferenced
 * `bpmInput.value` unconditionally and would throw a TypeError when the
 * input is not in the DOM. Uses the same `?.value` pattern already applied
 * elsewhere in this file.
 *
 * @returns {number} BPM from the input, or 120 when missing/non-numeric.
 */
function _getBpm() {
  return parseFloat(document.getElementById("bpm-input")?.value) || 120;
}
/**
 * @returns {number} Duration of one beat in seconds at the current BPM.
 */
function _getSecondsPerBeat() {
  const bpm = _getBpm();
  return 60.0 / bpm;
}
/**
 * Converts a beat position to seconds at the current BPM.
 * @param {number} beat - Position in beats.
 * @returns {number} Equivalent position in seconds.
 */
function _convertBeatToSeconds(beat) {
  const secondsPerBeat = _getSecondsPerBeat();
  return beat * secondsPerBeat;
}
/**
 * Converts a time in seconds to a beat position at the current BPM.
 * @param {number} seconds - Position in seconds.
 * @returns {number} Equivalent position in beats.
 */
function _convertSecondsToBeat(seconds) {
  const secondsPerBeat = _getSecondsPerBeat();
  return seconds / secondsPerBeat;
}
// Guarantees a single context — Tone's rawContext.
// Lazily binds this module's `audioCtx` to the shared context from
// ../audio.js, creating it on first use.
function _initContext() {
  if (audioCtx) return;
  initializeAudioContext();
  audioCtx = getAudioContext(); // should be Tone's rawContext
}
// Helper: normalizes an AudioBuffer into a Tone.ToneAudioBuffer (same context).
function _toToneBuffer(buffer) {
  if (!buffer) return null;
  // A ToneAudioBuffer already carries its raw AudioBuffer in `_buffer`.
  if (buffer._buffer) return buffer;
  // Wrap the raw AudioBuffer (already on Tone's rawContext) without copying.
  const wrapped = new Tone.ToneAudioBuffer();
  wrapped._buffer = buffer;
  return wrapped;
}
// --- Core scheduler logic (kept) ---
/**
 * Creates and starts a Tone.Player for one occurrence of a clip, synced to
 * the Tone.Transport, routed through gain -> pan -> main output.
 *
 * @param {object} clip - Clip from appState.audio.clips; must have a decoded
 *   `buffer`, plus optional volume/pan/pitch/offset/duration fields.
 * @param {number} absolutePlayTime - AudioContext time at which this
 *   occurrence should sound (converted to Transport seconds below).
 * @param {number} [durationSec] - Playback length; falls back to the clip's
 *   duration, then the buffer's full length.
 * @param {number} [overrideOffsetSec] - Offset into the buffer; overrides the
 *   clip's own offset (used when resuming mid-clip after a seek/loop wrap).
 */
function _scheduleClip(clip, absolutePlayTime, durationSec, overrideOffsetSec) {
  if (!clip.buffer) {
    console.warn(`Clip ${clip.id} não possui áudio buffer carregado.`);
    return;
  }
  const toneBuf = _toToneBuffer(clip.buffer);
  if (!toneBuf) return;
  // Reuse the clip's own gain/pan nodes when they are already Tone nodes;
  // otherwise build fresh ones from the clip's volume/pan values.
  const gain =
    clip.gainNode instanceof Tone.Gain
      ? clip.gainNode
      : new Tone.Gain(clip.volume ?? 1);
  const pan =
    clip.pannerNode instanceof Tone.Panner
      ? clip.pannerNode
      : new Tone.Panner(clip.pan ?? 0);
  // Disconnect first so a reused node is not left double-connected.
  try { gain.disconnect(); } catch {}
  try { pan.disconnect(); } catch {}
  gain.connect(pan).connect(getMainGainNode());
  const player = new Tone.Player(toneBuf).sync().connect(gain);
  // `pitch` is in semitones; convert to a playback-rate multiplier.
  const rate =
    clip.pitch && clip.pitch !== 0 ? Math.pow(2, clip.pitch / 12) : 1;
  player.playbackRate = rate;
  // --- Transport time (in seconds) ---
  // Convert absolute AudioContext time to the Transport timeline: remove this
  // engine's start anchor and re-add the stored seek position.
  const occurrenceInTransportSec =
    absolutePlayTime - startTime + (appState.audio.audioEditorSeekTime || 0);
  const baseOffset = clip.offsetInSeconds ?? clip.offset ?? 0;
  const offset = overrideOffsetSec ?? baseOffset;
  const dur = durationSec ?? clip.durationInSeconds ?? toneBuf.duration;
  const safeOccurrence = Math.max(0, occurrenceInTransportSec);
  const safeOffset = Math.max(0, offset);
  const safeDur = dur == null ? undefined : Math.max(0, dur);
  // Guard: never schedule in the past (especially right after a loop wrap).
  let transportNow =
    Tone.Transport.getSecondsAtTime
      ? Tone.Transport.getSecondsAtTime(Tone.now())
      : Tone.Transport.seconds;
  // Small slack so the start is not "missed" by a few milliseconds.
  const EPS = 0.003;
  const startAt = Math.max(safeOccurrence, transportNow + EPS);
  player.start(startAt, safeOffset, safeDur);
  const eventId = nextEventId++;
  scheduledNodes.set(eventId, { player, clipId: clip.id });
  if (callbacks.onClipScheduled) callbacks.onClipScheduled(clip);
  // When the player stops (end of buffer or explicit stop), release all
  // bookkeeping and dispose of the node.
  player.onstop = () => {
    _handleClipEnd(eventId, clip.id);
    try { player.unsync(); } catch {}
    try { player.dispose(); } catch {}
  };
}
/**
 * Cleans up bookkeeping when a scheduled clip occurrence finishes, and fires
 * the onClipPlayed callback (when registered) with the matching clip.
 *
 * @param {number} eventId - Key into scheduledNodes for this occurrence.
 * @param {*} clipId - Id of the clip that finished.
 */
function _handleClipEnd(eventId, clipId) {
  // Drop both maps' entries so the clip can be re-scheduled later
  // (e.g. on the next loop pass).
  scheduledNodes.delete(eventId);
  runtimeClipState.delete(clipId);
  const { onClipPlayed } = callbacks;
  if (!onClipPlayed) return;
  // Loose == on purpose: ids may arrive as string or number.
  const clip = appState.audio.clips.find((c) => c.id == clipId);
  if (clip) onClipPlayed(clip);
}
/**
 * Lookahead scheduler tick (runs every LOOKAHEAD_INTERVAL_MS while playing).
 * Scans all clips and schedules any whose start falls inside the next
 * SCHEDULE_AHEAD_TIME_SEC window of logical (timeline) time.
 */
function _schedulerTick() {
  if (!isPlaying || !audioCtx) return;
  const now = audioCtx.currentTime;
  // Logical timeline position = elapsed context time + stored seek position.
  const logicalTime =
    now - startTime + (appState.audio.audioEditorSeekTime || 0);
  const scheduleWindowStartSec = logicalTime;
  const scheduleWindowEndSec = logicalTime + SCHEDULE_AHEAD_TIME_SEC;
  for (const clip of appState.audio.clips) {
    const clipRuntime = runtimeClipState.get(clip.id) || { isScheduled: false };
    if (clipRuntime.isScheduled) continue; // already queued
    if (!clip?.buffer) continue; // audio not loaded yet
    const clipStartTimeSec = clip.startTimeInSeconds;
    const clipDurationSec =
      clip.durationInSeconds ?? clip.buffer?.duration;
    if (typeof clipStartTimeSec === "undefined") continue;
    if (typeof clipDurationSec === "undefined") continue;
    // In loop mode: do NOT pull starts from before loopStart into the loop.
    // Those cases are handled by overlap scheduling (clip crossing loopStart).
    if (isLoopActive) {
      const loopDuration = loopEndTimeSec - loopStartTimeSec;
      if (loopDuration <= 0) continue;
      // Start outside the loop window -> don't schedule (would play wrong
      // and could "kill" the rest of the loop).
      if (clipStartTimeSec < loopStartTimeSec || clipStartTimeSec >= loopEndTimeSec) {
        continue;
      }
    }
    const occurrenceStartTimeSec = clipStartTimeSec;
    if (
      occurrenceStartTimeSec >= scheduleWindowStartSec &&
      occurrenceStartTimeSec < scheduleWindowEndSec
    ) {
      // Translate timeline seconds back into absolute AudioContext time.
      const absolutePlayTime =
        startTime +
        (occurrenceStartTimeSec - (appState.audio.audioEditorSeekTime || 0));
      _scheduleClip(clip, absolutePlayTime, clipDurationSec);
      clipRuntime.isScheduled = true;
      runtimeClipState.set(clip.id, clipRuntime);
    }
  }
}
/**
 * If the playhead sits in the middle of a clip (the clip started earlier and
 * has not finished yet), start its Player "now" with the appropriate buffer
 * offset. This handles: seeking into a clip, loop restarts, and play without
 * a preceding stop in certain cases.
 *
 * @param {number} playheadSec - Timeline position (seconds) to resume from.
 */
function _scheduleOverlappingClipsAtTime(playheadSec) {
  if (!audioCtx) return;
  const t = Number(playheadSec);
  if (!isFinite(t) || t < 0) return;
  for (const clip of appState.audio.clips) {
    if (!clip?.buffer) continue;
    const s = Number(clip.startTimeInSeconds);
    if (!isFinite(s)) continue;
    const d =
      Number(clip.durationInSeconds) ||
      clip.buffer?.duration ||
      0;
    if (!(d > 0)) continue;
    const e = s + d;
    // Clip already started and would still be sounding at this playhead.
    if (!(s < t && e > t)) continue;
    // Already scheduled on this "pass"?
    const clipRuntime = runtimeClipState.get(clip.id) || { isScheduled: false };
    if (clipRuntime.isScheduled) continue;
    const baseOffset = clip.offsetInSeconds ?? clip.offset ?? 0;
    // How far "into" the clip we are.
    const delta = t - s;
    const offset = Math.max(0, baseOffset + delta);
    const remaining = Math.max(0, e - t);
    // Schedule to play immediately (in AudioContext terms), but synced to
    // the Transport via _scheduleClip().
    _scheduleClip(clip, audioCtx.currentTime, remaining, offset);
    clipRuntime.isScheduled = true;
    runtimeClipState.set(clip.id, clipRuntime);
  }
}
/**
 * Schedules clips that straddle the loop start: a clip that began before
 * loopStart and would still be sounding there is restarted with the matching
 * internal offset, clamped so it never spills past loopEnd.
 *
 * NOTE(review): no call site is visible in this chunk — the loop-wrap path in
 * _animationLoop uses _scheduleOverlappingClipsAtTime instead. Confirm this
 * helper is still used before relying on it.
 *
 * @param {number} loopStartSec - Loop window start (seconds).
 * @param {number} loopEndSec - Loop window end (seconds).
 */
function _scheduleOverlappingClipsAtLoopStart(loopStartSec, loopEndSec) {
  const loopLen = loopEndSec - loopStartSec;
  if (loopLen <= 0) return;
  for (const clip of appState.audio.clips) {
    if (!clip?.buffer) continue;
    const s = Number(clip.startTimeInSeconds) || 0;
    const d =
      Number(clip.durationInSeconds) ||
      clip.buffer?.duration ||
      0;
    if (d <= 0) continue;
    const e = s + d;
    // Clip crosses loopStart (began before it and still sounding there).
    if (!(s < loopStartSec && e > loopStartSec)) continue;
    // Internal offset = clip's own offset + (loopStart - clip start).
    const baseOffset = clip.offsetInSeconds ?? clip.offset ?? 0;
    const offset = Math.max(0, baseOffset + (loopStartSec - s));
    // Remaining duration, clamped so it does not leak past loopEnd.
    const remainingToClipEnd = e - loopStartSec;
    const remainingToLoopEnd = loopEndSec - loopStartSec;
    const dur = Math.max(0, Math.min(remainingToClipEnd, remainingToLoopEnd));
    // At this instant of the loop wrap, startTime was reset to "now" and the
    // seek time became loopStart, so absolutePlayTime = startTime fires
    // exactly on the wrap.
    _scheduleClip(clip, startTime, dur, offset);
    // Mark as scheduled so the next tick does not duplicate it.
    runtimeClipState.set(clip.id, { isScheduled: true });
  }
}
/**
 * requestAnimationFrame loop: advances the logical playhead, performs the
 * loop-wrap (re-anchoring the clock, clearing players, restarting song
 * patterns, rescheduling straddling clips), auto-stops at the end of the
 * song when looping is off, and updates the playhead visual.
 */
function _animationLoop() {
  if (!isPlaying) {
    animationFrameId = null;
    return;
  }
  const now = audioCtx.currentTime;
  let newLogicalTime =
    now - startTime + (appState.audio.audioEditorSeekTime || 0);
  if (isLoopActive) {
    if (newLogicalTime >= loopEndTimeSec) {
      // Wrap the logical time back into the loop window.
      const loopDuration = loopEndTimeSec - loopStartTimeSec;
      if (loopDuration > 0) {
        newLogicalTime =
          loopStartTimeSec +
          ((newLogicalTime - loopStartTimeSec) % loopDuration);
      } else {
        newLogicalTime = loopStartTimeSec;
      }
      // Re-anchor the internal clock.
      startTime = now;
      appState.audio.audioEditorSeekTime = newLogicalTime;
      // Force the Transport to "jump" along with the loop wrap.
      try {
        // (Disable the Transport's own loop here so it does not fight this
        // playlist loop logic.)
        Tone.Transport.loop = false;
      } catch {}
      try {
        Tone.Transport.seconds = newLogicalTime;
      } catch {}
      // Clear players/state to allow a clean re-schedule.
      runtimeClipState.clear();
      scheduledNodes.forEach(({ player }) => {
        try { player.unsync(); } catch {}
        try { player.stop(); } catch {}
        try { player.dispose(); } catch {}
      });
      scheduledNodes.clear();
      // Restart the song patterns (the playlist's own scheduler).
      try {
        stopSongPatternPlaybackOnTransport();
        startSongPatternPlaybackOnTransport();
      } catch {}
      // IMPORTANT: restart clips that straddle the loop start.
      _scheduleOverlappingClipsAtTime(newLogicalTime);
      // And schedule the next clip starts without waiting for the next
      // interval tick.
      _schedulerTick();
    }
  }
  appState.audio.audioEditorLogicalTime = newLogicalTime;
  // End of song without loop: stop once past the last clip's end.
  if (!isLoopActive) {
    let maxTime = 0;
    appState.audio.clips.forEach((clip) => {
      const clipStartTime = clip.startTimeInSeconds || 0;
      const clipDuration = clip.durationInSeconds || 0;
      const endTime = clipStartTime + clipDuration;
      if (endTime > maxTime) maxTime = endTime;
    });
    if (maxTime > 0 && appState.audio.audioEditorLogicalTime >= maxTime) {
      stopAudioEditorPlayback(true);
      resetPlayheadVisual();
      return;
    }
  }
  const pixelsPerSecond = getPixelsPerSecond();
  const newPositionPx = appState.audio.audioEditorLogicalTime * pixelsPerSecond;
  updatePlayheadVisual(newPositionPx);
  animationFrameId = requestAnimationFrame(_animationLoop);
}
// --- Public API ---
/**
 * Syncs this engine's loop settings from appState.global and invalidates all
 * currently scheduled playback so the next ticks re-schedule cleanly.
 */
export function updateTransportLoop() {
  // Mirror the global loop configuration into module-local state.
  isLoopActive = appState.global.isLoopActive;
  loopStartTimeSec = appState.global.loopStartTime;
  loopEndTimeSec = appState.global.loopEndTime;
  // Forget per-clip scheduling flags...
  runtimeClipState.clear();
  // ...and tear down every pending Tone.Player.
  for (const { player } of scheduledNodes.values()) {
    try { player.unsync(); } catch {}
    try { player.stop(); } catch {}
    try { player.dispose(); } catch {}
  }
  scheduledNodes.clear();
}
/**
 * Starts (or resumes) editor playback.
 *
 * @param {number} [seekTime] - Optional timeline position (seconds) to start
 *   from; when omitted/null/NaN, appState.audio.audioEditorSeekTime is used.
 */
export async function startAudioEditorPlayback(seekTime) {
  // 1. Accepts 'seekTime' as a parameter.
  if (isPlaying) return;
  _initContext();
  // Ensure Tone's context is running (the user gesture already happened).
  await Tone.start();
  if (audioCtx.state === "suspended") {
    await audioCtx.resume();
  }
  isPlaying = true;
  appState.global.isAudioEditorPlaying = true;
  // Anchor this engine's own clock (kept for the lookahead scheduler).
  startTime = audioCtx.currentTime;
  // 1. Determine the start time:
  let timeToStart =
    seekTime !== null && seekTime !== undefined && !isNaN(seekTime)
      ? seekTime
      : appState.audio.audioEditorSeekTime || 0; // fall back to stored seek
  // 2. Clamp the value (part of the RangeError fix).
  timeToStart = Math.max(0, timeToStart);
  // 3. Update global state (so the playhead jumps).
  appState.audio.audioEditorSeekTime = timeToStart;
  // 4. Align the Tone.Transport to that time.
  try {
    Tone.Transport.seconds = timeToStart; // uses the synchronized time
  } catch {}
  updateTransportLoop();
  console.log("%cIniciando Playback...", "color: #3498db;");
  // Start the Transport (which drives the .sync()'d Players).
  try {
    startSongPatternPlaybackOnTransport();
    // Keep the Transport at the project BPM (affects scheduleRepeat("16n")).
    const bpm = parseFloat(document.getElementById("bpm-input")?.value) || 120;
    Tone.Transport.bpm.value = bpm;
    Tone.Transport.start();
    // If playback begins mid-clip, start it with the correct offset.
    _scheduleOverlappingClipsAtTime(timeToStart);
  } catch {}
  // Keep this engine's own scheduler/animator running too.
  _schedulerTick();
  schedulerIntervalId = setInterval(_schedulerTick, LOOKAHEAD_INTERVAL_MS);
  animationFrameId = requestAnimationFrame(_animationLoop);
  updateAudioEditorUI();
  const playBtn = document.getElementById("audio-editor-play-btn");
  if (playBtn) playBtn.className = "fa-solid fa-pause";
}
/**
 * Stops editor playback and tears down all scheduled players.
 *
 * @param {boolean} [rewind=false] - When true, also resets the seek position,
 *   Transport, and playhead to 0; when false, playback pauses in place.
 */
export function stopAudioEditorPlayback(rewind = false) {
  if (!isPlaying) return;
  isPlaying = false;
  appState.global.isAudioEditorPlaying = false;
  console.log(`%cParando Playback... (Rewind: ${rewind})`, "color: #d9534f;");
  // Stop the Transport (for the .sync()'d Players).
  try {
    stopSongPatternPlaybackOnTransport();
    Tone.Transport.stop();
  } catch {}
  clearInterval(schedulerIntervalId);
  schedulerIntervalId = null;
  cancelAnimationFrame(animationFrameId);
  animationFrameId = null;
  // Persist the pause position so a later start resumes from here.
  appState.audio.audioEditorSeekTime =
    appState.audio.audioEditorLogicalTime || 0;
  appState.audio.audioEditorLogicalTime = 0;
  if (rewind) {
    appState.audio.audioEditorSeekTime = 0;
    try {
      Tone.Transport.seconds = 0;
    } catch {}
  }
  // Stop and dispose every scheduled player.
  scheduledNodes.forEach(({ player }) => {
    try {
      player.unsync();
    } catch {}
    try {
      player.stop();
    } catch {}
    try {
      player.dispose();
    } catch {}
  });
  scheduledNodes.clear();
  runtimeClipState.clear();
  updateAudioEditorUI();
  const playBtn = document.getElementById("audio-editor-play-btn");
  if (playBtn) playBtn.className = "fa-solid fa-play";
  if (rewind) {
    resetPlayheadVisual();
  }
}
/**
 * Restarts playback in place (pause + start) — a no-op unless the editor is
 * currently playing.
 */
export function restartAudioEditorIfPlaying() {
  if (!isPlaying) return;
  stopAudioEditorPlayback(false); // pause (keeps the current seek position)
  startAudioEditorPlayback();
}
/**
 * Moves the playhead to a new timeline position, pausing and resuming
 * playback around the jump when the editor was already playing.
 *
 * @param {number} newTime - Target position in seconds (clamped to >= 0).
 */
export function seekAudioEditor(newTime) {
  const resumeAfterSeek = isPlaying;
  if (resumeAfterSeek) {
    stopAudioEditorPlayback(false); // pause, keep state
  }
  // Clamp and publish the new position.
  const clamped = Math.max(0, newTime);
  appState.audio.audioEditorSeekTime = clamped;
  appState.audio.audioEditorLogicalTime = clamped;
  try {
    Tone.Transport.seconds = clamped;
    const bpm = parseFloat(document.getElementById("bpm-input")?.value) || 120;
    Tone.Transport.bpm.value = bpm;
  } catch {}
  // Move the visual playhead to match.
  const pixelsPerSecond = getPixelsPerSecond();
  updatePlayheadVisual(clamped * pixelsPerSecond);
  if (resumeAfterSeek) {
    startAudioEditorPlayback();
  }
}
/**
 * Registers engine hooks. Only recognised, truthy hooks are merged;
 * everything else in `newCallbacks` is ignored.
 *
 * @param {{onClipScheduled?: Function, onClipPlayed?: Function}} newCallbacks
 */
export function registerCallbacks(newCallbacks) {
  const { onClipScheduled, onClipPlayed } = newCallbacks;
  if (onClipScheduled) callbacks.onClipScheduled = onClipScheduled;
  if (onClipPlayed) callbacks.onClipPlayed = onClipPlayed;
}