// Source: mmpSearch/assets/js/creations/pattern/pattern_audio.js
// js/pattern_audio.js
import * as Tone from "https://esm.sh/tone";
import { appState } from "../state.js";
import { highlightStep } from "./pattern_ui.js";
import { getTotalSteps } from "../utils.js";
import { initializeAudioContext } from "../audio.js";
// Cached reference to the transport timer readout ("MM:SS:CC");
// may be null when the element is absent from the current page.
const timerDisplay = document.getElementById("timer-display");
// Tone.Part instances (piano-roll melodic sequences) currently scheduled on
// the Transport; rebuilt by schedulePianoRoll() and disposed by stopPlayback().
let activeParts = [];
// Renders a duration in milliseconds as an "MM:SS:CC" display string
// (minutes, whole seconds, centiseconds — each zero-padded to two digits).
function formatTime(milliseconds) {
  const pad2 = (n) => String(n).padStart(2, "0");
  const wholeSeconds = Math.floor(milliseconds / 1000);
  const minutes = pad2(Math.floor(wholeSeconds / 60));
  const seconds = pad2(wholeSeconds % 60);
  const centiseconds = pad2(Math.floor((milliseconds % 1000) / 10));
  return `${minutes}:${seconds}:${centiseconds}`;
}
// Plays a single metronome click: 1000 Hz on the downbeat, 800 Hz otherwise.
// @param {boolean} isDownbeat - true for the accented first beat of the bar.
export function playMetronomeSound(isDownbeat) {
  initializeAudioContext();
  const synth = new Tone.Synth().toDestination();
  const freq = isDownbeat ? 1000 : 800;
  synth.triggerAttackRelease(freq, "8n", Tone.now());
  // Fixed: dispose the throwaway synth once the note has surely finished,
  // so repeated clicks don't accumulate orphaned audio nodes.
  setTimeout(() => synth.dispose(), 1000);
}
// Triggers a track's sample, making sure the player is routed through the
// track's volume/pan nodes. Without a trackId, plays a one-shot preview.
// @param {string} filePath - sample URL/path (used only for previews).
// @param {string|number} [trackId] - id of the track whose player to fire.
export function playSample(filePath, trackId) {
  initializeAudioContext();
  // Loose equality (==) is deliberate: trackId may arrive as a string
  // while track.id is numeric.
  const track = trackId
    ? appState.pattern.tracks.find((t) => t.id == trackId)
    : null;
  // The track exists and has a preloaded player
  if (track && track.player) {
    if (track.player.loaded) {
      // Re-apply volume/pan on every trigger (robust to live edits).
      if (track.volumeNode) {
        track.volumeNode.volume.value =
          track.volume === 0 ? -Infinity : Tone.gainToDb(track.volume);
      }
      if (track.pannerNode) {
        track.pannerNode.pan.value = track.pan ?? 0;
      }
      // Guarantee the chain: player -> volumeNode.
      try {
        track.player.disconnect();
      } catch {}
      if (track.volumeNode) {
        track.player.connect(track.volumeNode);
      }
      // Fire immediately.
      track.player.start(Tone.now());
    } else {
      console.warn(
        `Player da trilha "${track.name}" ainda não carregado — pulando este tick.`
      );
    }
  }
  // Fallback: preview a sample that has no associated track.
  else if (!trackId && filePath) {
    const previewPlayer = new Tone.Player(filePath).toDestination();
    previewPlayer.autostart = true;
    // Fixed: free the one-shot player once playback ends so repeated
    // previews don't leak audio nodes.
    previewPlayer.onstop = () => previewPlayer.dispose();
  }
}
// One sequencer tick: updates the step highlight and timer, fires the
// metronome, and triggers every unmuted track whose active pattern has the
// current step enabled, then advances the step cursor. Driven by the
// setInterval installed in startPlayback(), not by the Tone.js Transport.
function tick() {
  if (!appState.global.isPlaying) {
    stopPlayback();
    return;
  }
  const totalSteps = getTotalSteps();
  // Un-highlight the previously lit step (wraps around at pattern start).
  const lastStepIndex =
    appState.global.currentStep === 0
      ? totalSteps - 1
      : appState.global.currentStep - 1;
  highlightStep(lastStepIndex, false);
  // Re-read the BPM each tick so the timer display tracks live tempo edits.
  const bpm = parseInt(document.getElementById("bpm-input").value, 10) || 120;
  const stepInterval = (60 * 1000) / (bpm * 4); // ms per 16th-note step
  const currentTime = appState.global.currentStep * stepInterval;
  if (timerDisplay) {
    timerDisplay.textContent = formatTime(currentTime);
  }
  // Metronome: click on every beat, accented on the first beat of the bar
  // (beat size derived from the time-signature denominator input).
  if (appState.global.metronomeEnabled) {
    const noteValue =
      parseInt(document.getElementById("compasso-b-input").value, 10) || 4;
    const stepsPerBeat = 16 / noteValue;
    if (appState.global.currentStep % stepsPerBeat === 0) {
      playMetronomeSound(
        appState.global.currentStep % (stepsPerBeat * 4) === 0
      );
    }
  }
  // Walk the tracks and trigger the current step if it is active.
  appState.pattern.tracks.forEach((track) => {
    if (track.muted) return; // Respect the Mute toggle
    if (!track.patterns || track.patterns.length === 0) return;
    const activePattern = track.patterns[track.activePatternIndex];
    // Is the current step enabled in this track's active pattern?
    if (activePattern && activePattern.steps[appState.global.currentStep]) {
      // CASE 1: SAMPLER (audio file) — play the track's loaded sample.
      if (track.samplePath) {
        playSample(track.samplePath, track.id);
      }
      // CASE 2: PLUGIN (synthesizer) — play a default C5 note.
      // "16n" is a sixteenth-note duration; the try/catch keeps a failing
      // plugin from killing the whole playback loop.
      else if (track.type === 'plugin' && track.instrument) {
        try {
          track.instrument.triggerAttackRelease("C5", "16n", Tone.now());
        } catch (e) {
          console.warn("Falha ao tocar step do plugin:", e);
        }
      }
    }
  });
  highlightStep(appState.global.currentStep, true);
  appState.global.currentStep = (appState.global.currentStep + 1) % totalSteps;
}
// Starts pattern playback: boots the audio context, schedules piano-roll
// melodies on the Tone.js Transport, and drives the step sequencer via a
// setInterval whose period is one 16th note at the current BPM. No-op if
// already playing or there are no tracks.
export function startPlayback() {
  if (appState.global.isPlaying || appState.pattern.tracks.length === 0) return;
  initializeAudioContext();
  // Make sure the Tone context is running (browser user-gesture policy).
  // NOTE(review): Tone.start() returns a Promise that is not awaited here,
  // so the first notes could race the context resume — confirm.
  if (Tone.context.state !== "running") {
    Tone.start();
  }
  if (appState.global.currentStep === 0) {
    rewindPlayback();
  }
  const bpm = parseInt(document.getElementById("bpm-input").value, 10) || 120;
  Tone.Transport.bpm.value = bpm;
  const stepInterval = (60 * 1000) / (bpm * 4); // ms per 16th-note step
  // Clear any stale interval before installing a new one.
  if (appState.global.playbackIntervalId)
    clearInterval(appState.global.playbackIntervalId);
  // Schedule the piano-roll (melodic) Parts on the Transport clock.
  schedulePianoRoll();
  Tone.Transport.start(); // Starts the clock that drives the melodic notes
  appState.global.isPlaying = true;
  // Flip the toolbar icon from "play" to "pause".
  const playBtn = document.getElementById("play-btn");
  if (playBtn) {
    playBtn.classList.remove("fa-play");
    playBtn.classList.add("fa-pause");
  }
  // Fire the current step immediately instead of waiting one interval.
  tick();
  appState.global.playbackIntervalId = setInterval(tick, stepInterval);
}
// Stops playback and resets transport state: clears the step interval,
// halts the Tone Transport, disposes the scheduled melodic Parts, releases
// any held notes, and resets the UI (step highlights, timer, play button).
export function stopPlayback() {
  if (appState.global.playbackIntervalId) {
    clearInterval(appState.global.playbackIntervalId);
  }
  appState.global.playbackIntervalId = null;
  appState.global.isPlaying = false;
  // Stop the Transport and clean up melodic scheduling.
  Tone.Transport.stop();
  activeParts.forEach(part => part.dispose());
  activeParts = [];
  // Best-effort release of stuck notes on every track; errors are ignored
  // on purpose so one broken track can't prevent a full stop.
  appState.pattern.tracks.forEach(track => {
    try { track.player?.stop(); } catch {}
    try { track.instrument?.releaseAll?.(); } catch {} // For PolySynths
    try { track.instrument?.triggerRelease?.(); } catch {} // For MonoSynths
  });
  // Clear any lingering step highlights in the grid.
  document
    .querySelectorAll(".step.playing")
    .forEach((s) => s.classList.remove("playing"));
  appState.global.currentStep = 0;
  if (timerDisplay) timerDisplay.textContent = "00:00:00";
  // Flip the toolbar icon back from "pause" to "play".
  const playBtn = document.getElementById("play-btn");
  if (playBtn) {
    playBtn.classList.remove("fa-pause");
    playBtn.classList.add("fa-play");
  }
}
// Rewinds the sequencer to step 0 and resets the Tone.js musical clock.
// When stopped, also clears the timer readout and un-highlights the step
// that was lit before the rewind.
export function rewindPlayback() {
  let litStep;
  if (appState.global.currentStep > 0) {
    litStep = appState.global.currentStep - 1;
  } else {
    litStep = getTotalSteps() - 1;
  }
  appState.global.currentStep = 0;
  Tone.Transport.position = 0; // Rewind Tone.js' transport time as well
  if (appState.global.isPlaying) return;
  if (timerDisplay) timerDisplay.textContent = "00:00:00";
  highlightStep(litStep, false);
}
// Toggles between playing and stopped. Starting always begins from step 0.
export function togglePlayback() {
  initializeAudioContext();
  if (!appState.global.isPlaying) {
    appState.global.currentStep = 0;
    startPlayback();
    return;
  }
  stopPlayback();
}
// =========================================================================
// PIANO ROLL SCHEDULER
// Schedules the melodic notes drawn in the Piano Roll onto the
// Tone.Transport — one looping Tone.Part per unmuted track that has both
// notes and an instrument.
// =========================================================================
function schedulePianoRoll() {
  // Dispose any previously scheduled Parts so a restart can't double-trigger.
  activeParts.forEach(part => part.dispose());
  activeParts = [];
  appState.pattern.tracks.forEach(track => {
    if (track.muted) return;
    const pattern = track.patterns[track.activePatternIndex];
    // Only schedule tracks that have notes AND an instrument to play them.
    if (pattern && pattern.notes && pattern.notes.length > 0 && track.instrument) {
      // Map each note to a Tone.js event object.
      const events = pattern.notes.map(note => {
        return {
          // Converts ticks (note.pos, assumed 192 PPQ — LMMS default) into
          // Transport time. NOTE(review): this reduces to note.pos / 192, a
          // plain number that Tone interprets as seconds — confirm that is
          // the intended unit here.
          time: 0 + (note.pos * (Tone.Transport.PPQ / 192) / Tone.Transport.PPQ),
          midi: note.key,
          duration: note.len + "i", // 'i' in Tone.js means ticks
          velocity: (note.vol || 100) / 100
        };
      });
      // One Part (sequence) per track, started at Transport time 0.
      const part = new Tone.Part((time, value) => {
        if (track.muted) return; // honor live mute toggles
        const freq = Tone.Frequency(value.midi, "midi");
        // Fires the trigger method standardized across all plugins.
        if (track.instrument.triggerAttackRelease) {
          track.instrument.triggerAttackRelease(freq, value.duration, time, value.velocity);
        }
      }, events).start(0);
      // Loop the melody over the pattern length in bars.
      // Fixed: parseInt now gets an explicit radix, and a missing, zero, or
      // non-numeric bars value falls back to 1 bar instead of producing a
      // broken "0m"/NaN loop length.
      const bars = parseInt(document.getElementById('bars-input')?.value ?? "1", 10) || 1;
      part.loop = true;
      part.loopEnd = bars + "m"; // 'm' = measure
      activeParts.push(part);
    }
  });
}
// =========================================================================
// Renders the current pattern offline to a WAV Blob.
// Note: only sampler tracks (preloaded buffers) are rendered; plugin
// synths would need to be recreated inside the offline context.
// @returns {Promise<Blob>} a 16-bit PCM "audio/wav" blob of one pattern pass.
// =========================================================================
export async function renderActivePatternToBlob() {
  initializeAudioContext();
  const bpm = parseInt(document.getElementById("bpm-input").value, 10) || 120;
  const totalSteps = getTotalSteps();
  const stepInterval = 60 / (bpm * 4); // seconds per 16th-note step
  const duration = totalSteps * stepInterval;
  const buffer = await Tone.Offline(async () => {
    const masterGain = new Tone.Gain().toDestination();
    const offlineTracksParts = appState.pattern.tracks
      .map((track) => {
        // Fixed for consistency with live playback (tick): skip muted
        // tracks and use each track's OWN active pattern rather than
        // assuming every track shares track[0]'s index.
        if (track.muted) return null;
        const pattern = track.patterns?.[track.activePatternIndex ?? 0];
        if (!pattern || !track.buffer || !pattern.steps.includes(true)) {
          return null;
        }
        const trackBuffer = track.buffer;
        // Fixed: default pan to center when unset, and map volume 0 to
        // silence (-Infinity dB), matching playSample().
        const panner = new Tone.Panner(track.pan ?? 0).connect(masterGain);
        const volume = new Tone.Volume(
          track.volume === 0 ? -Infinity : Tone.gainToDb(track.volume)
        ).connect(panner);
        // One event (start time in seconds) per active step.
        const events = [];
        pattern.steps.forEach((isActive, stepIndex) => {
          if (isActive) {
            events.push(stepIndex * stepInterval);
          }
        });
        const part = new Tone.Part((time) => {
          // A fresh Player per hit lets overlapping hits ring out.
          new Tone.Player(trackBuffer)
            .connect(volume)
            .start(time);
        }, events);
        return part;
      })
      .filter((t) => t !== null);
    offlineTracksParts.forEach((part) => {
      part.start(0);
    });
    Tone.Transport.bpm.value = bpm;
    Tone.Transport.start();
  }, duration);
  return bufferToWave(buffer);
}
// =========================================================================
// UTILITY: serializes an AudioBuffer into a 16-bit PCM WAV Blob
// (44-byte RIFF header + interleaved little-endian samples).
// @param {AudioBuffer} abuffer - rendered audio to encode.
// @returns {Blob} blob with MIME type "audio/wav".
// =========================================================================
function bufferToWave(abuffer) {
  const numOfChan = abuffer.numberOfChannels;
  const length = abuffer.length * numOfChan * 2 + 44; // PCM data + header
  const buffer = new ArrayBuffer(length);
  const view = new DataView(buffer);
  const channels = [];
  let pos = 0; // write cursor into the output buffer

  // Writes raw bytes at the cursor, then advances it.
  function setAll(data) {
    for (let i = 0; i < data.length; i++) {
      view.setUint8(pos + i, data[i]);
    }
    pos += data.length;
  }
  // Writes an ASCII tag (e.g. "RIFF") at the cursor.
  function setString(s) {
    setAll(s.split("").map((c) => c.charCodeAt(0)));
  }

  // --- RIFF/WAVE header ---
  setString("RIFF");
  view.setUint32(pos, length - 8, true); // chunk size: file minus "RIFF"+size
  pos += 4;
  setString("WAVE");
  setString("fmt ");
  view.setUint32(pos, 16, true); // fmt sub-chunk size
  pos += 4;
  view.setUint16(pos, 1, true); // audio format: 1 = PCM
  pos += 2;
  view.setUint16(pos, numOfChan, true);
  pos += 2;
  view.setUint32(pos, abuffer.sampleRate, true);
  pos += 4;
  view.setUint32(pos, abuffer.sampleRate * 2 * numOfChan, true); // byte rate
  pos += 4;
  view.setUint16(pos, numOfChan * 2, true); // block align
  pos += 2;
  view.setUint16(pos, 16, true); // bits per sample
  pos += 2;
  setString("data");
  view.setUint32(pos, length - 44, true); // data sub-chunk size
  pos += 4;

  for (let i = 0; i < numOfChan; i++) {
    channels.push(abuffer.getChannelData(i));
  }

  // --- Interleaved 16-bit samples ---
  for (let i = 0; i < abuffer.length; i++) {
    for (let j = 0; j < numOfChan; j++) {
      let sample = Math.max(-1, Math.min(1, channels[j][i]));
      // Fixed: scale asymmetrically so +1.0 maps to 32767 and -1.0 to
      // -32768. The previous formula (0.5 + s * 32767.5) | 0 mapped +1.0
      // to 32768, which wraps to -32768 in setInt16 — an audible click on
      // full-scale positive peaks.
      sample = sample < 0 ? sample * 0x8000 : sample * 0x7fff;
      view.setInt16(pos, sample | 0, true);
      pos += 2;
    }
  }
  return new Blob([buffer], { type: "audio/wav" });
}