// js/pattern_audio.js
import * as Tone from "https://esm.sh/tone";

import { appState } from "../state.js";
import { highlightStep } from "./pattern_ui.js";
import { getTotalSteps } from "../utils.js";
import { initializeAudioContext } from "../audio.js";

import { TripleOscillator } from "../../audio/plugins/TripleOscillator.js";
import { Nes } from "../../audio/plugins/Nes.js";
import { SuperSaw } from "../../audio/plugins/SuperSaw.js";
import { Lb302 } from "../../audio/plugins/Lb302.js";
import { Kicker } from "../../audio/plugins/Kicker.js";

const TICKS_PER_STEP = 12; // LMMS: 12 ticks per 1/16 step
const STEPS_PER_BAR = 16; // one 4/4 bar counted in 1/16 steps
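
// Worked example (illustrative, based on the constants above): at 120 BPM a
// 1/16 step lasts 60 / (120 * 4) = 0.125 s, so one LMMS tick is
// 0.125 / TICKS_PER_STEP ≈ 0.0104 s and a full 16-step bar is 2 s.
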
// Map of plugin names to classes for dynamic instantiation
const PLUGIN_CLASSES = {
  tripleoscillator: TripleOscillator,
  nes: Nes,
  supersaw: SuperSaw,
  lb302: Lb302,
  kicker: Kicker,
};

const timerDisplay = document.getElementById("timer-display");

// Holds the Tone.js "Parts" (melodic sequences) currently scheduled
let activeParts = [];

function formatTime(milliseconds) {
  const totalSeconds = Math.floor(milliseconds / 1000);
  const minutes = Math.floor(totalSeconds / 60)
    .toString()
    .padStart(2, "0");
  const seconds = (totalSeconds % 60).toString().padStart(2, "0");
  const centiseconds = Math.floor((milliseconds % 1000) / 10)
    .toString()
    .padStart(2, "0");
  return `${minutes}:${seconds}:${centiseconds}`;
}

export function playMetronomeSound(isDownbeat) {
  initializeAudioContext();
  const synth = new Tone.Synth().toDestination();
  const freq = isDownbeat ? 1000 : 800;
  synth.triggerAttackRelease(freq, "8n", Tone.now());
  // Free the one-shot synth once the click has finished sounding
  setTimeout(() => synth.dispose(), 1000);
}

// Triggers a track's sample, making sure the player is routed correctly
export function playSample(filePath, trackId) {
  initializeAudioContext();
  const track = trackId
    ? appState.pattern.tracks.find((t) => t.id == trackId)
    : null;

  // If the track exists and has a preloaded player
  if (track && track.player) {
    if (track.player.loaded) {
      // Re-apply volume/pan on every hit (robust to real-time changes)
      if (track.volumeNode) {
        track.volumeNode.volume.value =
          track.volume === 0 ? -Infinity : Tone.gainToDb(track.volume);
      }
      if (track.pannerNode) {
        track.pannerNode.pan.value = track.pan ?? 0;
      }

      // Ensure the connection: player -> volumeNode
      try {
        track.player.disconnect();
      } catch {}
      if (track.volumeNode) {
        track.player.connect(track.volumeNode);
      }

      // Trigger immediately
      track.player.start(Tone.now());
    } else {
      console.warn(
        `Player for track "${track.name}" not loaded yet; skipping this tick.`
      );
    }
  }
  // Fallback: preview a sample without a trackId
  else if (!trackId && filePath) {
    const previewPlayer = new Tone.Player(filePath).toDestination();
    previewPlayer.autostart = true;
  }
}

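// Usage sketch (illustrative; the file path and call sites are assumptions,
// not part of this module):
//   playSample("samples/kick.wav");          // one-off preview, no track routing
//   playSample(track.samplePath, track.id);  // routed through the track's volume/pan nodes
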
function tick() {
  if (!appState.global.isPlaying) {
    stopPlayback();
    return;
  }

  const totalSteps = getTotalSteps();
  const lastStepIndex =
    appState.global.currentStep === 0
      ? totalSteps - 1
      : appState.global.currentStep - 1;
  highlightStep(lastStepIndex, false);

  const bpm = parseInt(document.getElementById("bpm-input").value, 10) || 120;
  const stepInterval = (60 * 1000) / (bpm * 4);
  const currentTime = appState.global.currentStep * stepInterval;
  if (timerDisplay) {
    timerDisplay.textContent = formatTime(currentTime);
  }

  // Metronome
  if (appState.global.metronomeEnabled) {
    const noteValue =
      parseInt(document.getElementById("compasso-b-input").value, 10) || 4;
    const stepsPerBeat = 16 / noteValue;
    if (appState.global.currentStep % stepsPerBeat === 0) {
      playMetronomeSound(
        appState.global.currentStep % (stepsPerBeat * 4) === 0
      );
    }
  }

  // Iterate over the tracks
  appState.pattern.tracks.forEach((track) => {
    if (track.muted) return;
    if (!track.patterns || track.patterns.length === 0) return;

    const activePattern = track.patterns[track.activePatternIndex];
    if (!activePattern) return;

    // Check whether the current step is active
    if (activePattern.steps[appState.global.currentStep]) {
      // CASE 1: SAMPLER (always plays on the step)
      if (track.samplePath) {
        playSample(track.samplePath, track.id);
      }
      // CASE 2: PLUGIN (synthesizer)
      else if (track.type === "plugin" && track.instrument) {
        // --- DUPLICATE-SOUND FIX ---
        // Check whether the Piano Roll holds any notes.
        // If it does (notes.length > 0), IGNORE the step sequencer:
        // the sound is produced ONLY by 'schedulePianoRoll'.

        const hasNotes = activePattern.notes && activePattern.notes.length > 0;

        if (!hasNotes) {
          // Only play the step's C5 when no melody has been drawn
          try {
            track.instrument.triggerAttackRelease("C5", "16n", Tone.now());
          } catch (e) {}
        }
      }
    }
  });

  highlightStep(appState.global.currentStep, true);
  appState.global.currentStep = (appState.global.currentStep + 1) % totalSteps;
}

export function startPlayback() {
  if (appState.global.isPlaying || appState.pattern.tracks.length === 0) return;
  initializeAudioContext();

  // Make sure the Tone context is running
  if (Tone.context.state !== "running") {
    Tone.start();
  }

  if (appState.global.currentStep === 0) {
    rewindPlayback();
  }

  const bpm = parseInt(document.getElementById("bpm-input").value, 10) || 120;
  Tone.Transport.bpm.value = bpm;
  const stepInterval = (60 * 1000) / (bpm * 4);

  if (appState.global.playbackIntervalId)
    clearInterval(appState.global.playbackIntervalId);

  // Schedule the Piano Roll (melodies)
  schedulePianoRoll();
  Tone.Transport.start(); // Start the clock that drives the melodic notes

  appState.global.isPlaying = true;
  const playBtn = document.getElementById("play-btn");
  if (playBtn) {
    playBtn.classList.remove("fa-play");
    playBtn.classList.add("fa-pause");
  }

  tick();
  appState.global.playbackIntervalId = setInterval(tick, stepInterval);
}

export function stopPlayback() {
  if (appState.global.playbackIntervalId) {
    clearInterval(appState.global.playbackIntervalId);
  }
  appState.global.playbackIntervalId = null;
  appState.global.isPlaying = false;

  // Stop the Transport and clean up synths
  Tone.Transport.stop();

  // Clear scheduled melodic parts
  activeParts.forEach((part) => part.dispose());
  activeParts = [];

  // Release any stuck notes on every plugin
  appState.pattern.tracks.forEach((track) => {
    try {
      track.player?.stop();
    } catch {}
    try {
      track.instrument?.releaseAll?.();
    } catch {} // PolySynths
    try {
      track.instrument?.triggerRelease?.();
    } catch {} // MonoSynths
  });

  document
    .querySelectorAll(".step.playing")
    .forEach((s) => s.classList.remove("playing"));
  appState.global.currentStep = 0;
  if (timerDisplay) timerDisplay.textContent = "00:00:00";

  const playBtn = document.getElementById("play-btn");
  if (playBtn) {
    playBtn.classList.remove("fa-pause");
    playBtn.classList.add("fa-play");
  }
}

export function rewindPlayback() {
  const lastStep =
    appState.global.currentStep > 0
      ? appState.global.currentStep - 1
      : getTotalSteps() - 1;
  appState.global.currentStep = 0;

  Tone.Transport.position = 0; // Reset the Tone.js timeline

  if (!appState.global.isPlaying) {
    if (timerDisplay) timerDisplay.textContent = "00:00:00";
    highlightStep(lastStep, false);
  }
}

export function togglePlayback() {
  initializeAudioContext();
  if (appState.global.isPlaying) {
    stopPlayback();
  } else {
    appState.global.currentStep = 0;
    startPlayback();
  }
}

// Piano Roll (melody) scheduler
function schedulePianoRoll() {
  activeParts.forEach((part) => part.dispose());
  activeParts = [];

  appState.pattern.tracks.forEach((track) => {
    if (track.muted) return;
    const pattern = track.patterns[track.activePatternIndex];

    if (
      pattern &&
      pattern.notes &&
      pattern.notes.length > 0 &&
      track.instrument
    ) {
      // Convert notes into Tone.js events
      const bpm = parseInt(document.getElementById("bpm-input")?.value, 10) || 120;
      const stepSec = 60 / (bpm * 4); // 1/16

      const events = pattern.notes.map((note) => {
        const posSteps = (note.pos || 0) / TICKS_PER_STEP;

        const rawLen = note.len || 0;
        const lenTicks = rawLen < 0 ? TICKS_PER_STEP : rawLen; // extra safeguard
        const lenSteps = Math.max(1, lenTicks / TICKS_PER_STEP);

        return {
          time: posSteps * stepSec, // seconds
          midi: note.key,
          duration: lenSteps * stepSec, // seconds
          velocity: (note.vol || 100) / 100,
        };
      });

      const part = new Tone.Part((time, value) => {
        if (track.muted) return;
        const freq = Tone.Frequency(value.midi, "midi");

        // Trigger the note
        if (track.instrument.triggerAttackRelease) {
          // If the computed duration is too short or invalid, fall back to 16n
          const dur = value.duration || "16n";
          track.instrument.triggerAttackRelease(
            freq,
            dur,
            time,
            value.velocity
          );
        }
      }, events).start(0);

      // The loop must cover the full extent of the piano roll (last note)
      const barsInput =
        parseInt(document.getElementById("bars-input")?.value || 1, 10) || 1;

      let maxEndTick = 0;

      for (const n of pattern.notes) {
        const pos = Number(n.pos) || 0;
        const rawLen = Number(n.len) || 0;

        // A negative len happens in some cases (LMMS one-shot/edge notes)
        const lenTicks = rawLen < 0 ? TICKS_PER_STEP : rawLen;

        // Guarantee at least 1 step
        const endTick = pos + Math.max(lenTicks, TICKS_PER_STEP);
        if (endTick > maxEndTick) maxEndTick = endTick;
      }

      const stepsNeeded = Math.max(1, Math.ceil(maxEndTick / TICKS_PER_STEP));
      const barsNeeded = Math.max(1, Math.ceil(stepsNeeded / STEPS_PER_BAR));

      // Honor bars-input when the user sets it larger, but never smaller than needed
      const loopBars = Math.max(barsInput, barsNeeded);

      part.loop = true;
      part.loopEnd = `${loopBars}m`;

      // Keep a reference so stopPlayback() can dispose this part
      activeParts.push(part);
    }
  });
}

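// Worked example (illustrative): with TICKS_PER_STEP = 12 and 120 BPM
// (stepSec = 0.125 s), a note { pos: 24, len: 24, key: 60, vol: 100 } becomes
// { time: 0.25, midi: 60, duration: 0.25, velocity: 1 }, i.e. a C4 starting
// on the third 1/16 step and lasting two steps.
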
// =========================================================================
// Render the current pattern to an audio Blob
// =========================================================================

export async function renderActivePatternToBlob() {
  initializeAudioContext();

  const bpm = parseInt(document.getElementById("bpm-input").value, 10) || 120;

  // =========================================================
  // 1. SMART DURATION CALCULATION
  // =========================================================
  const stepInterval = 60 / (bpm * 4);
  const activePatternIndex =
    appState.pattern.tracks[0]?.activePatternIndex || 0;
  let maxStepFound = getTotalSteps(); // Minimum: what is visible on screen

  // Scan every track to find the last note or active step
  appState.pattern.tracks.forEach((track) => {
    const p = track.patterns[activePatternIndex];
    if (!p) return;

    // A. Steps (drums)
    if (p.steps && p.steps.includes(true)) {
      const lastIdx = p.steps.lastIndexOf(true);
      if (lastIdx + 1 > maxStepFound) maxStepFound = lastIdx + 1;
    }

    // B. Notes (Piano Roll), assuming 192 ticks per beat and 1/16 steps (48 ticks)
    if (p.notes && p.notes.length > 0) {
      p.notes.forEach((n) => {
        const endTick = n.pos + n.len;
        const endStep = Math.ceil(endTick / 48);
        if (endStep > maxStepFound) maxStepFound = endStep;
      });
    }
  });

  // Round up to the next full bar (multiple of 16)
  const stepsPerBar = 16;
  const totalSteps = Math.ceil(maxStepFound / stepsPerBar) * stepsPerBar;
  const duration = totalSteps * stepInterval;

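  // Worked example (illustrative): at 120 BPM stepInterval = 0.125 s; if the
  // last hit falls on step 20, maxStepFound rounds up to 32 steps (two bars),
  // so the offline render below lasts 32 * 0.125 = 4 s.
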
  // =========================================================
  // 2. OFFLINE RENDERING
  // =========================================================
  const buffer = await Tone.Offline(async ({ transport }) => {
    const masterGain = new Tone.Gain().toDestination();

    // Loop over every track in the project
    appState.pattern.tracks.forEach((track) => {
      const pattern = track.patterns[activePatternIndex];
      // Skip tracks with no pattern and muted tracks
      if (!pattern || track.muted) return;

      // Check for content (audio buffer OR MIDI notes OR active steps)
      const hasAudio = track.buffer;
      const hasNotes = pattern.notes && pattern.notes.length > 0;
      const hasSteps = pattern.steps && pattern.steps.includes(true);

      if (!hasAudio && !hasNotes && !hasSteps) return;

      // Create a volume/pan channel for this track inside the offline context
      const panner = new Tone.Panner(track.pan || 0).connect(masterGain);
      const volume = new Tone.Volume(
        track.volume === 0 ? -100 : Tone.gainToDb(track.volume)
      ).connect(panner);

      // --- SCENARIO A: a SAMPLE (recorded audio) ---
      if (track.samplePath && track.buffer) {
        // Original step logic for samples
        if (pattern.steps) {
          const events = [];
          pattern.steps.forEach((isActive, stepIndex) => {
            if (isActive) events.push(stepIndex * stepInterval);
          });

          if (events.length > 0) {
            new Tone.Part((time) => {
              new Tone.Player(track.buffer).connect(volume).start(time);
            }, events).start(0);
          }
        }
      }

      // --- SCENARIO B: a PLUGIN (synthesizer) ---
      else if (track.type === "plugin") {
        // Normalize the name (e.g. "TripleOscillator" -> "tripleoscillator"),
        // taken from 'pluginName', 'instrument.name' or the params object itself
        const pluginName = (
          track.pluginName ||
          track.instrument?.constructor?.name ||
          ""
        ).toLowerCase();

        const PluginClass = PLUGIN_CLASSES[pluginName];

        if (PluginClass) {
          // Instantiate the plugin inside the offline context.
          // 'track.params' or 'track.pluginData' is passed in (adjust to how your appState stores the data)
          const instrumentInstance = new PluginClass(
            null,
            track.params || track.pluginData || {}
          );

          // Connect it into the offline audio chain
          instrumentInstance.connect(volume);

          // 1. Schedule Piano Roll notes
          if (hasNotes) {
            const events = pattern.notes.map((note) => ({
              time: 0 + note.pos * (48 / 192) * stepInterval, // Approximate ticks -> seconds conversion
              // For exact Tone precision, use: note.pos * (Tone.Transport.PPQ / 192) / Tone.Transport.PPQ
              midi: note.key,
              duration: (note.len / 192) * (60 / bpm), // Duration in seconds
              velocity: (note.vol || 100) / 100,
            }));

            new Tone.Part((time, val) => {
              const freq = Tone.Frequency(val.midi, "midi");
              instrumentInstance.triggerAttackRelease(freq, val.duration, time);
            }, events).start(0);
          }

          // 2. Schedule steps (when the synth is used as a drum/effect in the step sequencer)
          else if (hasSteps) {
            const stepEvents = [];
            pattern.steps.forEach((isActive, idx) => {
              if (isActive) stepEvents.push(idx * stepInterval);
            });

            new Tone.Part((time) => {
              // Play a default C5 for steps without a defined note
              instrumentInstance.triggerAttackRelease(
                Tone.Frequency("C5"),
                0.1,
                time
              );
            }, stepEvents).start(0);
          }
        } else {
          console.warn(
            `Render: plugin not supported or not found: ${pluginName}`
          );
        }
      }
    });

    // Configure and start the offline Transport
    transport.bpm.value = bpm;
    transport.start();
  }, duration);

  const blob = bufferToWave(buffer);
  return blob;
}

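// Usage sketch (illustrative; the file name and trigger are assumptions,
// not part of this module):
//   const blob = await renderActivePatternToBlob();
//   const url = URL.createObjectURL(blob);
//   const a = Object.assign(document.createElement("a"), {
//     href: url,
//     download: "pattern.wav",
//   });
//   a.click();
//   URL.revokeObjectURL(url);
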
// =========================================================================
// UTILITY: Convert an AudioBuffer to a WAV Blob
// =========================================================================

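// Layout note (standard 16-bit PCM WAV, added for reference): a 44-byte header
// consisting of "RIFF" + total size minus 8, "WAVE", and a 16-byte "fmt " chunk
// (format 1 = PCM, channel count, sample rate, byte rate, block align,
// 16 bits per sample), followed by a "data" chunk of interleaved samples.
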
function bufferToWave(abuffer) {
  let numOfChan = abuffer.numberOfChannels;
  let length = abuffer.length * numOfChan * 2 + 44;
  let buffer = new ArrayBuffer(length);
  let view = new DataView(buffer);
  let channels = [],
    i,
    sample;
  let pos = 0;

  function setAll(data) {
    for (i = 0; i < data.length; i++) {
      view.setUint8(pos + i, data[i]);
    }
    pos += data.length;
  }
  function setString(s) {
    setAll(s.split("").map((c) => c.charCodeAt(0)));
  }

  setString("RIFF");
  view.setUint32(pos, length - 8, true);
  pos += 4;
  setString("WAVE");
  setString("fmt ");
  view.setUint32(pos, 16, true);
  pos += 4;
  view.setUint16(pos, 1, true);
  pos += 2;
  view.setUint16(pos, numOfChan, true);
  pos += 2;
  view.setUint32(pos, abuffer.sampleRate, true);
  pos += 4;
  view.setUint32(pos, abuffer.sampleRate * 2 * numOfChan, true);
  pos += 4;
  view.setUint16(pos, numOfChan * 2, true);
  pos += 2;
  view.setUint16(pos, 16, true);
  pos += 2;
  setString("data");
  view.setUint32(pos, length - 44, true);
  pos += 4;

  for (i = 0; i < numOfChan; i++) {
    channels.push(abuffer.getChannelData(i));
  }

  for (i = 0; i < abuffer.length; i++) {
    for (let j = 0; j < numOfChan; j++) {
      sample = Math.max(-1, Math.min(1, channels[j][i]));
      sample = (0.5 + sample * 32767.5) | 0;
      view.setInt16(pos, sample, true);
      pos += 2;
    }
  }

  return new Blob([buffer], { type: "audio/wav" });
}

// ===============================
// Song/Playlist Pattern Scheduler
// (plays patterns arranged in the Playlist)
// ===============================

const LMMS_TICKS_PER_STEP = 12;
let songPatternScheduleId = null;

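// The scheduler below calls ticksToSec() and midiToNoteName(), which are not
// defined or imported in this file. Minimal sketches are provided here under
// the assumption that equivalent helpers do not already exist elsewhere in the
// project; if they do (e.g. in ../utils.js), import those instead.
function ticksToSec(ticks, stepIntervalSec) {
  // One step is LMMS_TICKS_PER_STEP ticks long, so scale proportionally
  return (ticks / LMMS_TICKS_PER_STEP) * stepIntervalSec;
}

function midiToNoteName(midi) {
  // e.g. Tone.Frequency(60, "midi").toNote() === "C4"
  return Tone.Frequency(midi, "midi").toNote();
}
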
export function startSongPatternPlaybackOnTransport() {
  initializeAudioContext();
  if (songPatternScheduleId !== null) return;

  songPatternScheduleId = Tone.Transport.scheduleRepeat((time) => {
    // Current bpm
    const bpm = parseInt(document.getElementById("bpm-input")?.value, 10) || 120;
    const stepIntervalSec = 60 / (bpm * 4);

    // Absolute song step (accounts for Transport seeking)
    const songStep = Math.floor(Tone.Transport.seconds / stepIntervalSec + 1e-6);
    const songTick = songStep * LMMS_TICKS_PER_STEP;

    // Which patterns (columns) are active on this tick?
    const basslineTracks = appState.pattern.tracks.filter(
      (t) => t.type === "bassline" && !t.isMuted
    );

    const activePatternHits = [];
    for (const b of basslineTracks) {
      const clips = b.playlist_clips || [];
      const clip = clips.find((c) => songTick >= c.pos && songTick < c.pos + c.len);
      if (!clip) continue;

      const localStep = Math.floor((songTick - clip.pos) / LMMS_TICKS_PER_STEP);
      activePatternHits.push({ patternIndex: b.patternIndex, localStep });
    }

    if (activePatternHits.length === 0) return;

    // Trigger the real instruments (samplers/plugins)
    for (const track of appState.pattern.tracks) {
      if (track.type === "bassline") continue;
      if (track.muted) continue;

      for (const hit of activePatternHits) {
        const patt = track.patterns?.[hit.patternIndex];
        if (!patt?.steps) continue;

        // Wrap the local step around the pattern length
        const pattLen = patt.steps.length;
        const stepInPattern = pattLen > 0 ? (hit.localStep % pattLen) : hit.localStep;

        // 1) Plugin with piano roll notes
        if (
          track.type === "plugin" &&
          track.instrument &&
          Array.isArray(patt.notes) &&
          patt.notes.length > 0
        ) {
          const stepStartTick = hit.localStep * LMMS_TICKS_PER_STEP;
          const stepEndTick = stepStartTick + LMMS_TICKS_PER_STEP;

          for (const n of patt.notes) {
            const nPos = Number(n.pos) || 0;
            if (nPos < stepStartTick || nPos >= stepEndTick) continue;

            const offsetTicks = nPos - stepStartTick;
            const t2 = time + ticksToSec(offsetTicks, stepIntervalSec);

            const lenTicks = Math.max(1, Number(n.len) || LMMS_TICKS_PER_STEP);
            const durSec = Math.max(0.01, ticksToSec(lenTicks, stepIntervalSec));

            const vel = Math.max(0, Math.min(1, (Number(n.vol) || 100) / 100));
            const noteName = midiToNoteName(n.key);

            try {
              track.instrument.triggerAttackRelease(noteName, durSec, t2, vel);
            } catch {
              try {
                track.instrument.triggerAttackRelease(noteName, durSec, t2);
              } catch {}
            }
          }

          continue; // Important: do not fall through to the step logic below
        }

        // 2) Classic step logic (sampler / plugin without notes)
        if (!patt.steps) continue;

        if (patt.steps[stepInPattern]) {
          if (track.type === "sampler" && track.player) {
            track.player.restart = true;
            try {
              // If the player is already running, stop it right before restarting
              if (track.player.state === "started") {
                track.player.stop();
              }
              track.player.start(time);
            } catch {}
          } else if (track.type === "plugin" && track.instrument) {
            // Plugin without a piano roll
            try {
              track.instrument.triggerAttackRelease("C5", "16n", time);
            } catch {}
          }
        }
      }
    }
  }, "16n");
}

export function stopSongPatternPlaybackOnTransport() {
  if (songPatternScheduleId === null) return;
  try {
    Tone.Transport.clear(songPatternScheduleId);
  } catch {}
  songPatternScheduleId = null;
}
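
// Usage sketch (illustrative; assumes the host UI wires these to song-mode
// play/stop controls, with Tone.Transport driving the scheduling):
//   startSongPatternPlaybackOnTransport();
//   Tone.Transport.start();
//   ...
//   Tone.Transport.stop();
//   stopSongPatternPlaybackOnTransport();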