diff --git a/assets/js/creations/audio/audio_audio.js b/assets/js/creations/audio/audio_audio.js
index 9e306f0f..e2c350a2 100755
--- a/assets/js/creations/audio/audio_audio.js
+++ b/assets/js/creations/audio/audio_audio.js
@@ -10,6 +10,11 @@ import {
   getAudioContext,
   getMainGainNode,
 } from "../audio.js";
+import {
+  startSongPatternPlaybackOnTransport,
+  stopSongPatternPlaybackOnTransport,
+} from "../pattern/pattern_audio.js";
+
 import { getPixelsPerSecond } from "../utils.js";
 // 🔊 ADDITION: use the SAME Tone instance the project already uses
 import * as Tone from "https://esm.sh/tone";
@@ -313,10 +318,6 @@ export async function startAudioEditorPlayback(seekTime) {
   // own clock alignment (kept for your scheduler)
   startTime = audioCtx.currentTime;
 
-  // =================================================================
-  // 👇 START OF FIX (Bugs 1 & 2)
-  // =================================================================
-
   // 1. Determine the start time:
   let timeToStart =
     seekTime !== null && seekTime !== undefined && !isNaN(seekTime)
@@ -334,16 +335,13 @@
     Tone.Transport.seconds = timeToStart; // 👈 uses the synchronized time
   } catch {}
 
-  // =================================================================
-  // 👆 END OF FIX
-  // =================================================================
-
   updateTransportLoop();
 
   console.log("%cStarting Playback...", "color: #3498db;");
 
   // start the Transport (to fire the .sync()'d Players)
   try {
+    startSongPatternPlaybackOnTransport();
     Tone.Transport.start();
   } catch {}
 
@@ -366,6 +364,7 @@ export function stopAudioEditorPlayback(rewind = false) {
 
   // stop the Transport (for the .sync()'d Players)
   try {
+    stopSongPatternPlaybackOnTransport();
     Tone.Transport.stop();
   } catch {}
 
diff --git a/assets/js/creations/file.js b/assets/js/creations/file.js
index 41395c92..c993bbff 100755
--- a/assets/js/creations/file.js
+++ b/assets/js/creations/file.js
@@ -520,7 +520,7 @@ export async function parseMmpContent(xmlString) {
 
   const firstInst = newTracks.find((t) => t.type !== "bassline");
   appState.pattern.activeTrackId = firstInst ? firstInst.id : null;
-  //appState.pattern.activePatternIndex = 0;
+  appState.pattern.activePatternIndex = 0;
 
   loadStateFromSession();
diff --git a/assets/js/creations/pattern/pattern_audio.js b/assets/js/creations/pattern/pattern_audio.js
index ff21bce6..68f273eb 100755
--- a/assets/js/creations/pattern/pattern_audio.js
+++ b/assets/js/creations/pattern/pattern_audio.js
@@ -320,7 +320,7 @@ function schedulePianoRoll() {
 }
 
 // =========================================================================
-// Render the current Pattern to an Audio Blob (KEPT ORIGINAL)
+// Render the current Pattern to an Audio Blob
 // =========================================================================
 
 export async function renderActivePatternToBlob() {
@@ -477,7 +477,7 @@
 }
 
 // =========================================================================
-// UTILITY FUNCTION: Converts an AudioBuffer to a WAV Blob (KEPT ORIGINAL)
+// UTILITY FUNCTION: Converts an AudioBuffer to a WAV Blob
// =========================================================================
 
 function bufferToWave(abuffer) {
@@ -539,3 +539,81 @@
 
   return new Blob([buffer], { type: "audio/wav" });
 }
+
+// ===============================
+// Song/Playlist Pattern Scheduler
+// (plays patterns arranged in the Playlist)
+// ===============================
+
+const LMMS_TICKS_PER_STEP = 12;
+let songPatternScheduleId = null;
+
+export function startSongPatternPlaybackOnTransport() {
+  initializeAudioContext();
+  if (songPatternScheduleId !== null) return;
+
+  songPatternScheduleId = Tone.Transport.scheduleRepeat((time) => {
+    // current bpm
+    const bpm = parseInt(document.getElementById("bpm-input")?.value, 10) || 120;
+    const stepIntervalSec = 60 / (bpm * 4);
+
+    // absolute song step (accounts for Transport seeks)
+    const songStep = Math.floor(Tone.Transport.seconds / stepIntervalSec + 1e-6);
+    const songTick = songStep * LMMS_TICKS_PER_STEP;
+
+    // which patterns (columns) are active at this tick?
+    const basslineTracks = appState.pattern.tracks.filter(
+      (t) => t.type === "bassline" && !t.isMuted
+    );
+
+    const activePatternHits = [];
+    for (const b of basslineTracks) {
+      const clips = b.playlist_clips || [];
+      const clip = clips.find((c) => songTick >= c.pos && songTick < c.pos + c.len);
+      if (!clip) continue;
+
+      const localStep = Math.floor((songTick - clip.pos) / LMMS_TICKS_PER_STEP);
+      activePatternHits.push({ patternIndex: b.patternIndex, localStep });
+    }
+
+    if (activePatternHits.length === 0) return;
+
+    // trigger the real instruments (samplers/plugins)
+    for (const track of appState.pattern.tracks) {
+      if (track.type === "bassline") continue;
+      if (track.muted) continue;
+
+      for (const hit of activePatternHits) {
+        const patt = track.patterns?.[hit.patternIndex];
+        if (!patt?.steps) continue;
+
+        if (patt.steps[hit.localStep]) {
+          // SAMPLER
+          if (track.type === "sampler" && track.player) {
+            track.player.restart = true; // drums need to retrigger
+            try {
+              track.player.start(time);
+            } catch {}
+          }
+          // PLUGIN (step without piano roll)
+          else if (track.type === "plugin" && track.instrument) {
+            const hasNotes = patt.notes && patt.notes.length > 0;
+            if (!hasNotes) {
+              try {
+                track.instrument.triggerAttackRelease("C5", "16n", time);
+              } catch {}
+            }
+          }
+        }
+      }
+    }
+  }, "16n");
+}
+
+export function stopSongPatternPlaybackOnTransport() {
+  if (songPatternScheduleId === null) return;
+  try {
+    Tone.Transport.clear(songPatternScheduleId);
+  } catch {}
+  songPatternScheduleId = null;
+}
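Reviewer note: the new scheduler derives the playlist position from Tone.Transport.seconds on every "16n" callback instead of keeping its own counter, so it stays consistent after a seek. Below is a minimal sketch of the same timing math, assuming the patch's conventions of 4 sixteenth-note steps per beat and 12 LMMS ticks per step; the helper name is illustrative only, not part of the patch.

// Sketch only: maps a Transport time to a song step/tick the same way
// startSongPatternPlaybackOnTransport() does (4 steps per beat, 12 ticks per step).
const TICKS_PER_STEP = 12;

function songPositionAt(transportSeconds, bpm) {
  const stepIntervalSec = 60 / (bpm * 4); // duration of one 16th step in seconds
  const step = Math.floor(transportSeconds / stepIntervalSec + 1e-6);
  return { step, tick: step * TICKS_PER_STEP };
}

// Example: at 120 BPM a step lasts 0.125 s, so 2.0 s into the song is
// step 16, which the 12-ticks-per-step convention maps to tick 192.
console.log(songPositionAt(2.0, 120)); // { step: 16, tick: 192 }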