diff --git a/assets/js/creations/audio/audio_audio.js b/assets/js/creations/audio/audio_audio.js
index a98f62ec..7e1c88de 100755
--- a/assets/js/creations/audio/audio_audio.js
+++ b/assets/js/creations/audio/audio_audio.js
@@ -68,10 +68,7 @@ function _convertSecondsToBeat(seconds) {
 function _initContext() {
   if (!audioCtx) {
     initializeAudioContext();
-    // ✅ make sure it is the native AudioContext
-    audioCtx = Tone.getContext().rawContext;
-  } else if (audioCtx.rawContext) {
-    audioCtx = audioCtx.rawContext;
+    audioCtx = getAudioContext(); // should be Tone's rawContext
   }
 }
 
@@ -407,17 +404,16 @@ export async function startAudioEditorPlayback(seekTime) {
 
   // ensure Tone's context is active (the user gesture has already happened)
   await Tone.start();
-
-  const raw = audioCtx?.rawContext || audioCtx;
-  if (raw?.state === "suspended" && typeof raw.resume === "function") {
-    await raw.resume();
+  if (audioCtx.state === "suspended") {
+    await audioCtx.resume();
   }
-  audioCtx = raw;
 
-  // own clock alignment
-  startTime = audioCtx.currentTime;
+
   isPlaying = true;
   appState.global.isAudioEditorPlaying = true;
 
+  // own clock alignment (kept for the custom scheduler)
+  startTime = audioCtx.currentTime;
+
   // 1. Determine the start time:
   let timeToStart =
     seekTime !== null && seekTime !== undefined && !isNaN(seekTime)
diff --git a/assets/js/creations/main.js b/assets/js/creations/main.js
index 7b31a877..959038c5 100755
--- a/assets/js/creations/main.js
+++ b/assets/js/creations/main.js
@@ -129,19 +129,6 @@ document.addEventListener("DOMContentLoaded", () => {
   const removePatternBtn = document.getElementById("remove-pattern-btn");
   const downloadPackageBtn = document.getElementById("download-package-btn");
 
-  // Render project
-  const renderAudioBtn = document.getElementById("render-audio-btn");
-
-  // save the .mmp project (makes more sense with the save icon)
-  saveMmpBtn?.addEventListener("click", generateMmpFile);
-
-  // render audio
-  renderAudioBtn?.addEventListener("click", () => {
-    console.log("[UI] Clicked render-audio-btn");
-    renderProjectAndDownload();
-  });
-
-
   // Download project
   downloadPackageBtn?.addEventListener("click", generateMmpFile);
 
@@ -382,12 +369,13 @@ document.addEventListener("DOMContentLoaded", () => {
     if (file) handleFileLoad(file).then(() => closeOpenProjectModal());
   });
   uploadSampleBtn?.addEventListener("click", () => sampleFileInput?.click());
+  saveMmpBtn?.addEventListener("click", renderProjectAndDownload);
 
   addInstrumentBtn?.addEventListener("click", () => {
     initializeAudioContext();
     sendAction({ type: "ADD_TRACK" });
   });
-  
+
   removeInstrumentBtn?.addEventListener("click", () => {
     initializeAudioContext();
diff --git a/assets/js/creations/pattern/pattern_audio.js b/assets/js/creations/pattern/pattern_audio.js
index b7bce8e5..c479d21c 100755
--- a/assets/js/creations/pattern/pattern_audio.js
+++ b/assets/js/creations/pattern/pattern_audio.js
@@ -211,7 +211,7 @@ function getSongInstrument(track, patternIndex) {
   }
 
   const mix = refreshSongMixFor(track, patternIndex);
-  const inst = new Cls(Tone.getContext(), track.params || track.pluginData || {});
+  const inst = new Cls(null, track.params || track.pluginData || {});
   inst.connect(mix.instVol);
 
   track._songInstrument[patternIndex] = inst;
@@ -447,11 +447,11 @@ export async function renderActivePatternToBlob() {
       if (lastIdx + 1 > maxStepFound) maxStepFound = lastIdx + 1;
     }
 
-    // B. Notes (Piano Roll) - Assuming 192 ticks/beat and 1/16 steps (12 ticks)
+    // B. Notes (Piano Roll) - Assuming 192 ticks/beat and 1/16 steps (48 ticks)
     if (p.notes && p.notes.length > 0) {
       p.notes.forEach((n) => {
         const endTick = n.pos + n.len;
-        const endStep = Math.ceil(endTick / 12);
+        const endStep = Math.ceil(endTick / 48);
         if (endStep > maxStepFound) maxStepFound = endStep;
       });
     }
@@ -466,9 +466,7 @@ export async function renderActivePatternToBlob() {
   // 2. OFFLINE RENDERING
   // =========================================================
   const buffer = await Tone.Offline(async ({ transport }) => {
-    const rawCtx = Tone.getContext().rawContext;
-    const masterGain = new Tone.Gain(1);
-    masterGain.connect(rawCtx.destination);
+    const masterGain = new Tone.Gain().toDestination();
 
     // Loop over each track in the project
     appState.pattern.tracks.forEach((track) => {
@@ -500,8 +498,7 @@ export async function renderActivePatternToBlob() {
 
         if (events.length > 0) {
           new Tone.Part((time) => {
-            const buf = track.buffer?.get?.() || track.buffer; // ✅ grab the AudioBuffer when it is a ToneAudioBuffer
-            new Tone.Player(buf).connect(volume).start(time);
+            new Tone.Player(track.buffer).connect(volume).start(time);
           }, events).start(0);
         }
       }
@@ -522,27 +519,28 @@ export async function renderActivePatternToBlob() {
         if (PluginClass) {
           // INSTANTIATE THE PLUGIN IN THE OFFLINE WORLD
           // Pass 'track.params' or 'track.pluginData' (adjust to however your appState stores the data)
-          const instrumentInstance = new PluginClass(Tone.getContext(), track.params || track.pluginData || {});
+          const instrumentInstance = new PluginClass(
+            null,
+            track.params || track.pluginData || {}
+          );
 
           // Connect into the offline audio chain
           instrumentInstance.connect(volume);
 
           // 1. Schedule the Piano Roll notes
           if (hasNotes) {
-            const TICKS_PER_BEAT = 192;
-            const SECONDS_PER_BEAT = 60 / bpm;
-            const TICKS_PER_STEP = 12; // ✅ 12 ticks per 1/16
             const events = pattern.notes.map((note) => ({
-              time: (note.pos / TICKS_PER_STEP) * stepInterval,
+              time: (note.pos / 48) * stepInterval, // ticks -> seconds (48 ticks per 1/16 step)
+              // For Tone-exact precision, use: note.pos * (Tone.Transport.PPQ / 192) / Tone.Transport.PPQ
               midi: note.key,
-              duration: (note.len / TICKS_PER_STEP) * stepInterval,
+              duration: (note.len / 192) * (60 / bpm), // duration in seconds
               velocity: (note.vol || 100) / 100,
             }));
 
-            new Tone.Part((time, val) => {
-              const freq = Tone.Frequency(val.midi, "midi").toFrequency();
-              instrumentInstance.triggerAttackRelease(freq, val.duration, time, val.velocity);
+            new Tone.Part((time, val) => {
+              const freq = Tone.Frequency(val.midi, "midi");
+              instrumentInstance.triggerAttackRelease(freq, val.duration, time);
             }, events).start(0);
           }
 
@@ -555,7 +552,11 @@ export async function renderActivePatternToBlob() {
 
             new Tone.Part((time) => {
               // Play a default C5 for steps with no explicit note
-              instrumentInstance.triggerAttackRelease("C5", 0.1, time);
+              instrumentInstance.triggerAttackRelease(
+                Tone.Frequency("C5"),
+                0.1,
+                time
+              );
             }, stepEvents).start(0);
           }
         } else {
@@ -571,11 +572,6 @@ export async function renderActivePatternToBlob() {
     transport.start();
   }, duration);
 
-  const ch = buffer.getChannelData(0);
-  let peak = 0;
-  for (let i = 0; i < ch.length; i++) peak = Math.max(peak, Math.abs(ch[i]));
-  console.log("[Render] peak =", peak);
-
   const blob = bufferToWave(buffer);
   return blob;
 }
@@ -1048,64 +1044,31 @@ function _playOneShot(buffer, time, dest, stopTime = null, playbackRate = 1) {
 export async function renderProjectToBlob({ tailSec = 0.25 } = {}) {
   initializeAudioContext();
 
-  // (Optional, but it helps in some browsers)
-  try { await Tone.start(); } catch {}
-
   const bpm = parseInt(document.getElementById("bpm-input")?.value, 10) || 120;
+  const stepSec = _secondsPerStep(bpm);
   const duration = _projectDurationSeconds(bpm) + Math.max(0, Number(tailSec) || 0);
 
-  // =========================================================
-  // 1) PRELOAD BUFFERS (OUTSIDE Tone.Offline) ✅
-  // =========================================================
-  const decodeCtx = Tone.getContext().rawContext;
-
-  const urls = new Set();
-
-  // timeline audio
-  for (const clip of (appState.audio?.clips || [])) {
-    const url = clip.sourcePath || clip.src || clip.url;
-    if (url) urls.add(String(url));
-  }
-
-  // sampler tracks
-  const samplerTracks = (appState.pattern?.tracks || []).filter(
-    t => t.type === "sampler" && t.samplePath
-  );
-  for (const t of samplerTracks) {
-    urls.add(String(t.samplePath));
-  }
-
-  const bufByUrl = new Map();
-  await Promise.all([...urls].map(async (url) => {
-    const b = await _fetchAudioBuffer(url, decodeCtx);
-    if (b) bufByUrl.set(url, b);
-  }));
-
-  const samplerBufByTrackId = new Map();
-  for (const t of samplerTracks) {
-    samplerBufByTrackId.set(String(t.id), bufByUrl.get(String(t.samplePath)) || null);
-  }
-
-  console.log("[Render] buffers loaded:", bufByUrl.size);
-
-  // =========================================================
-  // 2) OFFLINE RENDER (NO await inside it) ✅
-  // =========================================================
-  const buffer = await Tone.Offline(({ transport }) => {
+  const buffer = await Tone.Offline(async ({ transport }) => {
     transport.bpm.value = bpm;
 
     const rawCtx = Tone.getContext().rawContext;
-
-    // master gain in the OFFLINE context
-    const master = new Tone.Gain(1);
-    master.connect(rawCtx.destination);
-
-    let scheduledAudio = 0;
-    let scheduledNotes = 0;
-    let scheduledSteps = 0;
+    const master = new Tone.Gain(1).toDestination();
 
     // ------------------------------------------------------------
-    // (A) AUDIO TIMELINE
+    // Buffer CACHE (so nothing is downloaded/decoded twice)
+    // ------------------------------------------------------------
+    const bufferCache = new Map();
+    const getBuf = async (url) => {
+      const key = String(url || "");
+      if (!key) return null;
+      if (bufferCache.has(key)) return bufferCache.get(key);
+      const b = await _fetchAudioBuffer(key, rawCtx);
+      bufferCache.set(key, b);
+      return b;
+    };
+
+    // ------------------------------------------------------------
+    // (A) Render the AUDIO TIMELINE (appState.audio.clips)
     // ------------------------------------------------------------
     for (const clip of (appState.audio?.clips || [])) {
      const muted = !!clip.muted || (_n(clip.volume, 1) <= 0);
@@ -1114,7 +1077,7 @@ export async function renderProjectToBlob({ tailSec = 0.25 } = {}) {
       const url = clip.sourcePath || clip.src || clip.url;
       if (!url) continue;
 
-      const buf = bufByUrl.get(String(url));
+      const buf = await getBuf(url);
       if (!buf) continue;
 
       const start = _n(clip.startTimeInSeconds, 0);
@@ -1131,8 +1094,7 @@ export async function renderProjectToBlob({ tailSec = 0.25 } = {}) {
       volNode.connect(panNode);
       panNode.connect(master);
 
-      const player = new Tone.Player();
-      player.buffer = buf; // ✅ use the pre-decoded AudioBuffer
+      const player = new Tone.Player(buf);
       player.connect(volNode);
 
       player.start(start, offset, dur);
@@ -1143,18 +1105,19 @@ export async function renderProjectToBlob({ tailSec = 0.25 } = {}) {
         try { volNode.dispose(); } catch {}
         try { panNode.dispose(); } catch {}
       };
-
-      scheduledAudio++;
     }
 
     // ------------------------------------------------------------
-    // (B) PLAYLIST (patterns)
+    // (B) Render the PLAYLIST (patterns via bassline.playlist_clips)
     // ------------------------------------------------------------
     const arrangements = _collectArrangements();
     const instrumentTracks = (appState.pattern?.tracks || []).filter(t => t.type !== "bassline");
 
+    // mix per (trackId + patternIndex) in the OFFLINE context:
+    // instVol -> instPan -> pattVol -> pattPan -> master
     const mixCache = new Map();
     const pluginCache = new Map();
+    const samplerBufCache = new Map();
 
     const getMix = (track, bassline) => {
       const pi = _n(bassline.patternIndex, 0);
@@ -1192,14 +1155,28 @@ export async function renderProjectToBlob({ tailSec = 0.25 } = {}) {
         return null;
       }
 
-      // ✅ IMPORTANT: create it in the current (OFFLINE) context
-      const inst = new Cls(Tone.getContext(), track.params || track.pluginData || {});
+      const inst = new Cls(null, track.params || track.pluginData || {});
       inst.connect(mix.instVol);
 
       pluginCache.set(key, inst);
       return inst;
     };
 
+    const getSamplerBuf = async (track) => {
+      const key = String(track.id);
+      if (samplerBufCache.has(key)) return samplerBufCache.get(key);
+
+      const url = track.samplePath;
+      if (!url) {
+        samplerBufCache.set(key, null);
+        return null;
+      }
+
+      const b = await getBuf(url);
+      samplerBufCache.set(key, b);
+      return b;
+    };
+
     for (const b of arrangements) {
       const pi = _n(b.patternIndex, 0);
 
@@ -1224,10 +1201,13 @@ export async function renderProjectToBlob({ tailSec = 0.25 } = {}) {
       const pattLenTicks = _patternLengthTicks(patt);
       const mix = getMix(track, b);
 
-      const pluginInst = (track.type === "plugin") ? getPluginInst(track, b, mix) : null;
-      const samplerBuf = (track.type === "sampler") ? (samplerBufByTrackId.get(String(track.id)) || null) : null;
+      // prepare the track's resources
+      let pluginInst = null;
+      let samplerBuf = null;
+      if (track.type === "plugin") pluginInst = getPluginInst(track, b, mix);
+      if (track.type === "sampler") samplerBuf = await getSamplerBuf(track);
 
-      // Piano roll
+      // --- Piano roll (notes) ---
       if (Array.isArray(patt.notes) && patt.notes.length > 0) {
         for (const n of patt.notes) {
           const notePos = _n(n.pos, 0);
@@ -1246,18 +1226,16 @@ export async function renderProjectToBlob({ tailSec = 0.25 } = {}) {
           if (track.type === "plugin" && pluginInst) {
             const freq = Tone.Frequency(midi, "midi").toFrequency();
             try { pluginInst.triggerAttackRelease(freq, durSec, tSec, vel); } catch {}
-            scheduledNotes++;
           } else if (track.type === "sampler" && samplerBuf) {
             const base = _n(track.baseNote, 60);
             const rate = Math.pow(2, (midi - base) / 12);
             _playOneShot(samplerBuf, tSec, mix.instVol, tSec + durSec, rate);
-            scheduledNotes++;
           }
         }
       }
 
-      // Step sequencer
+      // --- Step sequencer (steps) ---
       else if (Array.isArray(patt.steps) && patt.steps.length > 0) {
         for (let s = 0; s < patt.steps.length; s++) {
           if (!patt.steps[s]) continue;
@@ -1269,10 +1247,9 @@ export async function renderProjectToBlob({ tailSec = 0.25 } = {}) {
 
           if (track.type === "plugin" && pluginInst) {
             try { pluginInst.triggerAttackRelease("C5", stepSec, tSec); } catch {}
-            scheduledSteps++;
           } else if (track.type === "sampler" && samplerBuf) {
+            // one-shot (no transposition)
             _playOneShot(samplerBuf, tSec, mix.instVol, clipEndSec, 1);
-            scheduledSteps++;
           }
         }
       }
@@ -1282,16 +1259,8 @@ export async function renderProjectToBlob({ tailSec = 0.25 } = {}) {
     }
 
     transport.start();
-
-    console.log("[Render] scheduled:", { scheduledAudio, scheduledNotes, scheduledSteps });
   }, duration);
 
-  // debug peak
-  const ch = buffer.getChannelData(0);
-  let peak = 0;
-  for (let i = 0; i < ch.length; i++) peak = Math.max(peak, Math.abs(ch[i]));
-  console.log("[Render] peak =", peak);
-
   return bufferToWave(buffer);
 }
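
Note: renderProjectAndDownload (wired to saveMmpBtn in main.js above, but not part of this diff) presumably just wraps renderProjectToBlob and hands the resulting WAV Blob to the browser. A minimal sketch, assuming only the standard Blob and object-URL APIs; the wrapper body and the file name are illustrative, not taken from the repo:

    // Hypothetical sketch: renderProjectToBlob() is the function patched above;
    // the file name and the DOM handling here are illustrative assumptions.
    export async function renderProjectAndDownload() {
      const blob = await renderProjectToBlob({ tailSec: 0.25 });

      // Standard browser download: object URL plus a synthetic anchor click.
      const url = URL.createObjectURL(blob);
      const a = document.createElement("a");
      a.href = url;
      a.download = "project.wav"; // illustrative name
      document.body.appendChild(a);
      a.click();
      a.remove();

      // Revoke the object URL so the rendered buffer can be garbage-collected
      // across repeated exports.
      URL.revokeObjectURL(url);
    }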