Rendering projects in mmpCreator without errors
Deploy / Deploy (push): successful in 2m9s

JotaChina 2025-12-28 13:51:08 -03:00
parent c2f5b1018f
commit f363cfb211
2 changed files with 87 additions and 66 deletions

View File

@@ -68,7 +68,10 @@ function _convertSecondsToBeat(seconds) {
 function _initContext() {
   if (!audioCtx) {
     initializeAudioContext();
-    audioCtx = getAudioContext(); // should be Tone's rawContext
+    // ✅ ensures it is the native AudioContext
+    audioCtx = Tone.getContext().rawContext;
+  } else if (audioCtx.rawContext) {
+    audioCtx = audioCtx.rawContext;
   }
 }
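Note on this fix: Tone.js wraps the browser's AudioContext in its own Context object, and code that stores the wrapper where native-context methods are later expected fails in subtle ways. A minimal sketch of the distinction, assuming Tone.js v14+ (property names as used in the diff above):

    import * as Tone from "tone";

    // Tone's wrapper object: carries Tone's clock and scheduling API.
    const toneCtx = Tone.getContext();

    // The underlying context, which the diff treats as the native AudioContext;
    // low-level calls such as decodeAudioData live on this object.
    const rawCtx = toneCtx.rawContext;

    console.log(toneCtx === rawCtx);        // false: two different objects
    console.log(typeof rawCtx.currentTime); // "number": the native clock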
@@ -404,16 +407,17 @@ export async function startAudioEditorPlayback(seekTime) {
   // ensure Tone's context is active (the user gesture already happened)
   await Tone.start();
-  if (audioCtx.state === "suspended") {
-    await audioCtx.resume();
-  }
+  const raw = audioCtx?.rawContext || audioCtx;
+  if (raw?.state === "suspended" && typeof raw.resume === "function") {
+    await raw.resume();
+  }
+  audioCtx = raw;
-  // align our own clock
-  startTime = audioCtx.currentTime;
   isPlaying = true;
   appState.global.isAudioEditorPlaying = true;
+  // align our own clock (kept for your scheduler)
+  startTime = audioCtx.currentTime;
   // 1. Determine the start time:
   let timeToStart =
     seekTime !== null && seekTime !== undefined && !isNaN(seekTime)
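The unwrap-and-resume sequence above generalizes: normalize to the native context first, resume only if it is actually suspended, then read currentTime from that same object so the scheduler's clock matches the one that plays. A standalone sketch, with ensureRunning as a hypothetical helper name:

    // Normalize a possibly Tone-wrapped context to the native one and make
    // sure it is running before its clock is read.
    async function ensureRunning(ctx) {
      const raw = ctx?.rawContext || ctx; // unwrap if it is Tone's wrapper
      if (raw?.state === "suspended" && typeof raw.resume === "function") {
        await raw.resume(); // safe: resolves immediately if already running
      }
      return raw;
    }

    // usage: audioCtx = await ensureRunning(audioCtx);
    //        startTime = audioCtx.currentTime;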

View File

@@ -447,11 +447,11 @@ export async function renderActivePatternToBlob() {
       if (lastIdx + 1 > maxStepFound) maxStepFound = lastIdx + 1;
     }
-    // B. Notes (Piano Roll) - Assuming 192 ticks/beat and 1/16 steps (48 ticks)
+    // B. Notes (Piano Roll) - Assuming 192 ticks/beat and 1/16 steps (12 ticks)
     if (p.notes && p.notes.length > 0) {
       p.notes.forEach((n) => {
         const endTick = n.pos + n.len;
-        const endStep = Math.ceil(endTick / 48);
+        const endStep = Math.ceil(endTick / 12);
         if (endStep > maxStepFound) maxStepFound = endStep;
       });
     }
@@ -466,7 +466,9 @@ export async function renderActivePatternToBlob() {
   // 2. OFFLINE RENDERING
   // =========================================================
   const buffer = await Tone.Offline(async ({ transport }) => {
-    const masterGain = new Tone.Gain().toDestination();
+    const rawCtx = Tone.getContext().rawContext;
+    const masterGain = new Tone.Gain(1);
+    masterGain.connect(rawCtx.destination);
     // Loop over each project track
     appState.pattern.tracks.forEach((track) => {
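Context for this hunk: inside Tone.Offline the active context is an OfflineAudioContext, and the commit routes the master gain explicitly to that context's destination rather than relying on .toDestination() resolving against the swapped-in context. A minimal self-contained sketch of the same wiring, assuming Tone.js v14+:

    import * as Tone from "tone";

    async function renderBeep() {
      // One second rendered offline; nothing is heard, a buffer comes back.
      return Tone.Offline(({ transport }) => {
        const rawCtx = Tone.getContext().rawContext; // the offline context here
        const master = new Tone.Gain(1);
        master.connect(rawCtx.destination);          // explicit offline routing
        const osc = new Tone.Oscillator(440, "sine").connect(master);
        osc.start(0).stop(0.5);
        transport.start();
      }, 1);
    }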
@@ -528,19 +530,19 @@ export async function renderActivePatternToBlob() {
       // 1. Schedule Piano Roll notes
       if (hasNotes) {
         const TICKS_PER_BEAT = 192;
-        const TICKS_PER_STEP = 48; // 1/16
         const SECONDS_PER_BEAT = 60 / bpm;
+        const TICKS_PER_STEP = 12; // ✅ 12 ticks per 1/16
         const events = pattern.notes.map((note) => ({
-          time: (note.pos / TICKS_PER_BEAT) * SECONDS_PER_BEAT, // ✅ correct
+          time: (note.pos / TICKS_PER_STEP) * stepInterval,
           midi: note.key,
-          duration: (note.len / TICKS_PER_BEAT) * SECONDS_PER_BEAT, // ✅ was already ok
+          duration: (note.len / TICKS_PER_STEP) * stepInterval,
           velocity: (note.vol || 100) / 100,
         }));
         new Tone.Part((time, val) => {
-          const freq = Tone.Frequency(val.midi, "midi");
-          instrumentInstance.triggerAttackRelease(freq, val.duration, time);
+          const freq = Tone.Frequency(val.midi, "midi").toFrequency();
+          instrumentInstance.triggerAttackRelease(freq, val.duration, time, val.velocity);
         }, events).start(0);
       }
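A worked example of the retimed math: with the app's convention of 12 ticks per 1/16 step, and assuming stepInterval is the duration of one step (60 / bpm / 4, i.e. 0.125 s at 120 BPM), a note at pos = 24 ticks starts at (24 / 12) × 0.125 = 0.25 s, and len = 12 ticks lasts 0.125 s. As a sketch (helper names are illustrative):

    // Hypothetical helpers mirroring the diff's conversion.
    const TICKS_PER_STEP = 12;                     // 12 ticks per 1/16 step
    const stepIntervalSec = (bpm) => 60 / bpm / 4; // 4 sixteenth steps per beat

    function noteToSeconds(note, bpm) {
      const step = stepIntervalSec(bpm);
      return {
        time: (note.pos / TICKS_PER_STEP) * step,
        duration: (note.len / TICKS_PER_STEP) * step,
      };
    }

    console.log(noteToSeconds({ pos: 24, len: 12 }, 120));
    // { time: 0.25, duration: 0.125 }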
@@ -553,11 +555,7 @@ export async function renderActivePatternToBlob() {
         new Tone.Part((time) => {
           // Play a default C5 for steps without a defined note
-          instrumentInstance.triggerAttackRelease(
-            Tone.Frequency("C5"),
-            0.1,
-            time
-          );
+          instrumentInstance.triggerAttackRelease("C5", 0.1, time);
         }, stepEvents).start(0);
       }
     } else {
@@ -1050,34 +1048,64 @@ function _playOneShot(buffer, time, dest, stopTime = null, playbackRate = 1) {
 export async function renderProjectToBlob({ tailSec = 0.25 } = {}) {
   initializeAudioContext();
+  // (Optional, but it helps in some browsers)
+  try { await Tone.start(); } catch {}
   const bpm = parseInt(document.getElementById("bpm-input")?.value, 10) || 120;
+  const stepSec = _secondsPerStep(bpm);
   const duration = _projectDurationSeconds(bpm) + Math.max(0, Number(tailSec) || 0);
-  const buffer = await Tone.Offline(async ({ transport }) => {
+  // =========================================================
+  // 1) PRELOAD BUFFERS (OUTSIDE Tone.Offline) ✅
+  // =========================================================
+  const decodeCtx = Tone.getContext().rawContext;
+  const urls = new Set();
+  // timeline audio
+  for (const clip of (appState.audio?.clips || [])) {
+    const url = clip.sourcePath || clip.src || clip.url;
+    if (url) urls.add(String(url));
+  }
+  // sampler tracks
+  const samplerTracks = (appState.pattern?.tracks || []).filter(
+    t => t.type === "sampler" && t.samplePath
+  );
+  for (const t of samplerTracks) {
+    urls.add(String(t.samplePath));
+  }
+  const bufByUrl = new Map();
+  await Promise.all([...urls].map(async (url) => {
+    const b = await _fetchAudioBuffer(url, decodeCtx);
+    if (b) bufByUrl.set(url, b);
+  }));
+  const samplerBufByTrackId = new Map();
+  for (const t of samplerTracks) {
+    samplerBufByTrackId.set(String(t.id), bufByUrl.get(String(t.samplePath)) || null);
+  }
+  console.log("[Render] buffers loaded:", bufByUrl.size);
+  // =========================================================
+  // 2) OFFLINE RENDER (NO await inside) ✅
+  // =========================================================
+  const buffer = await Tone.Offline(({ transport }) => {
     transport.bpm.value = bpm;
     const rawCtx = Tone.getContext().rawContext;
-    // ✅ master in the OFFLINE context, connected explicitly to the OFFLINE destination
+    // master in the OFFLINE context
     const master = new Tone.Gain(1);
     master.connect(rawCtx.destination);
+    let scheduledAudio = 0;
+    let scheduledNotes = 0;
+    let scheduledSteps = 0;
-    // ------------------------------------------------------------
-    // Buffer CACHE (to avoid repeated download/decode)
-    // ------------------------------------------------------------
-    const bufferCache = new Map();
-    const getBuf = async (url) => {
-      const key = String(url || "");
-      if (!key) return null;
-      if (bufferCache.has(key)) return bufferCache.get(key);
-      const b = await _fetchAudioBuffer(key, rawCtx);
-      bufferCache.set(key, b);
-      return b;
-    };
     // ------------------------------------------------------------
-    // (A) Render the AUDIO TIMELINE (appState.audio.clips)
+    // (A) AUDIO TIMELINE
     // ------------------------------------------------------------
     for (const clip of (appState.audio?.clips || [])) {
       const muted = !!clip.muted || (_n(clip.volume, 1) <= 0);
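This preload step is the core of the commit. Tone.Offline renders as fast as the CPU allows, so an async callback that awaits fetch/decode work tends to resolve only after the offline clock has already passed the scheduled event times, producing a silent buffer. Decoding everything against the live context first, then keeping the Offline callback synchronous, removes that race. A condensed sketch of the pattern (URL handling and names are illustrative):

    import * as Tone from "tone";

    // 1) All async I/O happens first, against the live context.
    async function preloadBuffers(urls) {
      const ctx = Tone.getContext().rawContext;
      const byUrl = new Map();
      await Promise.all(urls.map(async (url) => {
        const res = await fetch(url);
        byUrl.set(url, await ctx.decodeAudioData(await res.arrayBuffer()));
      }));
      return byUrl;
    }

    // 2) The render callback only schedules; it never awaits.
    async function renderOneClip(url, durationSec) {
      const buffers = await preloadBuffers([url]);
      return Tone.Offline(({ transport }) => {
        const rawCtx = Tone.getContext().rawContext;
        const player = new Tone.Player();
        player.buffer = buffers.get(url); // pre-decoded, nothing left to wait for
        player.connect(rawCtx.destination);
        player.start(0);
        transport.start();
      }, durationSec);
    }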
@@ -1086,7 +1114,7 @@ export async function renderProjectToBlob({ tailSec = 0.25 } = {}) {
       const url = clip.sourcePath || clip.src || clip.url;
       if (!url) continue;
-      const buf = await getBuf(url);
+      const buf = bufByUrl.get(String(url));
       if (!buf) continue;
       const start = _n(clip.startTimeInSeconds, 0);
@@ -1103,7 +1131,8 @@ export async function renderProjectToBlob({ tailSec = 0.25 } = {}) {
       volNode.connect(panNode);
       panNode.connect(master);
-      const player = new Tone.Player(buf);
+      const player = new Tone.Player();
+      player.buffer = buf; // ✅ uses the pre-decoded AudioBuffer
       player.connect(volNode);
       player.start(start, offset, dur);
@@ -1114,19 +1143,18 @@ export async function renderProjectToBlob({ tailSec = 0.25 } = {}) {
         try { volNode.dispose(); } catch {}
         try { panNode.dispose(); } catch {}
       };
+      scheduledAudio++;
     }
     // ------------------------------------------------------------
-    // (B) Render the PLAYLIST (patterns via bassline.playlist_clips)
+    // (B) PLAYLIST (patterns)
     // ------------------------------------------------------------
     const arrangements = _collectArrangements();
     const instrumentTracks = (appState.pattern?.tracks || []).filter(t => t.type !== "bassline");
-    // mix per (trackId + patternIndex) in the OFFLINE context:
-    // instVol -> instPan -> pattVol -> pattPan -> master
     const mixCache = new Map();
     const pluginCache = new Map();
-    const samplerBufCache = new Map();
     const getMix = (track, bassline) => {
       const pi = _n(bassline.patternIndex, 0);
@@ -1164,6 +1192,7 @@ export async function renderProjectToBlob({ tailSec = 0.25 } = {}) {
         return null;
       }
+      // ✅ IMPORTANT: create in the current (OFFLINE) context
       const inst = new Cls(Tone.getContext(), track.params || track.pluginData || {});
       inst.connect(mix.instVol);
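The comment added here flags a classic Web Audio pitfall: audio nodes are bound to the context they were created in, and connecting a node from the live context into a graph inside an OfflineAudioContext throws. Inside the Tone.Offline callback, Tone.getContext() already points at the offline context, so instantiating the plugin there keeps the whole graph in one context. A small sketch of the failure mode, with illustrative names:

    // Nodes from two different contexts cannot be connected.
    const live = new AudioContext();
    const offline = new OfflineAudioContext(2, 44100, 44100);

    const liveGain = live.createGain();
    try {
      liveGain.connect(offline.destination); // nodes belong to different contexts
    } catch (e) {
      console.log("cross-context connect failed:", e.name); // "InvalidAccessError"
    }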
@@ -1171,21 +1200,6 @@ export async function renderProjectToBlob({ tailSec = 0.25 } = {}) {
       return inst;
     };
-    const getSamplerBuf = async (track) => {
-      const key = String(track.id);
-      if (samplerBufCache.has(key)) return samplerBufCache.get(key);
-      const url = track.samplePath;
-      if (!url) {
-        samplerBufCache.set(key, null);
-        return null;
-      }
-      const b = await getBuf(url);
-      samplerBufCache.set(key, b);
-      return b;
-    };
     for (const b of arrangements) {
       const pi = _n(b.patternIndex, 0);
@@ -1210,13 +1224,10 @@ export async function renderProjectToBlob({ tailSec = 0.25 } = {}) {
       const pattLenTicks = _patternLengthTicks(patt);
       const mix = getMix(track, b);
-      // prepare the track's resources
-      let pluginInst = null;
-      let samplerBuf = null;
-      if (track.type === "plugin") pluginInst = getPluginInst(track, b, mix);
-      if (track.type === "sampler") samplerBuf = await getSamplerBuf(track);
+      const pluginInst = (track.type === "plugin") ? getPluginInst(track, b, mix) : null;
+      const samplerBuf = (track.type === "sampler") ? (samplerBufByTrackId.get(String(track.id)) || null) : null;
-      // --- Piano roll (notes) ---
+      // Piano roll
       if (Array.isArray(patt.notes) && patt.notes.length > 0) {
         for (const n of patt.notes) {
           const notePos = _n(n.pos, 0);
@@ -1235,16 +1246,18 @@ export async function renderProjectToBlob({ tailSec = 0.25 } = {}) {
           if (track.type === "plugin" && pluginInst) {
             const freq = Tone.Frequency(midi, "midi").toFrequency();
             try { pluginInst.triggerAttackRelease(freq, durSec, tSec, vel); } catch {}
+            scheduledNotes++;
           } else if (track.type === "sampler" && samplerBuf) {
             const base = _n(track.baseNote, 60);
             const rate = Math.pow(2, (midi - base) / 12);
             _playOneShot(samplerBuf, tSec, mix.instVol, tSec + durSec, rate);
+            scheduledNotes++;
           }
         }
       }
-      // --- Step sequencer (steps) ---
+      // Step sequencer
       else if (Array.isArray(patt.steps) && patt.steps.length > 0) {
         for (let s = 0; s < patt.steps.length; s++) {
           if (!patt.steps[s]) continue;
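The sampler branch above pitches the one-shot by playback rate: each semitone is a factor of 2^(1/12), so rate = 2^((midi − baseNote) / 12). For example, one octave above the base note (midi 72 against base 60) gives 2^(12/12) = 2, i.e. double speed. A tiny check:

    // Playback-rate transposition: one factor of 2 per octave.
    const rateFor = (midi, base = 60) => Math.pow(2, (midi - base) / 12);

    console.log(rateFor(60)); // 1        (base note, unchanged)
    console.log(rateFor(72)); // 2        (one octave up, double speed)
    console.log(rateFor(67)); // ≈1.4983  (a fifth up)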
@@ -1256,9 +1269,10 @@ export async function renderProjectToBlob({ tailSec = 0.25 } = {}) {
           if (track.type === "plugin" && pluginInst) {
             try { pluginInst.triggerAttackRelease("C5", stepSec, tSec); } catch {}
+            scheduledSteps++;
           } else if (track.type === "sampler" && samplerBuf) {
-            // one-shot (no transposition)
             _playOneShot(samplerBuf, tSec, mix.instVol, clipEndSec, 1);
+            scheduledSteps++;
           }
         }
       }
@@ -1268,8 +1282,11 @@ export async function renderProjectToBlob({ tailSec = 0.25 } = {}) {
     }
     transport.start();
+    console.log("[Render] scheduled:", { scheduledAudio, scheduledNotes, scheduledSteps });
   }, duration);
+  // debug peak
   const ch = buffer.getChannelData(0);
   let peak = 0;
   for (let i = 0; i < ch.length; i++) peak = Math.max(peak, Math.abs(ch[i]));
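The peak scan is a cheap sanity check that the offline render actually produced signal (peak stays 0 for a silent buffer). If a level readout is wanted, the linear peak converts to dBFS as 20 · log10(peak); a sketch, assuming the peak variable from above:

    // Convert a linear peak (0..1) to dBFS; -Infinity means silence.
    const peakDb = peak > 0 ? 20 * Math.log10(peak) : -Infinity;
    console.log(`[Render] peak: ${peak.toFixed(4)} (${peakDb.toFixed(1)} dBFS)`);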