Rendering projects in mmpCreator
Deploy / Deploy (push) Successful in 2m20s
This commit is contained in:
parent de05d11761
commit 53a47d1a4c
@@ -1,5 +1,5 @@
// js/main.js
import { appState, loadStateFromSession } from "./state.js"; // <--- import loadStateFromSession
import { appState, loadStateFromSession } from "./state.js";
import {
  updateTransportLoop,
  restartAudioEditorIfPlaying,
@@ -17,7 +17,7 @@ import { adjustValue, enforceNumericInput, DEFAULT_PROJECT_XML, secondsToSongTic
import { ZOOM_LEVELS } from "./config.js";
import { loadProjectFromServer } from "./file.js";
import { sendAction, joinRoom, setUserName } from "./socket.js";
import { renderActivePatternToBlob } from "./pattern/pattern_audio.js";
import { renderActivePatternToBlob, renderProjectAndDownload } from "./pattern/pattern_audio.js";
import { showToast } from "./ui.js";
import { toggleRecording } from "./recording.js"
import * as Tone from "https://esm.sh/tone"; // Add Tone here if it is not already global
@@ -127,6 +127,10 @@ document.addEventListener("DOMContentLoaded", () => {
  const deleteClipBtn = document.getElementById("delete-clip");
  const addPatternBtn = document.getElementById("add-pattern-btn");
  const removePatternBtn = document.getElementById("remove-pattern-btn");
  const downloadPackageBtn = document.getElementById("download-package-btn");

  // Download the project
  downloadPackageBtn?.addEventListener("click", generateMmpFile);

  // Record button setup
  const recordBtn = document.getElementById('record-btn');
@@ -365,18 +369,13 @@ document.addEventListener("DOMContentLoaded", () => {
    if (file) handleFileLoad(file).then(() => closeOpenProjectModal());
  });
  uploadSampleBtn?.addEventListener("click", () => sampleFileInput?.click());
  saveMmpBtn?.addEventListener("click", generateMmpFile);
  saveMmpBtn?.addEventListener("click", renderProjectAndDownload);

  addInstrumentBtn?.addEventListener("click", () => {
    initializeAudioContext();
    sendAction({ type: "ADD_TRACK" });
  });
  removeInstrumentBtn?.addEventListener("click", () => {
    initializeAudioContext();
    sendAction({ type: "REMOVE_LAST_TRACK" });
  });

  // 👇 UPDATE THIS LISTENER
  removeInstrumentBtn?.addEventListener("click", () => {
    initializeAudioContext();
@@ -913,3 +913,379 @@ export function stopSongPatternPlaybackOnTransport() {
  } catch {}
  songPatternScheduleId = null;
}

// =========================================================================
// Render the entire PROJECT (Playlist patterns + Audio Timeline) to WAV
// =========================================================================

function _n(v, def = 0) {
  const x = Number(v);
  return Number.isFinite(x) ? x : def;
}

function _secondsPerStep(bpm) {
  return 60 / (bpm * 4); // one 1/16 step
}

function _ticksToSeconds(ticks, stepSec) {
  // LMMS: 12 ticks per 1/16 step
  return (_n(ticks, 0) / 12) * stepSec;
}
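
// Example for the two helpers above: at 120 BPM one 1/16 step is
// 60 / (120 * 4) = 0.125 s, so a 4/4 bar (192 ticks = 16 steps) lasts 2 s.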

function _dbFromVol(vol, muted) {
  const v = clamp(vol ?? 1, 0, MAX_VOL);
  if (muted || v <= 0) return -Infinity;
  return Tone.gainToDb(v);
}
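
// Turns a project name into a safe file name: strips characters that are
// invalid on common filesystems, collapses whitespace into underscores and
// caps the length at 80 characters.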
function _sanitizeFileName(name) {
  return String(name || "projeto")
    .trim()
    .replace(/[<>:"/\\|?*\x00-\x1F]+/g, "_")
    .replace(/\s+/g, "_")
    .slice(0, 80);
}
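
// Pattern length in ticks: the furthest note end (notes shorter than one step
// count as one step), or steps.length * 12, with a 12-tick minimum.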
function _patternLengthTicks(patt) {
  const T = 12;

  let byNotes = 0;
  if (Array.isArray(patt?.notes) && patt.notes.length) {
    for (const n of patt.notes) {
      const pos = _n(n.pos, 0);
      const rawLen = _n(n.len, T);
      const len = rawLen < 0 ? T : Math.max(rawLen, T);
      byNotes = Math.max(byNotes, pos + len);
    }
  }

  const bySteps = (patt?.steps?.length || 0) * T;

  return Math.max(byNotes, bySteps, T);
}
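
// Collects the bassline tracks that have non-empty playlist clips; the result
// drives both the duration calculation and the playlist rendering below.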
function _collectArrangements() {
  const basslines = (appState.pattern?.tracks || []).filter(t => t.type === "bassline");
  const arr = [];

  for (const b of basslines) {
    const clips = (b.playlist_clips || []).filter(c => _n(c.len, 0) > 0);
    if (clips.length) arr.push(b);
  }

  // Fallback: if there are no playlist_clips, render the active pattern for N bars
  if (arr.length === 0) {
    const bars = parseInt(document.getElementById("bars-input")?.value, 10) || 1;
    const activePi = _n(appState.pattern?.activePatternIndex, 0);
    arr.push({
      patternIndex: activePi,
      volume: 1,
      pan: 0,
      muted: false,
      isMuted: false,
      playlist_clips: [{ pos: 0, len: bars * 192 }], // 192 ticks per bar (4/4)
    });
  }

  return arr;
}
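
// Project length in seconds: the later of the playlist end (ticks converted to
// seconds) and the audio-timeline end, never shorter than one step.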
function _projectDurationSeconds(bpm) {
  const stepSec = _secondsPerStep(bpm);

  // 1) end position coming from the playlist (ticks)
  let maxTick = 0;
  for (const b of _collectArrangements()) {
    for (const c of (b.playlist_clips || [])) {
      const end = _n(c.pos, 0) + _n(c.len, 0);
      if (end > maxTick) maxTick = end;
    }
  }
  const playlistEndSec = _ticksToSeconds(maxTick, stepSec);

  // 2) end position coming from the audio editor (seconds)
  let audioEndSec = 0;
  for (const c of (appState.audio?.clips || [])) {
    const end = _n(c.startTimeInSeconds, 0) + _n(c.durationInSeconds, 0);
    if (end > audioEndSec) audioEndSec = end;
  }

  return Math.max(playlistEndSec, audioEndSec, stepSec);
}
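
// Fetches and decodes an audio file into an AudioBuffer using the given
// context; returns null (with a warning) on failure so the render can skip it.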
async function _fetchAudioBuffer(url, audioCtx) {
  try {
    const res = await fetch(url);
    if (!res.ok) throw new Error(`HTTP ${res.status}`);
    const arr = await res.arrayBuffer();
    // slice(0) avoids issues in some browsers with a "detached" ArrayBuffer
    return await audioCtx.decodeAudioData(arr.slice(0));
  } catch (e) {
    console.warn("[Render] Falha ao carregar áudio:", url, e);
    return null;
  }
}
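
// Plays a buffer once at `time` through `dest`, optionally stopping at
// `stopTime` and applying a playback-rate transposition; the player disposes
// itself when it stops.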
function _playOneShot(buffer, time, dest, stopTime = null, playbackRate = 1) {
  const p = new Tone.Player(buffer);
  p.playbackRate = playbackRate;
  p.connect(dest);
  p.start(time);

  if (stopTime != null && stopTime > time) {
    try { p.stop(stopTime); } catch {}
  }

  p.onstop = () => {
    try { p.dispose(); } catch {}
  };
}
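
// Renders the whole project offline with Tone.Offline and returns a WAV Blob
// (via bufferToWave); tailSec adds a short release tail at the end.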
export async function renderProjectToBlob({ tailSec = 0.25 } = {}) {
  initializeAudioContext();

  const bpm = parseInt(document.getElementById("bpm-input")?.value, 10) || 120;
  const stepSec = _secondsPerStep(bpm);
  const duration = _projectDurationSeconds(bpm) + Math.max(0, Number(tailSec) || 0);

  const buffer = await Tone.Offline(async ({ transport }) => {
    transport.bpm.value = bpm;

    const rawCtx = Tone.getContext().rawContext;
    const master = new Tone.Gain(1).toDestination();

    // ------------------------------------------------------------
    // Buffer CACHE (so nothing is downloaded/decoded twice)
    // ------------------------------------------------------------
    const bufferCache = new Map();
    const getBuf = async (url) => {
      const key = String(url || "");
      if (!key) return null;
      if (bufferCache.has(key)) return bufferCache.get(key);
      const b = await _fetchAudioBuffer(key, rawCtx);
      bufferCache.set(key, b);
      return b;
    };

    // ------------------------------------------------------------
    // (A) Render the AUDIO TIMELINE (appState.audio.clips)
    // ------------------------------------------------------------
    for (const clip of (appState.audio?.clips || [])) {
      const muted = !!clip.muted || (_n(clip.volume, 1) <= 0);
      if (muted) continue;

      const url = clip.sourcePath || clip.src || clip.url;
      if (!url) continue;

      const buf = await getBuf(url);
      if (!buf) continue;

      const start = _n(clip.startTimeInSeconds, 0);
      const dur = _n(clip.durationInSeconds, 0);
      if (dur <= 0.0001) continue;

      const offset = Math.max(0, _n(clip.offset, 0));
      const vol = clamp(clip.volume ?? 1, 0, MAX_VOL);
      const pan = clamp(clip.pan ?? 0, -1, 1);

      const volNode = new Tone.Volume(vol <= 0 ? -Infinity : Tone.gainToDb(vol));
      const panNode = new Tone.Panner(pan);

      volNode.connect(panNode);
      panNode.connect(master);

      const player = new Tone.Player(buf);
      player.connect(volNode);

      player.start(start, offset, dur);
      player.stop(start + dur + 0.01);

      player.onstop = () => {
        try { player.dispose(); } catch {}
        try { volNode.dispose(); } catch {}
        try { panNode.dispose(); } catch {}
      };
    }

    // ------------------------------------------------------------
    // (B) Render the PLAYLIST (patterns via bassline.playlist_clips)
    // ------------------------------------------------------------
    const arrangements = _collectArrangements();
    const instrumentTracks = (appState.pattern?.tracks || []).filter(t => t.type !== "bassline");

    // One mix chain per (trackId + patternIndex) in the OFFLINE context:
    // instVol -> instPan -> pattVol -> pattPan -> master
    const mixCache = new Map();
    const pluginCache = new Map();
    const samplerBufCache = new Map();

    const getMix = (track, bassline) => {
      const pi = _n(bassline.patternIndex, 0);
      const key = `${track.id}::${pi}`;
      if (mixCache.has(key)) return mixCache.get(key);

      const instMuted = !!(track.isMuted || track.muted) || clamp(track.volume ?? 1, 0, MAX_VOL) <= 0;
      const pattMuted = !!(bassline.isMuted || bassline.muted) || clamp(bassline.volume ?? 1, 0, MAX_VOL) <= 0;

      const instVol = new Tone.Volume(_dbFromVol(track.volume ?? 1, instMuted));
      const instPan = new Tone.Panner(clamp(track.pan ?? 0, -1, 1));
      const pattVol = new Tone.Volume(_dbFromVol(bassline.volume ?? 1, pattMuted));
      const pattPan = new Tone.Panner(clamp(bassline.pan ?? 0, -1, 1));

      instVol.connect(instPan);
      instPan.connect(pattVol);
      pattVol.connect(pattPan);
      pattPan.connect(master);

      const m = { instVol, instPan, pattVol, pattPan };
      mixCache.set(key, m);
      return m;
    };

    const getPluginInst = (track, bassline, mix) => {
      const pi = _n(bassline.patternIndex, 0);
      const key = `${track.id}::${pi}`;
      if (pluginCache.has(key)) return pluginCache.get(key);

      const plugKey = _getPluginKey(track);
      const Cls = PLUGIN_CLASSES[plugKey];
      if (!Cls) {
        console.warn("[Render] Plugin não encontrado:", plugKey, "track:", track.name);
        pluginCache.set(key, null);
        return null;
      }

      const inst = new Cls(null, track.params || track.pluginData || {});
      inst.connect(mix.instVol);

      pluginCache.set(key, inst);
      return inst;
    };

    const getSamplerBuf = async (track) => {
      const key = String(track.id);
      if (samplerBufCache.has(key)) return samplerBufCache.get(key);

      const url = track.samplePath;
      if (!url) {
        samplerBufCache.set(key, null);
        return null;
      }

      const b = await getBuf(url);
      samplerBufCache.set(key, b);
      return b;
    };
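
    // Schedule every instrument track for every playlist clip of every
    // non-muted bassline, repeating the pattern to fill the clip length.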
    for (const b of arrangements) {
      const pi = _n(b.patternIndex, 0);

      const pattMuted = !!(b.isMuted || b.muted) || clamp(b.volume ?? 1, 0, MAX_VOL) <= 0;
      if (pattMuted) continue;

      const clips = (b.playlist_clips || []).filter(c => _n(c.len, 0) > 0);
      if (!clips.length) continue;

      for (const clip of clips) {
        const clipStartTick = _n(clip.pos, 0);
        const clipEndTick = clipStartTick + _n(clip.len, 0);
        const clipEndSec = _ticksToSeconds(clipEndTick, stepSec);

        for (const track of instrumentTracks) {
          const instMuted = !!(track.isMuted || track.muted) || clamp(track.volume ?? 1, 0, MAX_VOL) <= 0;
          if (instMuted) continue;

          const patt = track.patterns?.[pi];
          if (!patt) continue;

          const pattLenTicks = _patternLengthTicks(patt);
          const mix = getMix(track, b);

          // prepare this track's resources
          let pluginInst = null;
          let samplerBuf = null;
          if (track.type === "plugin") pluginInst = getPluginInst(track, b, mix);
          if (track.type === "sampler") samplerBuf = await getSamplerBuf(track);

          // --- Piano roll (notes) ---
          if (Array.isArray(patt.notes) && patt.notes.length > 0) {
            for (const n of patt.notes) {
              const notePos = _n(n.pos, 0);
              const rawLen = _n(n.len, 12);
              const lenTicks = rawLen < 0 ? 12 : Math.max(rawLen, 12);
              const vel = _n(n.vol, 100) / 100;
              const midi = _n(n.key, 60);

              for (let startTick = clipStartTick + notePos; startTick < clipEndTick; startTick += pattLenTicks) {
                const tSec = _ticksToSeconds(startTick, stepSec);

                let durSec = _ticksToSeconds(lenTicks, stepSec);
                durSec = Math.min(durSec, Math.max(0, clipEndSec - tSec));
                if (durSec <= 0.0001) continue;

                if (track.type === "plugin" && pluginInst) {
                  const freq = Tone.Frequency(midi, "midi").toFrequency();
                  try { pluginInst.triggerAttackRelease(freq, durSec, tSec, vel); } catch {}
                } else if (track.type === "sampler" && samplerBuf) {
                  const base = _n(track.baseNote, 60);
                  const rate = Math.pow(2, (midi - base) / 12);
                  _playOneShot(samplerBuf, tSec, mix.instVol, tSec + durSec, rate);
                }
              }
            }
          }

          // --- Step sequencer (steps) ---
          else if (Array.isArray(patt.steps) && patt.steps.length > 0) {
            for (let s = 0; s < patt.steps.length; s++) {
              if (!patt.steps[s]) continue;

              const stepTick = s * 12;

              for (let startTick = clipStartTick + stepTick; startTick < clipEndTick; startTick += pattLenTicks) {
                const tSec = _ticksToSeconds(startTick, stepSec);

                if (track.type === "plugin" && pluginInst) {
                  try { pluginInst.triggerAttackRelease("C5", stepSec, tSec); } catch {}
                } else if (track.type === "sampler" && samplerBuf) {
                  // one-shot (no transposition)
                  _playOneShot(samplerBuf, tSec, mix.instVol, clipEndSec, 1);
                }
              }
            }
          }
        }
      }
    }

    transport.start();
  }, duration);

  return bufferToWave(buffer);
}
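
// Renders the whole project to a WAV Blob and triggers a browser download
// named after the current project.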
export async function renderProjectAndDownload() {
  try {
    const blob = await renderProjectToBlob({ tailSec: 0.35 });

    const projectName =
      appState.global?.currentBeatBasslineName ||
      appState.global?.projectName ||
      "projeto";

    const fileName = `${_sanitizeFileName(projectName)}.wav`;

    const url = URL.createObjectURL(blob);
    const a = document.createElement("a");
    a.href = url;
    a.download = fileName;
    document.body.appendChild(a);
    a.click();
    a.remove();

    setTimeout(() => URL.revokeObjectURL(url), 1500);
  } catch (e) {
    console.error("Erro ao renderizar projeto:", e);
    alert("Erro ao renderizar projeto. Veja o console para detalhes.");
  }
}
@@ -144,12 +144,11 @@
        ></i>
        <span style="margin-left: 8px">Salvar projeto</span>
        <i
          class="fa-solid fa-file-zipper"
          id="download-package-btn"
          title="Baixar Pacote Completo (.zip)"
          style="margin-left: 15px; color: #ffdd57; cursor: pointer"
          class="fa-solid fa-file-audio"
          id="save-mmp-btn"
          title="Renderizar áudio (WAV/MP3/OGG/FLAC)"
        ></i>
        <span style="margin-left: 8px; color: #ffdd57">Baixar ZIP</span>
        <span style="margin-left: 8px">Renderizar</span>
        <i
          class="fa-solid fa-upload"
          id="upload-sample-btn"
@@ -885,32 +884,7 @@
      }
    </script>

    <script>
      document.addEventListener("DOMContentLoaded", () => {
        const downloadBtn = document.getElementById("download-package-btn");

        if (downloadBtn) {
          downloadBtn.addEventListener("click", () => {
            const params = new URLSearchParams(window.location.search);
            let projectName = params.get("project");

            if (projectName) {
              if (!projectName.toLowerCase().endsWith(".mmp")) {
                projectName += ".mmp";
              }
              const apiUrl = `/api/download/${projectName}`;
              downloadBtn.style.opacity = "0.5";
              setTimeout(() => (downloadBtn.style.opacity = "1"), 500);
              window.location.href = apiUrl;
            } else {
              alert(
                "Nenhum projeto selecionado na URL. Abra ou Salve um projeto primeiro."
              );
            }
          });
        }
      });

    <script>
      document.addEventListener("DOMContentLoaded", () => {
        const uploadSampleBtn = document.getElementById("upload-sample-btn");
        const sampleInput = document.getElementById("sample-file-input");