// js/creations/file.js //-------------------------------------------------------------- // IMPORTS NECESSÁRIOS //-------------------------------------------------------------- import { appState, saveStateToSession, resetProjectState, loadStateFromSession, } from "./state.js"; import { loadAudioForTrack } from "./pattern/pattern_state.js"; import { renderAll, getSamplePathMap } from "./ui.js"; import { DEFAULT_PAN, DEFAULT_VOLUME, NOTE_LENGTH } from "./config.js"; import { initializeAudioContext, getMainGainNode, getAudioContext } from "./audio.js"; import { DEFAULT_PROJECT_XML, getSecondsPerStep, SAMPLE_SRC } from "./utils.js"; import * as Tone from "https://esm.sh/tone"; import { sendAction } from "./socket.js"; // ⚠️ vem do módulo de áudio (o mesmo que audio_ui usa) import { addAudioTrackLane, addAudioClipToTimeline, updateAudioClipProperties, } from "./audio/audio_state.js"; const TICKS_PER_STEP = 12; function safeId(prefix) { return (crypto?.randomUUID?.() || `${prefix}_${Date.now()}_${Math.floor(Math.random() * 1e6)}`); } function basename(path) { return String(path || "").split(/[\\/]/).pop(); } function resolveSamplePath(sampleName, pathMap) { // 1) tenta pelo manifest (melhor) if (sampleName && pathMap[sampleName]) return pathMap[sampleName]; // 2) fallback simples (se você tiver essa convenção) // ajuste se necessário if (sampleName) return `${SAMPLE_SRC}/${sampleName}`; return null; } function ticksToSeconds(ticks) { const seconds = (ticks / TICKS_PER_STEP) * getSecondsPerStep(); return seconds; } function dirname(path) { const parts = String(path || "").split(/[\\/]/); parts.pop(); return parts.join("/"); } function lastFolder(path) { const dir = dirname(path); return String(dir).split(/[\\/]/).pop(); } export async function parseBeatIndexJson(data) { resetProjectState(); initializeAudioContext(); // BPM const bpm = Number(data?.bpm || 140); const bpmInput = document.getElementById("bpm-input"); if (bpmInput) bpmInput.value = bpm; // (opcional) nome do 
projeto if (data?.original_title) { appState.global.currentBeatBasslineName = data.original_title; } const pathMap = getSamplePathMap(); // vem do samples-manifest :contentReference[oaicite:6]{index=6} const secondsPerStep = getSecondsPerStep(); const newPatternTracks = []; // 1) monta pattern.tracks (plugin/bassline etc) (data.tracks || []).forEach((t, idx) => { if (t.type === "sample") return; const id = t.id || safeId("ptrk"); // normaliza nome (teu JSON usa track_name / bassline_name) const name = t.track_name || t.bassline_name || t.instrument_name || t.instrumentName || `Track ${idx + 1}`; newPatternTracks.push({ ...t, id, name, }); }); appState.pattern.tracks = newPatternTracks; // 2) cria lanes/clips de áudio a partir dos sample-tracks const sampleTracks = (data?.tracks || []).filter((t) => t?.type === "sample"); // ✅ data for (let i = 0; i < sampleTracks.length; i++) { const t = sampleTracks[i]; const trackId = `sample_lane_${Date.now()}_${i}`; appState.audio.tracks.push({ id: trackId, name: t.track_name || "Áudio", }); const fileName = t.sample_name || basename(t.sample_info?.src) || `${basename(t.sample_info?.src || "")}`; // ✅ sem sample_info solto const encodedFileName = encodeURIComponent(fileName); const projectName = encodeURIComponent(data?.file || data?.original_title || ""); const base = `${SAMPLE_SRC}/${projectName}`; // ✅ SAMPLE_SRC/{project} let resolvedUrl = pathMap[fileName]; if (!resolvedUrl || !resolvedUrl.includes(`/${projectName}/`)) { resolvedUrl = `${base}/${encodedFileName}`; } // ✅ tempo no padrão do seu playlist (12 ticks por step) const startSec = ((Number(t.sample_info?.pos || 0) / 12) * secondsPerStep); const durSec = ((Number(t.sample_info?.len || 0) / 12) * secondsPerStep); const clipId = `clip_${trackId}_0`; addAudioClipToTimeline(resolvedUrl, trackId, startSec, clipId, fileName); const vol = Number(t.sample_info?.vol ?? 100) / 100; const pan = Number(t.sample_info?.pan ?? 0) / 100; const muted = String(t.sample_info?.muted ?? 
"0") === "1"; updateAudioClipProperties(clipId, { durationInSeconds: durSec || 0, volume: muted ? 0 : vol, pan: isNaN(pan) ? 0 : pan, }); } renderAll(); } //-------------------------------------------------------------- // MANIPULAÇÃO DE ARQUIVOS //-------------------------------------------------------------- export function handleLocalProjectReset() { console.log("Recebido comando de reset. Limpando estado local..."); if (window.ROOM_NAME) { try { sessionStorage.removeItem(`temp_state_${window.ROOM_NAME}`); } catch (e) { console.error("Falha ao limpar estado da sessão:", e); } } resetProjectState(); const bpmInput = document.getElementById("bpm-input"); if (bpmInput) bpmInput.value = 140; ["bars-input", "compasso-a-input", "compasso-b-input"].forEach((id) => { const el = document.getElementById(id); if (el) el.value = id === "bars-input" ? 1 : 4; }); renderAll(); } export async function handleFileLoad(file) { let xmlContent = ""; try { const lower = file.name.toLowerCase(); if (lower.endsWith(".json")) { const json = JSON.parse(await file.text()); sendAction({ type: "LOAD_BEAT_INDEX", data: json }); return; } if (file.name.toLowerCase().endsWith(".mmpz")) { // eslint-disable-next-line no-undef const jszip = new JSZip(); const zip = await jszip.loadAsync(file); const projectFile = Object.keys(zip.files).find((name) => name.toLowerCase().endsWith(".mmp") ); if (!projectFile) throw new Error( "Não foi possível encontrar um arquivo .mmp dentro do .mmpz" ); xmlContent = await zip.files[projectFile].async("string"); } else { xmlContent = await file.text(); } sendAction({ type: "LOAD_PROJECT", xml: xmlContent }); } catch (error) { console.error("Erro ao carregar o projeto:", error); alert(`Erro ao carregar projeto: ${error.message}`); } } export async function loadBeatIndexFromServer(fileName) { const response = await fetch(`src_mmpSearch/metadata/${fileName}.json`); if (!response.ok) throw new Error("Não foi possível carregar beat index"); const data = await 
response.json(); sendAction({ type: "LOAD_BEAT_INDEX", data }); } export async function loadProjectFromServer(fileName) { try { const response = await fetch(`src_mmpSearch/mmp/${fileName}`); if (!response.ok) throw new Error(`Não foi possível carregar o arquivo ${fileName}`); const xmlContent = await response.text(); sendAction({ type: "LOAD_PROJECT", xml: xmlContent }); return true; } catch (error) { console.error("Erro ao carregar projeto do servidor:", error); alert(`Erro ao carregar projeto: ${error.message}`); return false; } } // ================================================================= // FUNÇÃO AUXILIAR: PARSE DE INSTRUMENTO ÚNICO // ================================================================= function parseInstrumentNode( trackNode, sortedBBTrackNameNodes, pathMap, parentBasslineId = null ) { const instrumentNode = trackNode.querySelector("instrument"); const instrumentTrackNode = trackNode.querySelector("instrumenttrack"); if (!instrumentNode || !instrumentTrackNode) return null; const trackName = trackNode.getAttribute("name"); const instrumentName = instrumentNode.getAttribute("name"); // ============================================================ // ✅ Patterns (Song Editor x Beat/Bassline Rack) // - Song Editor: mantém o comportamento antigo (sequencial) // - Rack (parentBasslineId != null): mapeia por pos/192 (índice real) // e preenche patterns vazios para manter index estável. 
// ============================================================ const BAR_TICKS = 192; // 1 compasso em ticks (4/4) const allPatternsNodeList = trackNode.querySelectorAll("pattern"); const allPatternsArray = Array.from(allPatternsNodeList).sort((a, b) => { return ( (parseInt(a.getAttribute("pos"), 10) || 0) - (parseInt(b.getAttribute("pos"), 10) || 0) ); }); const makeEmptyPattern = (idx, nameOverride = null) => ({ name: nameOverride || `Pattern ${idx + 1}`, steps: new Array(16).fill(false), notes: [], pos: idx * BAR_TICKS, }); const buildPatternFromNode = (patternNode, fallbackName, fallbackPos) => { const patternName = patternNode.getAttribute("name") || fallbackName; const patternSteps = parseInt(patternNode.getAttribute("steps"), 10) || 16; const steps = new Array(patternSteps).fill(false); const notes = []; patternNode.querySelectorAll("note").forEach((noteNode) => { const pos = parseInt(noteNode.getAttribute("pos"), 10) || 0; const rawLen = parseInt(noteNode.getAttribute("len"), 10) || 0; const len = rawLen < 0 ? TICKS_PER_STEP : rawLen; notes.push({ pos, len, key: parseInt(noteNode.getAttribute("key"), 10), vol: parseInt(noteNode.getAttribute("vol"), 10), pan: parseInt(noteNode.getAttribute("pan"), 10), }); const stepIndex = Math.floor(pos / TICKS_PER_STEP); if (stepIndex < patternSteps) steps[stepIndex] = true; }); return { name: patternName, steps, notes, pos: fallbackPos, }; }; let patterns = []; if (allPatternsArray.length > 0) { // ✅ Caso 1: instrumentos do Rack do Beat/Bassline if (parentBasslineId) { // quantos BB tracks existem no projeto? let bbCount = Array.isArray(sortedBBTrackNameNodes) ? 
sortedBBTrackNameNodes.length : 0; // garante bbCount >= maior índice encontrado no XML let maxIdx = 0; for (const pn of allPatternsArray) { const posTicks = parseInt(pn.getAttribute("pos"), 10) || 0; const idx = Math.round(posTicks / BAR_TICKS); if (idx > maxIdx) maxIdx = idx; } bbCount = Math.max(bbCount, maxIdx + 1, 1); // cria array denso (sem buracos) patterns = new Array(bbCount).fill(null).map((_, i) => { const nameFromBB = sortedBBTrackNameNodes?.[i]?.getAttribute?.("name") || null; return makeEmptyPattern(i, nameFromBB); }); // injeta patterns do XML no índice certo (pos/192) for (const pn of allPatternsArray) { const posTicks = parseInt(pn.getAttribute("pos"), 10) || 0; const idx = Math.round(posTicks / BAR_TICKS); if (idx < 0 || idx >= patterns.length) continue; const fallbackName = patterns[idx]?.name || `Pattern ${idx + 1}`; patterns[idx] = buildPatternFromNode(pn, fallbackName, idx * BAR_TICKS); } } else { // ✅ Caso 2: instrumentos do Song Editor (mantém sequencial) patterns = allPatternsArray.map((patternNode, index) => { const fallbackName = patternNode.getAttribute("name") || `Pattern ${index}`; const fallbackPos = parseInt(patternNode.getAttribute("pos"), 10) || 0; return buildPatternFromNode(patternNode, fallbackName, fallbackPos); }); } } else { // Fallback: Nenhum pattern encontrado no XML patterns.push(makeEmptyPattern(0, "Pattern 0")); } // ============================================================ // Sample vs Plugin // ============================================================ let finalSamplePath = null; let trackType = "plugin"; if (instrumentName === "audiofileprocessor") { trackType = "sampler"; const afpNode = instrumentNode.querySelector("audiofileprocessor"); const sampleSrc = afpNode ? afpNode.getAttribute("src") : null; if (sampleSrc) { const filename = sampleSrc.split("/").pop(); if (pathMap[filename]) { finalSamplePath = pathMap[filename]; } else { let cleanSrc = sampleSrc.startsWith("samples/") ? 
sampleSrc.substring("samples/".length) : sampleSrc; finalSamplePath = `${SAMPLE_SRC}/${cleanSrc}`; } } } const volFromFile = parseFloat(instrumentTrackNode.getAttribute("vol")); const panFromFile = parseFloat(instrumentTrackNode.getAttribute("pan")); return { id: Date.now() + Math.random(), name: trackName, type: trackType, samplePath: finalSamplePath, patterns: patterns, volume: !isNaN(volFromFile) ? volFromFile / 100 : DEFAULT_VOLUME, pan: !isNaN(panFromFile) ? panFromFile / 100 : DEFAULT_PAN, instrumentName: instrumentName, instrumentXml: instrumentNode.innerHTML, parentBasslineId: parentBasslineId, }; } // ================================================================= // 🔥 FUNÇÃO DE PARSING PRINCIPAL // ================================================================= export async function parseMmpContent(xmlString) { resetProjectState(); initializeAudioContext(); appState.global.justReset = xmlString === DEFAULT_PROJECT_XML; const audioContainer = document.getElementById("audio-track-container"); if (audioContainer) { audioContainer.innerHTML = ""; } const parser = new DOMParser(); const xmlDoc = parser.parseFromString(xmlString, "application/xml"); appState.global.originalXmlDoc = xmlDoc; // Configuração Global (BPM, Compasso) const head = xmlDoc.querySelector("head"); if (head) { const setVal = (id, attr, def) => { const el = document.getElementById(id); if (el) el.value = head.getAttribute(attr) || def; }; setVal("bpm-input", "bpm", 140); setVal("compasso-a-input", "timesig_numerator", 4); setVal("compasso-b-input", "timesig_denominator", 4); } const pathMap = getSamplePathMap(); // ------------------------------------------------------------- // 2.1) EXTRAÇÃO DE SAMPLETRACKS DO SONG EDITOR (track[type="2"]) // ------------------------------------------------------------- const secondsPerStep = getSecondsPerStep(); // já é usado no JSON parser :contentReference[oaicite:3]{index=3} const sampleTrackNodes = Array.from( xmlDoc.querySelectorAll('song > 
trackcontainer > track[type="2"]') ); sampleTrackNodes.forEach((node, idx) => { const trackName = node.getAttribute("name") || `Áudio ${idx + 1}`; // cria lane de áudio const trackId = `sample_lane_${Date.now()}_${idx}`; appState.audio.tracks.push({ id: trackId, name: trackName }); // pan/vol defaults do const st = node.querySelector("sampletrack"); const laneVol = st ? Number(st.getAttribute("vol") ?? 100) / 100 : 1; const lanePan = st ? Number(st.getAttribute("pan") ?? 0) / 100 : 0; // cada é um clip const clips = Array.from(node.querySelectorAll(":scope > sampletco")); clips.forEach((c, cidx) => { // resolve caminho via manifest; fallback para /samples const src = c.getAttribute("src") || ""; const fileName = src.split(/[\\/]/).pop(); // "sample.wav" const projectFolder = encodeURIComponent(lastFolder(src) || ""); // "project_name" const encodedFile = encodeURIComponent(fileName); // tenta pelo manifest primeiro let resolvedUrl = fileName ? pathMap[fileName] : null; // se o manifest não tiver ou não vier com /{project}/, monta no padrão do servidor if (!resolvedUrl || (projectFolder && !resolvedUrl.includes(`/${projectFolder}/`))) { resolvedUrl = projectFolder ? `${SAMPLE_SRC}/samples/${projectFolder}/${encodedFile}` : null; } if (!resolvedUrl) return; const posTicks = Number(c.getAttribute("pos") || 0); const lenTicks = Number(c.getAttribute("len") || 0); const startSec = ((posTicks / 12) * secondsPerStep); const durSec = ((lenTicks / 12) * secondsPerStep); const clipId = `clip_${trackId}_${cidx}`; addAudioClipToTimeline(resolvedUrl, trackId, startSec, clipId, fileName); const muted = String(c.getAttribute("muted") ?? "0") === "1"; updateAudioClipProperties(clipId, { durationInSeconds: durSec || 0, volume: muted ? 0 : laneVol, pan: isNaN(lanePan) ? 0 : lanePan, }); }); }); // 1. 
Identifica colunas de beat/patterns (usado para mapear steps) // Normalmente ficam dentro do primeiro container de Bassline const bbTrackNodes = Array.from(xmlDoc.querySelectorAll('track[type="1"]')); // Cada BBTrack do Song Editor representa uma “coluna/pattern” (pos 0, 192, 384...) // então usamos o próprio array de BBTracks para nomear as patterns: const sortedBBTrackNameNodes = bbTrackNodes; // ------------------------------------------------------------- // 2. EXTRAÇÃO DE INSTRUMENTOS DA RAIZ (SONG EDITOR) // ------------------------------------------------------------- // Pega apenas os instrumentos que estão soltos no Song Editor (não dentro de BBTracks) const songInstrumentNodes = Array.from( xmlDoc.querySelectorAll('song > trackcontainer > track[type="0"]') ); const songTracks = songInstrumentNodes .map((node) => parseInstrumentNode(node, sortedBBTrackNameNodes, pathMap, null) ) // null = Sem Pai .filter((t) => t !== null); // ------------------------------------------------------------- // 3. EXTRAÇÃO DAS TRILHAS DE BASSLINE (BBTracks) E RACK COMPARTILHADO // ------------------------------------------------------------- const bbRackNode = bbTrackNodes.find((n) => n.querySelector('bbtrack > trackcontainer > track[type="0"]') ); // ID do rack compartilhado const rackId = bbRackNode ? 
`bbRack_${Date.now()}_${Math.random().toString(36).slice(2)}` : null; // 3.2) Parseia instrumentos UMA vez (do rack) let bbRackInstruments = []; if (bbRackNode && rackId) { const internalInstrumentNodes = Array.from( bbRackNode.querySelectorAll('bbtrack > trackcontainer > track[type="0"]') ); bbRackInstruments = internalInstrumentNodes .map((node) => parseInstrumentNode(node, sortedBBTrackNameNodes, pathMap, rackId) ) .filter((t) => t !== null); } // 3.3) Cria os containers “Caixa/Kick/…” como patterns (sem instrumentos próprios) const basslineContainers = bbTrackNodes .map((trackNode, idx) => { const trackName = trackNode.getAttribute("name") || "Beat/Bassline"; const playlistClips = Array.from(trackNode.querySelectorAll(":scope > bbtco")).map((bbtco, cidx) => { const pos = parseInt(bbtco.getAttribute("pos"), 10) || 0; const len = parseInt(bbtco.getAttribute("len"), 10) || 192; return { id: `plc_${idx}_${pos}_${len}_${cidx}`, // determinístico pos, len, name: trackName, }; }); // NÃO retornar null quando não tem clips: return { id: `bb_container_${idx}`, name: trackName, type: "bassline", patternIndex: idx, playlist_clips: playlistClips, // pode ser [] patterns: [], isMuted: false, instrumentSourceId: rackId, volume: 1, pan: 0, }; }) .filter(Boolean); // ------------------------------------------------------------- // 4. COMBINAÇÃO E FINALIZAÇÃO // ------------------------------------------------------------- // A lista final plana contém: // 1) Instrumentos do Song Editor // 2) Instrumentos do rack compartilhado do Beat/Bassline Editor // 3) Containers (Caixa/Kick/...) 
que só referenciam o rack const newTracks = [ ...songTracks, ...bbRackInstruments, ...basslineContainers, ]; // Inicializa áudio apenas para instrumentos reais newTracks.forEach((track) => { if (track.type !== "bassline") { track.volumeNode = new Tone.Volume(Tone.gainToDb(track.volume)); track.pannerNode = new Tone.Panner(track.pan); track.volumeNode.connect(track.pannerNode); track.pannerNode.connect(getMainGainNode()); } }); // Configura tamanho da timeline let isFirstTrackWithNotes = true; newTracks.forEach((track) => { if (track.type === "bassline" || !isFirstTrackWithNotes) return; const activePattern = track.patterns?.[track.activePatternIndex || 0]; if (!activePattern) return; let bars = 1; // ✅ Se tiver piano roll, calcula pelo final da última nota if (activePattern.notes && activePattern.notes.length > 0) { const TICKS_PER_BAR = 192; // LMMS 4/4 const TICKS_PER_STEP = 12; // 1/16 let maxEndTick = 0; activePattern.notes.forEach((n) => { const pos = parseInt(n.pos, 10) || 0; const rawLen = parseInt(n.len, 10) || 0; const len = rawLen < 0 ? TICKS_PER_STEP : rawLen; // fallback maxEndTick = Math.max(maxEndTick, pos + Math.max(len, TICKS_PER_STEP)); }); bars = Math.max(1, Math.ceil(maxEndTick / TICKS_PER_BAR)); } // ✅ Senão, cai no step sequencer normal else if (activePattern.steps && activePattern.steps.length > 0) { bars = Math.max(1, Math.ceil(activePattern.steps.length / 16)); } const barsInput = document.getElementById("bars-input"); if (barsInput) barsInput.value = String(bars); isFirstTrackWithNotes = false; }); // Carrega samples/plugins try { const promises = newTracks .filter((t) => t.type !== "bassline") .map((track) => loadAudioForTrack(track)); await Promise.all(promises); } catch (error) { console.error("Erro ao carregar áudios:", error); } const patternNames = basslineContainers.map(b => b.name); // ["Caixa","Kick","Batida/Linha de Baixo 2",...] 
newTracks.forEach(t => { if (t.type === "bassline") return; (t.patterns || []).forEach((p, idx) => { if (patternNames[idx]) p.name = patternNames[idx]; }); }); // Atualiza estado global appState.pattern.tracks = newTracks; appState.pattern.focusedBasslineId = null; // Reseta o foco const firstInst = newTracks.find((t) => t.type !== "bassline"); appState.pattern.activeTrackId = firstInst ? firstInst.id : null; appState.pattern.activePatternIndex = null; loadStateFromSession(); await Promise.resolve(); renderAll(); } // -------------------------------------------------------------- // GERAÇÃO DE ARQUIVO (EXPORT) — MMPZ com WAVs “sliceados” // -------------------------------------------------------------- // ✅ agora é async porque vamos gerar zip + wavs export async function generateMmpFile() { if (appState.global.originalXmlDoc) { await modifyAndSaveExistingMmp(); } else { await generateNewMmp(); } } function generateXmlFromState() { if (!appState.global.originalXmlDoc) { const parser = new DOMParser(); appState.global.originalXmlDoc = parser.parseFromString( DEFAULT_PROJECT_XML, "application/xml" ); } const xmlDoc = appState.global.originalXmlDoc.cloneNode(true); const head = xmlDoc.querySelector("head"); if (head) { head.setAttribute("bpm", document.getElementById("bpm-input").value || 140); head.setAttribute( "num_bars", document.getElementById("bars-input").value || 1 ); head.setAttribute( "timesig_numerator", document.getElementById("compasso-a-input").value || 4 ); head.setAttribute( "timesig_denominator", document.getElementById("compasso-b-input").value || 4 ); } // Exportação Simplificada: Coloca todos os instrumentos reais no primeiro container const bbTrackContainer = xmlDoc.querySelector( 'track[type="1"] > bbtrack > trackcontainer' ); if (bbTrackContainer) { bbTrackContainer .querySelectorAll('track[type="0"]') .forEach((node) => node.remove()); const tracksXml = appState.pattern.tracks .filter((t) => t.type !== "bassline") .map((track) => 
createTrackXml(track)) .join(""); const tempDoc = new DOMParser().parseFromString( `${tracksXml}`, "application/xml" ); Array.from(tempDoc.documentElement.children).forEach((newTrackNode) => { bbTrackContainer.appendChild(newTrackNode); }); } applyPlaylistClipsToXml(xmlDoc); const serializer = new XMLSerializer(); return serializer.serializeToString(xmlDoc); } export function syncPatternStateToServer() { if (!window.ROOM_NAME) return; const currentXml = generateXmlFromState(); sendAction({ type: "SYNC_PATTERN_STATE", xml: currentXml }); saveStateToSession(); } function ensureBbTrackCount(xmlDoc, neededCount) { const songTc = xmlDoc.querySelector("song > trackcontainer"); if (!songTc) return; const bbTracks = Array.from(songTc.querySelectorAll(':scope > track[type="1"]')); if (bbTracks.length === 0) return; const template = bbTracks[bbTracks.length - 1]; while (bbTracks.length < neededCount) { const clone = template.cloneNode(true); // limpa os clips (bbtco) do clone (opcional, mas bom) Array.from(clone.querySelectorAll(":scope > bbtco")).forEach((n) => n.remove()); // ⚠️ Eu recomendo NÃO remover instrumentos internos aqui, // senão o bbtrack clonado pode ficar mudo no LMMS. 
// (Então NÃO faça: inner.querySelectorAll('track[type="0"]').remove()) clone.setAttribute("name", `Beat/Bassline ${bbTracks.length + 1}`); songTc.appendChild(clone); bbTracks.push(clone); } } function applyPlaylistClipsToXml(xmlDoc) { const basslines = appState.pattern.tracks .filter((t) => t.type === "bassline" && Number.isFinite(Number(t.patternIndex))) .slice() .sort((a, b) => Number(a.patternIndex) - Number(b.patternIndex)); const maxIdx = Math.max(-1, ...basslines.map((b) => Number(b.patternIndex))); ensureBbTrackCount(xmlDoc, maxIdx + 1); const bbTrackNodes = Array.from( xmlDoc.querySelectorAll('song > trackcontainer > track[type="1"]') ); if (!bbTrackNodes.length) return; for (const b of basslines) { const idx = Number(b.patternIndex); const node = bbTrackNodes[idx]; if (!node) continue; // ✅ mantém nome/mute sincronizados if (b.name) node.setAttribute("name", b.name); node.setAttribute("muted", b.isMuted ? "1" : "0"); Array.from(node.querySelectorAll(":scope > bbtco")).forEach((n) => n.remove()); const clips = (b.playlist_clips || []).slice().sort((x, y) => (x.pos ?? 0) - (y.pos ?? 0)); for (const c of clips) { const el = xmlDoc.createElement("bbtco"); el.setAttribute("pos", String(Math.max(0, Math.floor(c.pos ?? 0)))); el.setAttribute("len", String(Math.max(12, Math.floor(c.len ?? 
192)))); node.appendChild(el); } } } function createTrackXml(track) { if (!track.patterns || track.patterns.length === 0) return ""; const ticksPerStep = 12; // 1 step (1/16) no LMMS = 12 ticks (com bar=192 em 4/4) const lmmsVolume = Math.round(track.volume * 100); const lmmsPan = Math.round(track.pan * 100); const instrName = track.instrumentName || "kicker"; const instrXml = track.instrumentXml || ``; const patternsXml = track.patterns .map((pattern) => { let patternNotesXml = ""; if ( track.type === "plugin" && pattern.notes && pattern.notes.length > 0 ) { patternNotesXml = pattern.notes .map( (note) => `` ) .join("\n "); } else if (pattern.steps) { patternNotesXml = pattern.steps .map((isActive, index) => { if (isActive) { const notePos = Math.round(index * ticksPerStep); return ``; } return ""; }) .join("\n "); } return ` ${patternNotesXml} `; }) .join("\n "); return ` ${instrXml} ${patternsXml} `; } async function modifyAndSaveExistingMmp() { console.log("EXPORT MMPZ"); await generateAndDownloadMmpz("projeto_editado"); } async function generateNewMmp() { console.log("EXPORT MMPZ"); await generateAndDownloadMmpz("novo_projeto"); } function downloadFile(content, fileName) { const blob = new Blob([content], { type: "application/xml;charset=utf-8" }); const url = URL.createObjectURL(blob); const a = document.createElement("a"); a.href = url; a.download = fileName; document.body.appendChild(a); a.click(); document.body.removeChild(a); URL.revokeObjectURL(url); } async function generateAndDownloadMmpz(baseName) { initializeAudioContext(); const baseXmlString = generateXmlFromState(); // seu método atual (patterns OK) const xmlDoc = new DOMParser().parseFromString(baseXmlString, "application/xml"); // JSZip precisa existir (você já usa pra abrir mmpz) const zip = new JSZip(); await applySampleTracksToXmlAndZip(xmlDoc, zip); const finalXml = new XMLSerializer().serializeToString(xmlDoc); zip.file(`${baseName}.mmp`, finalXml); zip.file( "README.txt", "1) Extraia este 
.zip\n2) Abra o arquivo .mmp no LMMS\n3) Mantenha a pasta samples/ na mesma pasta do .mmp\n" ); const blob = await zip.generateAsync({ type: "blob", compression: "DEFLATE" }); downloadBlob(blob, `${baseName}.zip`); } export async function buildRenderPackageBlob(baseName = "projeto") { initializeAudioContext(); const baseXmlString = generateXmlFromState(); // patterns const xmlDoc = new DOMParser().parseFromString(baseXmlString, "application/xml"); const zip = new JSZip(); await applySampleTracksToXmlAndZip(xmlDoc, zip); // ✅ coloca sampletco + samples/*.wav const finalXml = new XMLSerializer().serializeToString(xmlDoc); zip.file(`${baseName}.mmp`, finalXml); const blob = await zip.generateAsync({ type: "blob", compression: "DEFLATE" }); // pode ser .mmpz (LMMS costuma aceitar) ou .zip (você já usa) return { blob, fileName: `${baseName}.mmpz` }; } export async function buildRenderPackageBase64(baseName = "projeto") { const { blob, fileName } = await buildRenderPackageBlob(baseName); const base64 = await blobToBase64(blob); return { mmpzBase64: base64, mmpzName: fileName }; } function blobToBase64(blob) { return new Promise((resolve, reject) => { const r = new FileReader(); r.onload = () => resolve(String(r.result).split(",")[1]); // remove data:... 
r.onerror = reject; r.readAsDataURL(blob); }); } function downloadBlob(blob, fileName) { const url = URL.createObjectURL(blob); const a = document.createElement("a"); a.href = url; a.download = fileName; document.body.appendChild(a); a.click(); document.body.removeChild(a); URL.revokeObjectURL(url); } // -------------------- WAV helpers -------------------- function _clamp(n, a, b) { const x = Number(n); if (!Number.isFinite(x)) return a; return Math.max(a, Math.min(b, x)); } function _encodeWav16(channelData, sampleRate) { const numChannels = channelData.length; const length = channelData[0]?.length || 0; const bytesPerSample = 2; const blockAlign = numChannels * bytesPerSample; const byteRate = sampleRate * blockAlign; const dataSize = length * blockAlign; const buffer = new ArrayBuffer(44 + dataSize); const view = new DataView(buffer); const writeStr = (off, s) => { for (let i = 0; i < s.length; i++) view.setUint8(off + i, s.charCodeAt(i)); }; writeStr(0, "RIFF"); view.setUint32(4, 36 + dataSize, true); writeStr(8, "WAVE"); writeStr(12, "fmt "); view.setUint32(16, 16, true); view.setUint16(20, 1, true); view.setUint16(22, numChannels, true); view.setUint32(24, sampleRate, true); view.setUint32(28, byteRate, true); view.setUint16(32, blockAlign, true); view.setUint16(34, 16, true); writeStr(36, "data"); view.setUint32(40, dataSize, true); let offset = 44; for (let i = 0; i < length; i++) { for (let ch = 0; ch < numChannels; ch++) { let s = channelData[ch][i] || 0; s = Math.max(-1, Math.min(1, s)); view.setInt16(offset, s < 0 ? 
s * 0x8000 : s * 0x7fff, true); offset += 2; } } return buffer; } function _sliceToWavArrayBuffer(audioBuffer, offsetSec, durSec, { volume = 1, pan = 0 } = {}) { const sr = audioBuffer.sampleRate; const start = Math.max(0, Math.floor((offsetSec || 0) * sr)); const end = Math.min(audioBuffer.length, start + Math.floor((durSec || 0) * sr)); const sliceLen = Math.max(0, end - start); if (sliceLen <= 0) return null; volume = _clamp(volume, 0, 1.5); pan = _clamp(pan, -1, 1); // equal-power pan const angle = (pan + 1) * (Math.PI / 4); const gL = Math.cos(angle) * volume; const gR = Math.sin(angle) * volume; const inCh = audioBuffer.numberOfChannels || 1; const outCh = (inCh === 1 && Math.abs(pan) > 1e-6) ? 2 : inCh; const out = new Array(outCh).fill(0).map(() => new Float32Array(sliceLen)); if (inCh === 1) { const src = audioBuffer.getChannelData(0).subarray(start, end); if (outCh === 1) { for (let i = 0; i < sliceLen; i++) out[0][i] = src[i] * volume; } else { for (let i = 0; i < sliceLen; i++) { out[0][i] = src[i] * gL; out[1][i] = src[i] * gR; } } } else { const L = audioBuffer.getChannelData(0).subarray(start, end); const R = audioBuffer.getChannelData(1).subarray(start, end); for (let i = 0; i < sliceLen; i++) { out[0][i] = L[i] * gL; out[1][i] = R[i] * gR; } } return _encodeWav16(out, sr); } function _makeSilenceWav(sampleRate = 44100, seconds = 0.1) { const len = Math.max(1, Math.floor(sampleRate * seconds)); const ch = [new Float32Array(len)]; return _encodeWav16(ch, sampleRate); } function _ensureSilenceInZip(zip) { if (__exportCache.silencePath) return __exportCache.silencePath; const path = "samples/__silence.wav"; zip.file(path, _makeSilenceWav(44100, 0.1)); __exportCache.silencePath = path; return path; } async function _ensureClipBuffer(clip) { if (clip?.buffer) return clip.buffer; // fallback: tenta buscar e decodificar do sourcePath if (!clip?.sourcePath) return null; const ctx = getAudioContext(); const res = await fetch(clip.sourcePath); if (!res.ok) 
return null; const arr = await res.arrayBuffer(); const decoded = await ctx.decodeAudioData(arr.slice(0)); clip.buffer = decoded; return decoded; } const __exportCache = { decodedBySrc: new Map(), // src -> AudioBuffer rawBySrc: new Map(), // src -> ArrayBuffer (wav original) sliceByKey: new Map(), // key -> { path } silencePath: null, }; function _resetExportCache() { __exportCache.decodedBySrc.clear(); __exportCache.rawBySrc.clear(); __exportCache.sliceByKey.clear(); __exportCache.silencePath = null; } function _sanitizeFileName(name) { return String(name || "clip") .normalize("NFKD") .replace(/[^\w\-\.]+/g, "_") .replace(/_+/g, "_") .replace(/^_+|_+$/g, "") .slice(0, 80) || "clip"; } async function _fetchArrayBufferCached(url) { if (__exportCache.rawBySrc.has(url)) return __exportCache.rawBySrc.get(url); const res = await fetch(url); if (!res.ok) throw new Error(`Falha ao buscar áudio: ${url}`); const arr = await res.arrayBuffer(); __exportCache.rawBySrc.set(url, arr); return arr; } async function _decodeAudioBufferCached(url) { if (__exportCache.decodedBySrc.has(url)) return __exportCache.decodedBySrc.get(url); const ctx = getAudioContext(); const arr = await _fetchArrayBufferCached(url); const decoded = await ctx.decodeAudioData(arr.slice(0)); __exportCache.decodedBySrc.set(url, decoded); return decoded; } function _isProbablyWavUrl(url) { return /\.wav(\?|#|$)/i.test(String(url || "")); } // -------------------- XML: SampleTracks + zip -------------------- async function applySampleTracksToXmlAndZip(xmlDoc, zip) { _resetExportCache(); const songTc = xmlDoc.querySelector("song > trackcontainer"); if (!songTc) return; // template se existir const existingSample = songTc.querySelector(':scope > track[type="2"]'); const template = existingSample ? 
existingSample.cloneNode(true) : null;

  // Remove all old sample tracks; they are rebuilt from appState below.
  Array.from(songTc.querySelectorAll(':scope > track[type="2"]')).forEach(n => n.remove());

  const secondsPerStep = getSecondsPerStep();
  const tracks = appState.audio?.tracks || [];
  const clips = appState.audio?.clips || [];

  // One XML track per audio lane; clips are grouped by lane further down.
  for (let i = 0; i < tracks.length; i++) {
    const lane = tracks[i];
    const laneName = lane?.name || `Áudio ${i + 1}`;

    // Clone the template track if one existed, otherwise build a minimal
    // type="2" track with a neutral <sampletrack>.
    const trackNode = template ? template.cloneNode(true) : (() => {
      const t = xmlDoc.createElement("track");
      t.setAttribute("type", "2");
      const st = xmlDoc.createElement("sampletrack");
      st.setAttribute("vol", "100");
      st.setAttribute("pan", "0");
      t.appendChild(st);
      return t;
    })();
    trackNode.setAttribute("type", "2");
    trackNode.setAttribute("name", laneName);

    // Keep the sampletrack neutral — the mix (vol/pan) is baked into the wavs.
    let st = trackNode.querySelector("sampletrack");
    if (!st) {
      st = xmlDoc.createElement("sampletrack");
      trackNode.insertBefore(st, trackNode.firstChild);
    }
    st.setAttribute("vol", "100");
    st.setAttribute("pan", "0");

    // Drop any clip nodes inherited from the template.
    Array.from(trackNode.querySelectorAll(":scope > sampletco")).forEach(n => n.remove());

    // This lane's clips, in chronological order (copied before sorting).
    const laneClips = clips
      .filter(c => String(c.trackId) === String(lane.id))
      .slice()
      .sort((a, b) => (a.startTimeInSeconds || 0) - (b.startTimeInSeconds || 0));

    for (let cidx = 0; cidx < laneClips.length; cidx++) {
      const clip = laneClips[cidx];
      const startSec = clip.startTimeInSeconds || 0;
      const durSec = clip.durationInSeconds || 0;
      if (durSec <= 0.0001) continue; // skip zero-length clips
      const vol = clip.volume ?? 1;
      const pan = clip.pan ??
0; const muted = !!clip.muted || vol === 0; const posTicks = Math.round((startSec / secondsPerStep) * 12); const lenTicks = Math.max(1, Math.round((durSec / secondsPerStep) * 12)); // ----------------------------------------- // ✅ OTIMIZAÇÃO 1: mutado -> usa silence.wav // ----------------------------------------- if (muted) { const silPath = _ensureSilenceInZip(zip); const tco = xmlDoc.createElement("sampletco"); tco.setAttribute("pos", String(Math.max(0, posTicks))); tco.setAttribute("len", String(lenTicks)); tco.setAttribute("muted", "0"); // já é silêncio tco.setAttribute("src", silPath); trackNode.appendChild(tco); continue; } // precisamos do sourcePath (arquivo original) const srcUrl = clip.sourcePath || clip.src || clip.url; if (!srcUrl) continue; const offsetSec = clip.offset || 0; // ✅ ADICIONE ISSO const needsOffset = Math.abs(offsetSec) > 1e-6; const needsBake = Math.abs((vol ?? 1) - 1) > 1e-6 || Math.abs((pan ?? 0) - 0) > 1e-6; // ----------------------------------------- // ✅ OTIMIZAÇÃO 2: sem offset + sem bake mix // -> usa arquivo original no zip (sem decode/encode) // ----------------------------------------- const isWav = _isProbablyWavUrl(srcUrl); if (!needsOffset && !needsBake && isWav) { const raw = await _fetchArrayBufferCached(srcUrl); const name = `${_sanitizeFileName(laneName)}__${_sanitizeFileName(clip.name || "clip")}` + `__raw__${_sanitizeFileName(clip.id || String(cidx))}.wav`; const zipPath = `samples/${name}`; if (!zip.file(zipPath)) zip.file(zipPath, raw); const tco = xmlDoc.createElement("sampletco"); tco.setAttribute("pos", String(Math.max(0, posTicks))); tco.setAttribute("len", String(lenTicks)); tco.setAttribute("muted", "0"); tco.setAttribute("src", zipPath); trackNode.appendChild(tco); continue; } // se NÃO for wav, cai no “bake” (decode+encode wav) abaixo ✅ // ----------------------------------------- // ✅ OTIMIZAÇÃO 3: cache de slice idêntico // ----------------------------------------- const sliceKey = 
`${srcUrl}__o=${offsetSec.toFixed(6)}__d=${durSec.toFixed(6)}__v=${Number(vol).toFixed(6)}__p=${Number(pan).toFixed(6)}`;
      const cached = __exportCache.sliceByKey.get(sliceKey);
      if (cached) {
        // Identical slice already rendered this export: reuse its zip path.
        const tco = xmlDoc.createElement("sampletco");
        tco.setAttribute("pos", String(Math.max(0, posTicks)));
        tco.setAttribute("len", String(lenTicks));
        tco.setAttribute("muted", "0");
        tco.setAttribute("src", cached.path);
        trackNode.appendChild(tco);
        continue;
      }

      // Decode once per srcUrl (memoized), then bake offset/vol/pan into a wav.
      const buffer = await _decodeAudioBufferCached(srcUrl);
      const wav = _sliceToWavArrayBuffer(buffer, offsetSec, durSec, { volume: vol, pan });
      if (!wav) continue; // empty slice — nothing to write

      const sliceName =
        `${_sanitizeFileName(laneName)}__${_sanitizeFileName(clip.name || "clip")}` +
        `__slice__${_sanitizeFileName(clip.id || String(cidx))}.wav`;
      const zipPath = `samples/${sliceName}`;
      zip.file(zipPath, wav);
      __exportCache.sliceByKey.set(sliceKey, { path: zipPath });

      const tco = xmlDoc.createElement("sampletco");
      tco.setAttribute("pos", String(Math.max(0, posTicks)));
      tco.setAttribute("len", String(lenTicks));
      tco.setAttribute("muted", "0");
      tco.setAttribute("src", zipPath);
      trackNode.appendChild(tco);
    }

    songTc.appendChild(trackNode);
  }
}

// NOTE(review): generateXmlFromState is declared elsewhere in this file —
// confirm the declaration exists before this re-export is relied upon.
export { generateXmlFromState as generateXmlFromStateExported };