// js/creations/file.js
//--------------------------------------------------------------
// REQUIRED IMPORTS
//--------------------------------------------------------------
import {
  appState,
  saveStateToSession,
  resetProjectState,
  loadStateFromSession,
} from "./state.js";
import { loadAudioForTrack } from "./pattern/pattern_state.js";
import { renderAll, getSamplePathMap } from "./ui.js";
import { DEFAULT_PAN, DEFAULT_VOLUME, NOTE_LENGTH } from "./config.js";
import { initializeAudioContext, getMainGainNode } from "./audio.js";
import { DEFAULT_PROJECT_XML, getSecondsPerStep } from "./utils.js";
import * as Tone from "https://esm.sh/tone";
import { sendAction } from "./socket.js";
// NOTE: comes from the audio module (the same one audio_ui uses)
import {
  addAudioTrackLane,
  addAudioClipToTimeline,
  updateAudioClipProperties,
} from "./audio/audio_state.js";

// Number of playlist ticks that make up one step in this app's timeline.
const TICKS_PER_STEP = 12;

/**
 * Generates a reasonably unique id, preferring crypto.randomUUID when
 * available and falling back to a timestamp + random suffix.
 * @param {string} prefix - prefix used only in the fallback id.
 * @returns {string}
 */
function safeId(prefix) {
  return (
    crypto?.randomUUID?.() ||
    `${prefix}_${Date.now()}_${Math.floor(Math.random() * 1e6)}`
  );
}

/**
 * Returns the final path component of a path string (handles both "/" and
 * "\" separators). Nullish input yields "".
 * @param {string|null|undefined} path
 * @returns {string}
 */
function basename(path) {
  return String(path || "")
    .split(/[\\/]/)
    .pop();
}

/**
 * Resolves a sample name to a playable URL.
 * @param {string|null|undefined} sampleName - bare sample file name.
 * @param {Object<string,string>} pathMap - name -> URL map from the samples manifest.
 * @returns {string|null} resolved URL, or null when no name was given.
 */
function resolveSamplePath(sampleName, pathMap) {
  // 1) try the manifest first (best option)
  if (sampleName && pathMap[sampleName]) return pathMap[sampleName];
  // 2) simple fallback convention — adjust if needed
  if (sampleName) return `src/samples/${sampleName}`;
  return null;
}

/**
 * Helper: converts playlist ticks to seconds using the app's step duration.
 * (In LMMS itself 1 beat = 192 ticks, but this app's playlist uses
 * TICKS_PER_STEP ticks per step.)
 * @param {number} ticks - position/length in playlist ticks.
 * @param {number} bpm - unused; kept for call-site compatibility (step
 *   duration already accounts for tempo via getSecondsPerStep()).
 * @returns {number} duration in seconds.
 */
function ticksToSeconds(ticks, bpm) {
  // BUG FIX: `seconds` was previously assigned without a declaration, which
  // throws a ReferenceError at runtime (ES modules are always strict mode).
  const seconds = (ticks / TICKS_PER_STEP) * getSecondsPerStep();
  return seconds;
}

/**
 * Loads a project from a "beat index" JSON document: resets state, applies
 * BPM, rebuilds pattern tracks, and creates audio lanes/clips for every
 * sample track. Re-renders the UI when done.
 * @param {object} data - beat index JSON (bpm, original_title, tracks[], ...).
 */
export async function parseBeatIndexJson(data) {
  resetProjectState();
  initializeAudioContext();

  // BPM
  const bpm = Number(data?.bpm || 140);
  const bpmInput = document.getElementById("bpm-input");
  if (bpmInput) bpmInput.value = bpm;

  // (optional) project name
  if (data?.original_title) {
    appState.global.currentBeatBasslineName = data.original_title;
  }

  const pathMap = getSamplePathMap(); // comes from the samples manifest
  const secondsPerStep = getSecondsPerStep();
  const newPatternTracks = [];

  // 1) build pattern.tracks (plugin/bassline etc.)
  (data.tracks || []).forEach((t, idx) => {
    if (t.type === "sample") return;
    const id = t.id || safeId("ptrk");
    // normalize the name (the JSON may use track_name / bassline_name)
    const name =
      t.track_name ||
      t.bassline_name ||
      t.instrument_name ||
      t.instrumentName ||
      `Track ${idx + 1}`;
    newPatternTracks.push({
      ...t,
      id,
      name,
    });
  });
  appState.pattern.tracks = newPatternTracks;

  // 2) create audio lanes/clips from the sample tracks
  const sampleTracks = (data?.tracks || []).filter((t) => t?.type === "sample");
  for (let i = 0; i < sampleTracks.length; i++) {
    const t = sampleTracks[i];
    const trackId = `sample_lane_${Date.now()}_${i}`;
    appState.audio.tracks.push({
      id: trackId,
      name: t.track_name || "Áudio",
    });

    // Fallback is "" (not undefined) so the URL below never contains the
    // literal text "undefined". (basename() already returns "" for a
    // missing src, so the old third `||` clause was redundant.)
    const fileName = t.sample_name || basename(t.sample_info?.src) || "";
    const resolvedUrl =
      pathMap[fileName] ||
      `/src_mmpSearch/samples/samples/${
        data?.file || data?.original_title || ""
      }/${fileName}`;

    // timing in this playlist's convention (TICKS_PER_STEP ticks per step)
    const startSec =
      (Number(t.sample_info?.pos || 0) / TICKS_PER_STEP) * secondsPerStep;
    const durSec =
      (Number(t.sample_info?.len || 0) / TICKS_PER_STEP) * secondsPerStep;

    const clipId = `clip_${trackId}_0`;
    addAudioClipToTimeline(resolvedUrl, trackId, startSec, clipId, fileName);

    // LMMS stores vol/pan as 0-100 integers; muted is the string "1"/"0".
    const vol = Number(t.sample_info?.vol ?? 100) / 100;
    const pan = Number(t.sample_info?.pan ?? 0) / 100;
    const muted = String(t.sample_info?.muted ?? "0") === "1";
    updateAudioClipProperties(clipId, {
      durationInSeconds: durSec || 0,
      volume: muted ? 0 : vol,
      pan: Number.isNaN(pan) ? 0 : pan,
    });
  }
  renderAll();
}

//--------------------------------------------------------------
// FILE HANDLING
//--------------------------------------------------------------

/**
 * Handles a remote "reset project" command: clears the per-room session
 * snapshot, resets in-memory state, restores the default transport inputs
 * (BPM 140, 1 bar, 4/4) and re-renders.
 */
export function handleLocalProjectReset() {
  console.log("Recebido comando de reset. Limpando estado local...");
  if (window.ROOM_NAME) {
    try {
      sessionStorage.removeItem(`temp_state_${window.ROOM_NAME}`);
    } catch (e) {
      console.error("Falha ao limpar estado da sessão:", e);
    }
  }
  resetProjectState();
  const bpmInput = document.getElementById("bpm-input");
  if (bpmInput) bpmInput.value = 140;
  ["bars-input", "compasso-a-input", "compasso-b-input"].forEach((id) => {
    const el = document.getElementById(id);
    if (el) el.value = id === "bars-input" ? 1 : 4;
  });
  renderAll();
}

/**
 * Loads a user-selected project file and broadcasts it to the room:
 *  - .json  -> sent as a LOAD_BEAT_INDEX action
 *  - .mmpz  -> unzipped (via global JSZip) to find the inner .mmp
 *  - other  -> read as raw XML text
 * Errors are logged and surfaced via alert().
 * @param {File} file
 */
export async function handleFileLoad(file) {
  let xmlContent = "";
  try {
    const lower = file.name.toLowerCase();
    if (lower.endsWith(".json")) {
      const json = JSON.parse(await file.text());
      sendAction({ type: "LOAD_BEAT_INDEX", data: json });
      return;
    }
    if (lower.endsWith(".mmpz")) {
      // eslint-disable-next-line no-undef
      const jszip = new JSZip();
      const zip = await jszip.loadAsync(file);
      const projectFile = Object.keys(zip.files).find((name) =>
        name.toLowerCase().endsWith(".mmp")
      );
      if (!projectFile)
        throw new Error(
          "Não foi possível encontrar um arquivo .mmp dentro do .mmpz"
        );
      xmlContent = await zip.files[projectFile].async("string");
    } else {
      xmlContent = await file.text();
    }
    sendAction({ type: "LOAD_PROJECT", xml: xmlContent });
  } catch (error) {
    console.error("Erro ao carregar o projeto:", error);
    alert(`Erro ao carregar projeto: ${error.message}`);
  }
}

/**
 * Fetches a beat index JSON from the server and broadcasts it to the room.
 * @param {string} fileName - index name without the .json extension.
 * @throws {Error} when the HTTP request fails.
 */
export async function loadBeatIndexFromServer(fileName) {
  const response = await fetch(`src_mmpSearch/index/${fileName}.json`);
  if (!response.ok) throw new Error("Não foi possível carregar beat index");
  const data = await response.json();
  sendAction({ type: "LOAD_BEAT_INDEX", data });
}

/**
 * Fetches an .mmp project from the server and broadcasts it to the room.
 * @param {string} fileName - full file name under src_mmpSearch/mmp/.
 * @returns {Promise<boolean>} true on success, false on failure (which is
 *   also logged and shown via alert()).
 */
export async function loadProjectFromServer(fileName) {
  try {
    const response = await fetch(`src_mmpSearch/mmp/${fileName}`);
    if (!response.ok)
      throw new Error(`Não foi possível carregar o arquivo ${fileName}`);
    const xmlContent = await response.text();
    sendAction({ type: "LOAD_PROJECT", xml: xmlContent });
    return true;
  } catch (error) {
    console.error("Erro ao carregar projeto do servidor:", error);
    alert(`Erro ao carregar projeto: ${error.message}`);
    return false;
  }
}

// =================================================================
// HELPER: PARSE A SINGLE INSTRUMENT NODE
// =================================================================

/**
 * Parses one LMMS <track type="0"> (instrument) node into this app's track
 * object, including its patterns, sample resolution and volume/pan.
 * @param {Element} trackNode - the instrument's <track> element.
 * @param {Element[]} sortedBBTrackNameNodes - <bbtco> nodes sorted by pos;
 *   their names label the patterns to create.
 * @param {Object<string,string>} pathMap - sample name -> URL manifest map.
 * @param {string|null} parentBasslineId - id of the owning bassline rack,
 *   or null for Song Editor tracks.
 * @returns {object|null} track object, or null if the node is not a
 *   complete instrument (missing <instrument>/<instrumenttrack>).
 */
function parseInstrumentNode(
  trackNode,
  sortedBBTrackNameNodes,
  pathMap,
  parentBasslineId = null
) {
  const instrumentNode = trackNode.querySelector("instrument");
  const instrumentTrackNode = trackNode.querySelector("instrumenttrack");
  if (!instrumentNode || !instrumentTrackNode) return null;

  const trackName = trackNode.getAttribute("name");
  const instrumentName = instrumentNode.getAttribute("name");

  // Pattern logic: collect this track's <pattern> nodes ordered by position.
  const allPatternsNodeList = trackNode.querySelectorAll("pattern");
  const allPatternsArray = Array.from(allPatternsNodeList).sort((a, b) => {
    return (
      (parseInt(a.getAttribute("pos"), 10) || 0) -
      (parseInt(b.getAttribute("pos"), 10) || 0)
    );
  });

  // One pattern per BB column; fall back to a single stub when there are none.
  const patternsToCreate =
    sortedBBTrackNameNodes.length > 0
      ? sortedBBTrackNameNodes
      : [{ getAttribute: () => "Pattern 1" }];

  const patterns = patternsToCreate.map((bbTrack, index) => {
    const patternNode = allPatternsArray[index];
    const bbTrackName = bbTrack.getAttribute("name") || `Pattern ${index + 1}`;
    if (!patternNode) {
      // No data for this column: empty 16-step pattern.
      return {
        name: bbTrackName,
        steps: new Array(16).fill(false),
        notes: [],
        pos: 0,
      };
    }
    const patternSteps = parseInt(patternNode.getAttribute("steps"), 10) || 16;
    const steps = new Array(patternSteps).fill(false);
    const notes = [];

    // === TIMING ===
    // In LMMS, 1 quarter note (beat) = 192 ticks,
    // so one 1/16 step = 192 / 4 = 48 ticks.
    const ticksPerStep = 48;
    patternNode.querySelectorAll("note").forEach((noteNode) => {
      const pos = parseInt(noteNode.getAttribute("pos"), 10);
      notes.push({
        pos: pos,
        len: parseInt(noteNode.getAttribute("len"), 10),
        key: parseInt(noteNode.getAttribute("key"), 10),
        vol: parseInt(noteNode.getAttribute("vol"), 10),
        pan: parseInt(noteNode.getAttribute("pan"), 10),
      });
      // Work out which step cell to light up.
      const stepIndex = Math.round(pos / ticksPerStep);
      if (stepIndex < patternSteps) steps[stepIndex] = true;
    });
    return {
      name: bbTrackName,
      steps: steps,
      notes: notes,
      pos: parseInt(patternNode.getAttribute("pos"), 10) || 0,
    };
  });

  // Sample vs plugin logic: audiofileprocessor tracks become samplers.
  let finalSamplePath = null;
  let trackType = "plugin";
  if (instrumentName === "audiofileprocessor") {
    trackType = "sampler";
    const afpNode = instrumentNode.querySelector("audiofileprocessor");
    const sampleSrc = afpNode ? afpNode.getAttribute("src") : null;
    if (sampleSrc) {
      const filename = sampleSrc.split("/").pop();
      if (pathMap[filename]) {
        finalSamplePath = pathMap[filename];
      } else {
        // Not in the manifest: map the project-relative src under src/samples/.
        let cleanSrc = sampleSrc.startsWith("samples/")
          ? sampleSrc.substring("samples/".length)
          : sampleSrc;
        finalSamplePath = `src/samples/${cleanSrc}`;
      }
    }
  }

  // LMMS stores vol/pan as 0-100; convert to 0-1 with config defaults.
  const volFromFile = parseFloat(instrumentTrackNode.getAttribute("vol"));
  const panFromFile = parseFloat(instrumentTrackNode.getAttribute("pan"));

  return {
    id: Date.now() + Math.random(),
    name: trackName,
    type: trackType,
    samplePath: finalSamplePath,
    patterns: patterns,
    activePatternIndex: 0,
    volume: !isNaN(volFromFile) ? volFromFile / 100 : DEFAULT_VOLUME,
    pan: !isNaN(panFromFile) ? panFromFile / 100 : DEFAULT_PAN,
    instrumentName: instrumentName,
    instrumentXml: instrumentNode.innerHTML,
    parentBasslineId: parentBasslineId, // parent id kept for filtering in the UI
  };
}

// =================================================================
// 🔥 MAIN PARSING FUNCTION
// =================================================================

/**
 * Parses a full LMMS .mmp XML document into app state: global settings,
 * Song Editor instruments, the shared Beat/Bassline rack, and bassline
 * container tracks. Loads audio for every real instrument and re-renders.
 * @param {string} xmlString - the project XML.
 */
export async function parseMmpContent(xmlString) {
  resetProjectState();
  initializeAudioContext();
  appState.global.justReset = xmlString === DEFAULT_PROJECT_XML;

  const audioContainer = document.getElementById("audio-track-container");
  if (audioContainer) {
    audioContainer.innerHTML = "";
  }

  const parser = new DOMParser();
  const xmlDoc = parser.parseFromString(xmlString, "application/xml");
  appState.global.originalXmlDoc = xmlDoc;

  // Global configuration (BPM, time signature)
  const head = xmlDoc.querySelector("head");
  if (head) {
    const setVal = (id, attr, def) => {
      const el = document.getElementById(id);
      if (el) el.value = head.getAttribute(attr) || def;
    };
    setVal("bpm-input", "bpm", 140);
    setVal("compasso-a-input", "timesig_numerator", 4);
    setVal("compasso-b-input", "timesig_denominator", 4);
  }

  const pathMap = getSamplePathMap();

  // 1. Identify beat/pattern columns (used to map steps).
  //    They normally live inside the first Bassline container.
  const bbTrackNodes = Array.from(xmlDoc.querySelectorAll('track[type="1"]'));
  let sortedBBTrackNameNodes = [];
  if (bbTrackNodes.length > 0) {
    sortedBBTrackNameNodes = Array.from(
      bbTrackNodes[0].querySelectorAll("bbtco")
    ).sort((a, b) => {
      return (
        (parseInt(a.getAttribute("pos"), 10) || 0) -
        (parseInt(b.getAttribute("pos"), 10) || 0)
      );
    });
  }

  // -------------------------------------------------------------
  // 2. EXTRACT ROOT INSTRUMENTS (SONG EDITOR)
  // -------------------------------------------------------------
  // Only instruments sitting directly in the Song Editor (not inside BBTracks).
  const songInstrumentNodes = Array.from(
    xmlDoc.querySelectorAll('song > trackcontainer > track[type="0"]')
  );
  const songTracks = songInstrumentNodes
    .map((node) =>
      parseInstrumentNode(node, sortedBBTrackNameNodes, pathMap, null)
    ) // null = no parent
    .filter((t) => t !== null);

  // -------------------------------------------------------------
  // 3. EXTRACT BASSLINE TRACKS (BBTracks) AND THE SHARED RACK
  // -------------------------------------------------------------
  const bbRackNode = bbTrackNodes.find((n) =>
    n.querySelector('bbtrack > trackcontainer > track[type="0"]')
  );

  // Id of the shared rack (only when a rack with instruments exists).
  const rackId = bbRackNode
    ? `bbRack_${Date.now()}_${Math.random().toString(36).slice(2)}`
    : null;

  // 3.2) Parse the rack's instruments ONCE.
  let bbRackInstruments = [];
  if (bbRackNode && rackId) {
    const internalInstrumentNodes = Array.from(
      bbRackNode.querySelectorAll('bbtrack > trackcontainer > track[type="0"]')
    );
    bbRackInstruments = internalInstrumentNodes
      .map((node) =>
        parseInstrumentNode(node, sortedBBTrackNameNodes, pathMap, rackId)
      )
      .filter((t) => t !== null);
  }

  // 3.3) Create the "Caixa/Kick/…" containers as patterns (no own instruments).
  const basslineContainers = bbTrackNodes
    .map((trackNode, idx) => {
      const trackName = trackNode.getAttribute("name") || "Beat/Bassline";
      const playlistClips = Array.from(
        trackNode.querySelectorAll(":scope > bbtco")
      ).map((bbtco) => ({
        pos: parseInt(bbtco.getAttribute("pos"), 10) || 0,
        len: parseInt(bbtco.getAttribute("len"), 10) || 192,
        name: trackName,
      }));
      if (playlistClips.length === 0) return null;
      return {
        id: `bassline_${Date.now()}_${Math.random().toString(36).slice(2)}`,
        name: trackName,
        type: "bassline",
        playlist_clips: playlistClips,
        // 🔥 important: which "column/pattern" this BBTrack represents
        patternIndex: idx,
        // 🔥 important: where its instruments come from
        instrumentSourceId: rackId,
        volume: 1,
        pan: 0,
        patterns: [],
        isMuted: trackNode.getAttribute("muted") === "1",
      };
    })
    .filter(Boolean);

  // -------------------------------------------------------------
  // 4. COMBINE AND FINALIZE
  // -------------------------------------------------------------
  // The final flat list contains:
  //  1) Song Editor instruments
  //  2) Instruments of the shared Beat/Bassline rack
  //  3) Containers (Caixa/Kick/...) that only reference the rack
  const newTracks = [...songTracks, ...bbRackInstruments, ...basslineContainers];

  // Initialize audio nodes only for real instruments.
  newTracks.forEach((track) => {
    if (track.type !== "bassline") {
      track.volumeNode = new Tone.Volume(Tone.gainToDb(track.volume));
      track.pannerNode = new Tone.Panner(track.pan);
      track.volumeNode.connect(track.pannerNode);
      track.pannerNode.connect(getMainGainNode());
    }
  });

  // Configure the timeline size from the first instrument that has steps.
  let isFirstTrackWithNotes = true;
  newTracks.forEach((track) => {
    if (track.type !== "bassline" && isFirstTrackWithNotes) {
      const activePattern = track.patterns[track.activePatternIndex || 0];
      if (
        activePattern &&
        activePattern.steps &&
        activePattern.steps.length > 0
      ) {
        const bars = Math.ceil(activePattern.steps.length / 16);
        const barsInput = document.getElementById("bars-input");
        if (barsInput) barsInput.value = bars > 0 ? bars : 1;
        isFirstTrackWithNotes = false;
      }
    }
  });

  // Load samples/plugins for all real instruments in parallel.
  try {
    const promises = newTracks
      .filter((t) => t.type !== "bassline")
      .map((track) => loadAudioForTrack(track));
    await Promise.all(promises);
  } catch (error) {
    console.error("Erro ao carregar áudios:", error);
  }

  // Commit to global state.
  appState.pattern.tracks = newTracks;
  appState.pattern.focusedBasslineId = null; // reset the focus
  const firstInst = newTracks.find((t) => t.type !== "bassline");
  appState.pattern.activeTrackId = firstInst ? firstInst.id : null;
  appState.pattern.activePatternIndex = 0;
  loadStateFromSession();
  await Promise.resolve();
  renderAll();
}

// --------------------------------------------------------------
// FILE GENERATION (EXPORT)
// --------------------------------------------------------------

/**
 * Exports the current project as an .mmp download, reusing the originally
 * loaded XML document when one exists.
 */
export function generateMmpFile() {
  if (appState.global.originalXmlDoc) {
    modifyAndSaveExistingMmp();
  } else {
    generateNewMmp();
  }
}

/**
 * Serializes the current app state back to LMMS project XML, based on the
 * original document (or the default template when none was loaded).
 * @returns {string} the serialized XML.
 */
function generateXmlFromState() {
  if (!appState.global.originalXmlDoc) {
    const parser = new DOMParser();
    appState.global.originalXmlDoc = parser.parseFromString(
      DEFAULT_PROJECT_XML,
      "application/xml"
    );
  }
  const xmlDoc = appState.global.originalXmlDoc.cloneNode(true);
  const head = xmlDoc.querySelector("head");
  if (head) {
    head.setAttribute("bpm", document.getElementById("bpm-input").value || 140);
    head.setAttribute(
      "num_bars",
      document.getElementById("bars-input").value || 1
    );
    head.setAttribute(
      "timesig_numerator",
      document.getElementById("compasso-a-input").value || 4
    );
    head.setAttribute(
      "timesig_denominator",
      document.getElementById("compasso-b-input").value || 4
    );
  }

  // Simplified export: put all real instruments into the first container.
  const bbTrackContainer = xmlDoc.querySelector(
    'track[type="1"] > bbtrack > trackcontainer'
  );
  if (bbTrackContainer) {
    bbTrackContainer
      .querySelectorAll('track[type="0"]')
      .forEach((node) => node.remove());
    const tracksXml = appState.pattern.tracks
      .filter((t) => t.type !== "bassline")
      .map((track) => createTrackXml(track))
      .join("");
    // NOTE(review): this template literal appears to have lost its XML root
    // wrapper (parsing multiple sibling <track> elements requires a single
    // root node) — restore from version control / verify against the
    // original file.
    const tempDoc = new DOMParser().parseFromString(
      `${tracksXml}`,
      "application/xml"
    );
    Array.from(tempDoc.documentElement.children).forEach((newTrackNode) => {
      bbTrackContainer.appendChild(newTrackNode);
    });
  }
  const serializer = new XMLSerializer();
  return serializer.serializeToString(xmlDoc);
}

/**
 * Pushes the current pattern state (as XML) to the server for the active
 * room and snapshots it into sessionStorage. No-op outside a room.
 */
export function syncPatternStateToServer() {
  if (!window.ROOM_NAME) return;
  const currentXml = generateXmlFromState();
  sendAction({ type: "SYNC_PATTERN_STATE", xml: currentXml });
  saveStateToSession();
}

/**
 * Builds the XML fragment for one track (instrument + patterns).
 * @param {object} track - an app track object (see parseInstrumentNode).
 * @returns {string} XML fragment, or "" when the track has no patterns.
 */
function createTrackXml(track) {
  if (!track.patterns || track.patterns.length === 0) return "";
  const ticksPerStep = 48; // kept in sync with the parsing side
  const lmmsVolume = Math.round(track.volume * 100);
  const lmmsPan = Math.round(track.pan * 100);
  const instrName = track.instrumentName || "kicker";
  // NOTE(review): the template literals below are empty — their XML markup
  // (e.g. `<note .../>`, `<pattern ...>`, `<track ...>` tags, presumably
  // using instrName/lmmsVolume/lmmsPan) appears to have been stripped from
  // this copy of the file. Restore from version control before shipping;
  // the surrounding control flow is preserved unchanged.
  const instrXml = track.instrumentXml || ``;
  const patternsXml = track.patterns
    .map((pattern) => {
      let patternNotesXml = "";
      if (
        track.type === "plugin" &&
        pattern.notes &&
        pattern.notes.length > 0
      ) {
        // Plugin tracks: serialize every stored note verbatim.
        patternNotesXml = pattern.notes
          .map((note) => ``)
          .join("\n ");
      } else if (pattern.steps) {
        // Step-sequencer tracks: emit one note per active step.
        patternNotesXml = pattern.steps
          .map((isActive, index) => {
            if (isActive) {
              const notePos = Math.round(index * ticksPerStep);
              return ``;
            }
            return "";
          })
          .join("\n ");
      }
      return ` ${patternNotesXml} `;
    })
    .join("\n ");
  return ` ${instrXml} ${patternsXml} `;
}

// Downloads the (edited) existing project under a fixed file name.
function modifyAndSaveExistingMmp() {
  const content = generateXmlFromState();
  downloadFile(content, "projeto_editado.mmp");
}

// Downloads a project generated from scratch (default template).
function generateNewMmp() {
  const content = generateXmlFromState();
  downloadFile(content, "novo_projeto.mmp");
}

/**
 * Triggers a browser download of `content` as an XML file.
 * @param {string} content - file contents.
 * @param {string} fileName - suggested download name.
 */
function downloadFile(content, fileName) {
  const blob = new Blob([content], { type: "application/xml;charset=utf-8" });
  const url = URL.createObjectURL(blob);
  const a = document.createElement("a");
  a.href = url;
  a.download = fileName;
  document.body.appendChild(a);
  a.click();
  document.body.removeChild(a);
  URL.revokeObjectURL(url); // release the object URL to avoid leaking memory
}

export { generateXmlFromState as generateXmlFromStateExported };