// js/creations/file.js
//--------------------------------------------------------------
// REQUIRED IMPORTS
//--------------------------------------------------------------
import {
appState,
saveStateToSession,
resetProjectState,
loadStateFromSession,
} from "./state.js";
import { loadAudioForTrack } from "./pattern/pattern_state.js";
import { renderAll, getSamplePathMap } from "./ui.js";
import { DEFAULT_PAN, DEFAULT_VOLUME, NOTE_LENGTH } from "./config.js";
import { initializeAudioContext, getMainGainNode } from "./audio.js";
import { DEFAULT_PROJECT_XML, getSecondsPerStep, SAMPLE_SRC } from "./utils.js";
import * as Tone from "https://esm.sh/tone";
import { sendAction } from "./socket.js";
// ⚠️ comes from the audio module (the same one audio_ui uses)
import {
addAudioTrackLane,
addAudioClipToTimeline,
updateAudioClipProperties,
} from "./audio/audio_state.js";
const TICKS_PER_STEP = 12;
function safeId(prefix) {
return (crypto?.randomUUID?.() || `${prefix}_${Date.now()}_${Math.floor(Math.random() * 1e6)}`);
}
function basename(path) {
return String(path || "").split(/[\\/]/).pop();
}
function resolveSamplePath(sampleName, pathMap) {
// 1) try the manifest first (most reliable)
if (sampleName && pathMap[sampleName]) return pathMap[sampleName];
// 2) simple fallback (if this naming convention applies)
// adjust if needed
if (sampleName) return `${SAMPLE_SRC}/${sampleName}`;
return null;
}
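// Example (hypothetical file name): resolveSamplePath("kick.wav", getSamplePathMap())
// returns the manifest entry when present, otherwise `${SAMPLE_SRC}/kick.wav`.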
function ticksToSeconds(ticks) {
return (ticks / TICKS_PER_STEP) * getSecondsPerStep();
}
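// Example (assuming getSecondsPerStep() returns the duration of one 1/16 step):
// at 140 BPM a step lasts 60 / 140 / 4 ≈ 0.107 s, so 192 ticks (one bar) ≈ 1.71 s.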
function dirname(path) {
const parts = String(path || "").split(/[\\/]/);
parts.pop();
return parts.join("/");
}
function lastFolder(path) {
const dir = dirname(path);
return String(dir).split(/[\\/]/).pop();
}
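// Loads a project from a "beat index" JSON object. Expected shape (inferred from
// the fields read below): { bpm, original_title, file, tracks: [{ type, track_name,
// sample_name, sample_info: { src, pos, len, vol, pan, muted }, ... }] }.
// Non-sample tracks go to appState.pattern; "sample" tracks become audio lanes/clips.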
export async function parseBeatIndexJson(data) {
resetProjectState();
initializeAudioContext();
// BPM
const bpm = Number(data?.bpm || 140);
const bpmInput = document.getElementById("bpm-input");
if (bpmInput) bpmInput.value = bpm;
// (optional) project name
if (data?.original_title) {
appState.global.currentBeatBasslineName = data.original_title;
}
const pathMap = getSamplePathMap(); // comes from the samples manifest
const secondsPerStep = getSecondsPerStep();
const newPatternTracks = [];
// 1) build pattern.tracks (plugin/bassline etc.)
(data.tracks || []).forEach((t, idx) => {
if (t.type === "sample") return;
const id = t.id || safeId("ptrk");
// normalize the name (the JSON uses track_name / bassline_name)
const name =
t.track_name ||
t.bassline_name ||
t.instrument_name ||
t.instrumentName ||
`Track ${idx + 1}`;
newPatternTracks.push({
...t,
id,
name,
});
});
appState.pattern.tracks = newPatternTracks;
// 2) create audio lanes/clips from the sample tracks
const sampleTracks = (data?.tracks || []).filter((t) => t?.type === "sample");
for (let i = 0; i < sampleTracks.length; i++) {
const t = sampleTracks[i];
const trackId = `sample_lane_${Date.now()}_${i}`;
appState.audio.tracks.push({
id: trackId,
name: t.track_name || "Audio",
});
const fileName =
t.sample_name ||
basename(t.sample_info?.src) ||
"";
const encodedFileName = encodeURIComponent(fileName);
const projectName = encodeURIComponent(data?.file || data?.original_title || "");
const base = `${SAMPLE_SRC}/${projectName}`; // server layout: SAMPLE_SRC/{project}/{file}
let resolvedUrl = pathMap[fileName];
if (!resolvedUrl || !resolvedUrl.includes(`/${projectName}/`)) {
resolvedUrl = `${base}/${encodedFileName}`;
}
// timing follows the playlist convention (12 ticks per step)
const startSec = (Number(t.sample_info?.pos || 0) / TICKS_PER_STEP) * secondsPerStep;
const durSec = (Number(t.sample_info?.len || 0) / TICKS_PER_STEP) * secondsPerStep;
const clipId = `clip_${trackId}_0`;
addAudioClipToTimeline(resolvedUrl, trackId, startSec, clipId, fileName);
const vol = Number(t.sample_info?.vol ?? 100) / 100;
const pan = Number(t.sample_info?.pan ?? 0) / 100;
const muted = String(t.sample_info?.muted ?? "0") === "1";
updateAudioClipProperties(clipId, {
durationInSeconds: durSec || 0,
volume: muted ? 0 : vol,
pan: isNaN(pan) ? 0 : pan,
});
}
renderAll();
}
//--------------------------------------------------------------
// FILE HANDLING
//--------------------------------------------------------------
export function handleLocalProjectReset() {
console.log("Recebido comando de reset. Limpando estado local...");
if (window.ROOM_NAME) {
try {
sessionStorage.removeItem(`temp_state_${window.ROOM_NAME}`);
} catch (e) {
console.error("Falha ao limpar estado da sessão:", e);
}
}
resetProjectState();
const bpmInput = document.getElementById("bpm-input");
if (bpmInput) bpmInput.value = 140;
["bars-input", "compasso-a-input", "compasso-b-input"].forEach((id) => {
const el = document.getElementById(id);
if (el) el.value = id === "bars-input" ? 1 : 4;
});
renderAll();
}
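// Accepts .json (beat index), .mmpz (a zipped LMMS project, unpacked with JSZip)
// and plain .mmp/XML files; the parsed content is broadcast to the room via sendAction.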
export async function handleFileLoad(file) {
let xmlContent = "";
try {
const lower = file.name.toLowerCase();
if (lower.endsWith(".json")) {
const json = JSON.parse(await file.text());
sendAction({ type: "LOAD_BEAT_INDEX", data: json });
return;
}
if (lower.endsWith(".mmpz")) {
// eslint-disable-next-line no-undef
const jszip = new JSZip();
const zip = await jszip.loadAsync(file);
const projectFile = Object.keys(zip.files).find((name) =>
name.toLowerCase().endsWith(".mmp")
);
if (!projectFile)
throw new Error(
"Could not find a .mmp file inside the .mmpz"
);
xmlContent = await zip.files[projectFile].async("string");
} else {
xmlContent = await file.text();
}
sendAction({ type: "LOAD_PROJECT", xml: xmlContent });
} catch (error) {
console.error("Erro ao carregar o projeto:", error);
alert(`Erro ao carregar projeto: ${error.message}`);
}
}
export async function loadBeatIndexFromServer(fileName) {
const response = await fetch(`src_mmpSearch/metadata/${fileName}.json`);
if (!response.ok) throw new Error("Não foi possível carregar beat index");
const data = await response.json();
sendAction({ type: "LOAD_BEAT_INDEX", data });
}
export async function loadProjectFromServer(fileName) {
try {
const response = await fetch(`src_mmpSearch/mmp/${fileName}`);
if (!response.ok)
throw new Error(`Could not load the file ${fileName}`);
const xmlContent = await response.text();
sendAction({ type: "LOAD_PROJECT", xml: xmlContent });
return true;
} catch (error) {
console.error("Erro ao carregar projeto do servidor:", error);
alert(`Erro ao carregar projeto: ${error.message}`);
return false;
}
}
// =================================================================
// HELPER: PARSE A SINGLE INSTRUMENT TRACK
// =================================================================
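// Converts a <track type="0"> node into the internal track object used by the
// pattern editor: { name, type: "plugin" | "sampler", samplePath, patterns:
// [{ name, steps, notes, pos }], baseNote, pitch, volume, pan, instrumentXml, ... }.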
function parseInstrumentNode(
trackNode,
sortedBBTrackNameNodes, // this argument is now ignored to avoid the mapping bug (see note below)
pathMap,
parentBasslineId = null
) {
const instrumentNode = trackNode.querySelector("instrument");
const instrumentTrackNode = trackNode.querySelector("instrumenttrack");
if (!instrumentNode || !instrumentTrackNode) return null;
const trackName = trackNode.getAttribute("name");
const instrumentName = instrumentNode.getAttribute("name");
// Pattern logic:
// FIX: iterate directly over the patterns found in the instrument's XML.
// We no longer try to map them 1-to-1 to timeline clips, because Beat/Bassline
// instruments reuse the same pattern several times.
const allPatternsNodeList = trackNode.querySelectorAll("pattern");
// Sort the patterns by position (matters for the Song Editor, harmless for the BB Editor)
const allPatternsArray = Array.from(allPatternsNodeList).sort((a, b) => {
return (
(parseInt(a.getAttribute("pos"), 10) || 0) -
(parseInt(b.getAttribute("pos"), 10) || 0)
);
});
// If the XML has no patterns, create an empty one so the UI does not break
let patterns = [];
if (allPatternsArray.length > 0) {
patterns = allPatternsArray.map((patternNode, index) => {
// Use the name attribute, or fall back to a generic one
const patternName = patternNode.getAttribute("name") || `Pattern ${index}`;
const patternSteps = parseInt(patternNode.getAttribute("steps"), 10) || 16;
const steps = new Array(patternSteps).fill(false);
const notes = [];
// In the source XML: 1 bar = 192 ticks in 4/4,
// so 1 beat = 48 ticks and 1 step (1/16) = 12 ticks (the module-level TICKS_PER_STEP).
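// Worked example: a note at pos="36" lands on step 36 / 12 = 3 (the 4th 1/16),
// and a bar boundary falls every 192 / 12 = 16 steps.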
patternNode.querySelectorAll("note").forEach((noteNode) => {
const pos = parseInt(noteNode.getAttribute("pos"), 10) || 0;
const rawLen = parseInt(noteNode.getAttribute("len"), 10) || 0;
// LMMS usually saves one-shot notes with a negative len (e.g. -192).
// In our DAW we treat that as 1 step (1/16) = 12 ticks.
const len = rawLen < 0 ? TICKS_PER_STEP : rawLen;
notes.push({
pos,
len,
key: parseInt(noteNode.getAttribute("key"), 10),
vol: parseInt(noteNode.getAttribute("vol"), 10),
pan: parseInt(noteNode.getAttribute("pan"), 10),
});
// stepIndex must also use the 12-tick step size
const stepIndex = Math.floor(pos / TICKS_PER_STEP);
if (stepIndex < patternSteps) steps[stepIndex] = true;
});
return {
name: patternName,
steps: steps,
notes: notes,
pos: parseInt(patternNode.getAttribute("pos"), 10) || 0,
};
});
} else {
// Fallback: no pattern found in the XML, create an empty one
patterns.push({
name: "Pattern 0",
steps: new Array(16).fill(false),
notes: [],
pos: 0
});
}
// Sample vs. plugin logic
let finalSamplePath = null;
let trackType = "plugin";
if (instrumentName === "audiofileprocessor") {
trackType = "sampler";
const afpNode = instrumentNode.querySelector("audiofileprocessor");
const sampleSrc = afpNode ? afpNode.getAttribute("src") : null;
if (sampleSrc) {
const filename = sampleSrc.split("/").pop();
if (pathMap[filename]) {
finalSamplePath = pathMap[filename];
} else {
let cleanSrc = sampleSrc.startsWith("samples/")
? sampleSrc.substring("samples/".length)
: sampleSrc;
finalSamplePath = `${SAMPLE_SRC}/${cleanSrc}`;
}
}
}
const volFromFile = parseFloat(instrumentTrackNode.getAttribute("vol"));
const panFromFile = parseFloat(instrumentTrackNode.getAttribute("pan"));
const baseNoteFromFile = parseInt(instrumentTrackNode.getAttribute("basenote"), 10);
const pitchFromFile = parseFloat(instrumentTrackNode.getAttribute("pitch"));
const baseNote = !isNaN(baseNoteFromFile) ? baseNoteFromFile : 60; // fallback C4 (MIDI 60)
const pitch = !isNaN(pitchFromFile) ? pitchFromFile : 0;
return {
id: Date.now() + Math.random(),
name: trackName,
type: trackType,
samplePath: finalSamplePath,
patterns,
activePatternIndex: 0, // avoids an undefined index
baseNote, // important for sample pitch
pitch, // (optional, for transposition later)
volume: !isNaN(volFromFile) ? volFromFile / 100 : DEFAULT_VOLUME,
pan: !isNaN(panFromFile) ? panFromFile / 100 : DEFAULT_PAN,
instrumentName,
instrumentXml: instrumentNode.innerHTML,
parentBasslineId,
};
}
// =================================================================
// 🔥 MAIN PARSING FUNCTION
// =================================================================
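// Parses an LMMS .mmp XML string into appState:
// <head> -> BPM / time-signature inputs, track[type="2"] -> audio lanes/clips,
// track[type="0"] at the song root -> Song Editor instruments, track[type="1"] ->
// Beat/Bassline containers plus the shared instrument rack they reference.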
export async function parseMmpContent(xmlString) {
resetProjectState();
initializeAudioContext();
appState.global.justReset = xmlString === DEFAULT_PROJECT_XML;
const audioContainer = document.getElementById("audio-track-container");
if (audioContainer) {
audioContainer.innerHTML = "";
}
const parser = new DOMParser();
const xmlDoc = parser.parseFromString(xmlString, "application/xml");
appState.global.originalXmlDoc = xmlDoc;
// Global settings (BPM, time signature)
const head = xmlDoc.querySelector("head");
if (head) {
const setVal = (id, attr, def) => {
const el = document.getElementById(id);
if (el) el.value = head.getAttribute(attr) || def;
};
setVal("bpm-input", "bpm", 140);
setVal("compasso-a-input", "timesig_numerator", 4);
setVal("compasso-b-input", "timesig_denominator", 4);
}
const pathMap = getSamplePathMap();
// -------------------------------------------------------------
// 2.1) EXTRACT SAMPLE TRACKS FROM THE SONG EDITOR (track[type="2"])
// -------------------------------------------------------------
const secondsPerStep = getSecondsPerStep(); // also used by the JSON parser above
const sampleTrackNodes = Array.from(
xmlDoc.querySelectorAll('song > trackcontainer > track[type="2"]')
);
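// Expected shape (abridged, as assumed by the selectors below):
// <track type="2" name="..."><sampletrack vol="100" pan="0"/>
//   <sampletco src="samples/{project}/{file}.wav" pos="0" len="192" muted="0"/></track>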
sampleTrackNodes.forEach((node, idx) => {
const trackName = node.getAttribute("name") || `Audio ${idx + 1}`;
// create the audio lane
const trackId = `sample_lane_${Date.now()}_${idx}`;
appState.audio.tracks.push({ id: trackId, name: trackName });
// pan/vol defaults from the <sampletrack> node
const st = node.querySelector("sampletrack");
const laneVol = st ? Number(st.getAttribute("vol") ?? 100) / 100 : 1;
const lanePan = st ? Number(st.getAttribute("pan") ?? 0) / 100 : 0;
// each <sampletco> is a clip
const clips = Array.from(node.querySelectorAll(":scope > sampletco"));
clips.forEach((c, cidx) => {
// resolve the path via the manifest; fall back to /samples
const src = c.getAttribute("src") || "";
const fileName = src.split(/[\\/]/).pop(); // "sample.wav"
const projectFolder = encodeURIComponent(lastFolder(src) || ""); // "project_name"
const encodedFile = encodeURIComponent(fileName);
// try the manifest first
let resolvedUrl = fileName ? pathMap[fileName] : null;
// if the manifest misses it, or the entry is not under /{project}/, build the server-convention URL
if (!resolvedUrl || (projectFolder && !resolvedUrl.includes(`/${projectFolder}/`))) {
resolvedUrl = projectFolder
? `${SAMPLE_SRC}/samples/${projectFolder}/${encodedFile}`
: null;
}
if (!resolvedUrl) return;
const posTicks = Number(c.getAttribute("pos") || 0);
const lenTicks = Number(c.getAttribute("len") || 0);
const startSec = (posTicks / TICKS_PER_STEP) * secondsPerStep;
const durSec = (lenTicks / TICKS_PER_STEP) * secondsPerStep;
const clipId = `clip_${trackId}_${cidx}`;
addAudioClipToTimeline(resolvedUrl, trackId, startSec, clipId, fileName);
const muted = String(c.getAttribute("muted") ?? "0") === "1";
updateAudioClipProperties(clipId, {
durationInSeconds: durSec || 0,
volume: muted ? 0 : laneVol,
pan: isNaN(lanePan) ? 0 : lanePan,
});
});
});
// 1. Identify the beat/pattern columns (used to map steps)
// They normally live inside the first Bassline container
const bbTrackNodes = Array.from(xmlDoc.querySelectorAll('track[type="1"]'));
// Each BBTrack in the Song Editor represents a "column/pattern" (pos 0, 192, 384...),
// so we use the BBTrack array itself to name the patterns:
const sortedBBTrackNameNodes = bbTrackNodes;
// -------------------------------------------------------------
// 2. EXTRACT INSTRUMENTS FROM THE ROOT (SONG EDITOR)
// -------------------------------------------------------------
// Only the instruments that live directly in the Song Editor (not inside BBTracks)
const songInstrumentNodes = Array.from(
xmlDoc.querySelectorAll('song > trackcontainer > track[type="0"]')
);
const songTracks = songInstrumentNodes
.map((node) =>
parseInstrumentNode(node, sortedBBTrackNameNodes, pathMap, null)
) // null = no parent
.filter((t) => t !== null);
// -------------------------------------------------------------
// 3. EXTRACT THE BASSLINE TRACKS (BBTracks) AND THE SHARED RACK
// -------------------------------------------------------------
const bbRackNode = bbTrackNodes.find((n) =>
n.querySelector('bbtrack > trackcontainer > track[type="0"]')
);
// shared rack ID
const rackId = bbRackNode
? `bbRack_${Date.now()}_${Math.random().toString(36).slice(2)}`
: null;
// 3.2) Parse the instruments ONCE (from the rack)
let bbRackInstruments = [];
if (bbRackNode && rackId) {
const internalInstrumentNodes = Array.from(
bbRackNode.querySelectorAll('bbtrack > trackcontainer > track[type="0"]')
);
bbRackInstruments = internalInstrumentNodes
.map((node) =>
parseInstrumentNode(node, sortedBBTrackNameNodes, pathMap, rackId)
)
.filter((t) => t !== null);
}
// 3.3) Create the "Caixa/Kick/..." containers as patterns (with no instruments of their own)
const basslineContainers = bbTrackNodes
.map((trackNode, idx) => {
const trackName = trackNode.getAttribute("name") || "Beat/Bassline";
const playlistClips = Array.from(
trackNode.querySelectorAll(":scope > bbtco")
).map((bbtco) => ({
pos: parseInt(bbtco.getAttribute("pos"), 10) || 0,
len: parseInt(bbtco.getAttribute("len"), 10) || 192,
name: trackName,
}));
if (playlistClips.length === 0) return null;
return {
id: `bassline_${Date.now()}_${Math.random().toString(36).slice(2)}`,
name: trackName,
type: "bassline",
playlist_clips: playlistClips,
// 🔥 important: which "column/pattern" this BBTrack represents
patternIndex: idx,
// 🔥 important: where the instruments come from
instrumentSourceId: rackId,
volume: 1,
pan: 0,
patterns: [],
isMuted: trackNode.getAttribute("muted") === "1",
};
})
.filter(Boolean);
// -------------------------------------------------------------
// 4. COMBINE AND FINALIZE
// -------------------------------------------------------------
// The final flat list contains:
// 1) Song Editor instruments
// 2) Instruments from the Beat/Bassline Editor's shared rack
// 3) Containers (Caixa/Kick/...) that only reference the rack
const newTracks = [
...songTracks,
...bbRackInstruments,
...basslineContainers,
];
// Initialize audio only for real instruments
newTracks.forEach((track) => {
if (track.type !== "bassline") {
track.volumeNode = new Tone.Volume(Tone.gainToDb(track.volume));
track.pannerNode = new Tone.Panner(track.pan);
track.volumeNode.connect(track.pannerNode);
track.pannerNode.connect(getMainGainNode());
}
});
// Set the timeline length
let isFirstTrackWithNotes = true;
newTracks.forEach((track) => {
if (track.type === "bassline" || !isFirstTrackWithNotes) return;
const activePattern = track.patterns?.[track.activePatternIndex || 0];
if (!activePattern) return;
let bars = 1;
// If there is piano-roll data, size the timeline by the end of the last note
if (activePattern.notes && activePattern.notes.length > 0) {
const TICKS_PER_BAR = 192; // LMMS, 4/4 time (TICKS_PER_STEP = 12 is the module constant)
let maxEndTick = 0;
activePattern.notes.forEach((n) => {
const pos = parseInt(n.pos, 10) || 0;
const rawLen = parseInt(n.len, 10) || 0;
const len = rawLen < 0 ? TICKS_PER_STEP : rawLen; // fallback
maxEndTick = Math.max(maxEndTick, pos + Math.max(len, TICKS_PER_STEP));
});
bars = Math.max(1, Math.ceil(maxEndTick / TICKS_PER_BAR));
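// e.g. a last note at pos=372 with len=-192 is clamped to 12 ticks,
// ends at tick 384, and 384 / 192 rounds up to 2 bars.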
}
// Otherwise, fall back to the regular step sequencer
else if (activePattern.steps && activePattern.steps.length > 0) {
bars = Math.max(1, Math.ceil(activePattern.steps.length / 16));
}
const barsInput = document.getElementById("bars-input");
if (barsInput) barsInput.value = String(bars);
isFirstTrackWithNotes = false;
});
// Load samples/plugins
try {
const promises = newTracks
.filter((t) => t.type !== "bassline")
.map((track) => loadAudioForTrack(track));
await Promise.all(promises);
} catch (error) {
console.error("Erro ao carregar áudios:", error);
}
const patternNames = basslineContainers.map(b => b.name); // ["Caixa","Kick","Batida/Linha de Baixo 2",...]
newTracks.forEach(t => {
if (t.type === "bassline") return;
(t.patterns || []).forEach((p, idx) => {
if (patternNames[idx]) p.name = patternNames[idx];
});
});
// Update the global state
appState.pattern.tracks = newTracks;
appState.pattern.focusedBasslineId = null; // reset the focus
const firstInst = newTracks.find((t) => t.type !== "bassline");
appState.pattern.activeTrackId = firstInst ? firstInst.id : null;
appState.pattern.activePatternIndex = 0;
loadStateFromSession();
await Promise.resolve();
renderAll();
}
// --------------------------------------------------------------
// FILE GENERATION (EXPORT)
// --------------------------------------------------------------
export function generateMmpFile() {
if (appState.global.originalXmlDoc) {
modifyAndSaveExistingMmp();
} else {
generateNewMmp();
}
}
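// Clones the original XML document (or the default template), writes the current
// BPM / bars / time-signature inputs into <head>, and rewrites the first
// Beat/Bassline track container with the instrument tracks currently in appState.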
function generateXmlFromState() {
if (!appState.global.originalXmlDoc) {
const parser = new DOMParser();
appState.global.originalXmlDoc = parser.parseFromString(
DEFAULT_PROJECT_XML,
"application/xml"
);
}
const xmlDoc = appState.global.originalXmlDoc.cloneNode(true);
const head = xmlDoc.querySelector("head");
if (head) {
head.setAttribute("bpm", document.getElementById("bpm-input").value || 140);
head.setAttribute(
"num_bars",
document.getElementById("bars-input").value || 1
);
head.setAttribute(
"timesig_numerator",
document.getElementById("compasso-a-input").value || 4
);
head.setAttribute(
"timesig_denominator",
document.getElementById("compasso-b-input").value || 4
);
}
// Simplified export: put every real instrument into the first container
const bbTrackContainer = xmlDoc.querySelector(
'track[type="1"] > bbtrack > trackcontainer'
);
if (bbTrackContainer) {
bbTrackContainer
.querySelectorAll('track[type="0"]')
.forEach((node) => node.remove());
const tracksXml = appState.pattern.tracks
.filter((t) => t.type !== "bassline")
.map((track) => createTrackXml(track))
.join("");
const tempDoc = new DOMParser().parseFromString(
`<root>${tracksXml}</root>`,
"application/xml"
);
Array.from(tempDoc.documentElement.children).forEach((newTrackNode) => {
bbTrackContainer.appendChild(newTrackNode);
});
}
const serializer = new XMLSerializer();
return serializer.serializeToString(xmlDoc);
}
export function syncPatternStateToServer() {
if (!window.ROOM_NAME) return;
const currentXml = generateXmlFromState();
sendAction({ type: "SYNC_PATTERN_STATE", xml: currentXml });
saveStateToSession();
}
function createTrackXml(track) {
if (!track.patterns || track.patterns.length === 0) return "";
const ticksPerStep = 12; // 1 step (1/16) in LMMS = 12 ticks (bar = 192 in 4/4)
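// e.g. an active step at index 5 is exported below as pos = 5 * 12 = 60 ticks.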
const lmmsVolume = Math.round(track.volume * 100);
const lmmsPan = Math.round(track.pan * 100);
const instrName = track.instrumentName || "kicker";
const instrXml =
track.instrumentXml ||
`<kicker><env amt="0" attack="0.01" hold="0.1" decay="0.1" release="0.1" sustain="0.5" sync_mode="0"/></kicker>`;
const patternsXml = track.patterns
.map((pattern) => {
let patternNotesXml = "";
if (
track.type === "plugin" &&
pattern.notes &&
pattern.notes.length > 0
) {
patternNotesXml = pattern.notes
.map(
(note) =>
`<note vol="${note.vol}" len="${note.len}" pos="${note.pos}" pan="${note.pan}" key="${note.key}"/>`
)
.join("\n ");
} else if (pattern.steps) {
patternNotesXml = pattern.steps
.map((isActive, index) => {
if (isActive) {
const notePos = Math.round(index * ticksPerStep);
return `<note vol="100" len="${NOTE_LENGTH}" pos="${notePos}" pan="0" key="57"/>`;
}
return "";
})
.join("\n ");
}
return `<pattern type="0" pos="${pattern.pos}" muted="0" steps="${
pattern.steps ? pattern.steps.length : 16
}" name="${pattern.name}">
${patternNotesXml}
</pattern>`;
})
.join("\n ");
return `
<track type="0" solo="0" muted="0" name="${track.name}">
<instrumenttrack vol="${lmmsVolume}" pitch="0" fxch="0" pitchrange="1" basenote="57" usemasterpitch="1" pan="${lmmsPan}">
<instrument name="${instrName}">
${instrXml}
</instrument>
<fxchain enabled="0" numofeffects="0"/>
</instrumenttrack>
${patternsXml}
</track>`;
}
function modifyAndSaveExistingMmp() {
const content = generateXmlFromState();
downloadFile(content, "projeto_editado.mmp");
}
function generateNewMmp() {
const content = generateXmlFromState();
downloadFile(content, "novo_projeto.mmp");
}
function downloadFile(content, fileName) {
const blob = new Blob([content], { type: "application/xml;charset=utf-8" });
const url = URL.createObjectURL(blob);
const a = document.createElement("a");
a.href = url;
a.download = fileName;
document.body.appendChild(a);
a.click();
document.body.removeChild(a);
URL.revokeObjectURL(url);
}
export { generateXmlFromState as generateXmlFromStateExported };