diff --git a/assets/js/creations/audio.js b/assets/js/creations/audio.js
index 42850ea..46edd0d 100644
--- a/assets/js/creations/audio.js
+++ b/assets/js/creations/audio.js
@@ -22,7 +22,6 @@ export function initializeAudioContext() {
mainGainNode = audioContext.createGain();
masterPannerNode = audioContext.createStereoPanner();
- // Roteamento: Gain Master -> Panner Master -> Saída
mainGainNode.connect(masterPannerNode);
masterPannerNode.connect(audioContext.destination);
}
@@ -75,17 +74,13 @@ export function playSample(filePath, trackId) {
const track = trackId ? appState.tracks.find((t) => t.id == trackId) : null;
- if (!track) {
+ if (!track || !track.audioBuffer) {
+    // If there is no buffer (e.g. a preview from the sample browser), play it as a plain Audio element
const audio = new Audio(filePath);
audio.play();
return;
}
- if (!track.audioBuffer) {
- console.warn(`Buffer para a trilha ${track.name} ainda não carregado.`);
- return;
- }
-
const source = audioContext.createBufferSource();
source.buffer = track.audioBuffer;
@@ -124,11 +119,20 @@ function tick() {
}
}
+  // --- START OF FIX ---
appState.tracks.forEach((track) => {
- if (track.steps[appState.currentStep] && track.samplePath) {
+    // 1. Check whether the track has any patterns
+ if (!track.patterns || track.patterns.length === 0) return;
+
+    // 2. Grab the pattern that is currently active for this track
+ const activePattern = track.patterns[track.activePatternIndex];
+
+    // 3. Check that the pattern exists and that the current step is active IN THAT pattern
+ if (activePattern && activePattern.steps[appState.currentStep] && track.samplePath) {
playSample(track.samplePath, track.id);
}
});
+  // --- END OF FIX ---
highlightStep(appState.currentStep, true);
appState.currentStep = (appState.currentStep + 1) % totalSteps;
@@ -151,31 +155,43 @@ export function startPlayback() {
}
export function stopPlayback() {
- clearInterval(appState.playbackIntervalId);
+  if (appState.playbackIntervalId) {
+ clearInterval(appState.playbackIntervalId);
+ }
appState.playbackIntervalId = null;
appState.isPlaying = false;
highlightStep(appState.currentStep - 1, false);
+  highlightStep(appState.currentStep, false); // Make sure the last "playing" step is cleared as well
appState.currentStep = 0;
if (timerDisplay) timerDisplay.textContent = '00:00:00';
- document.getElementById("play-btn").classList.remove("fa-pause");
- document.getElementById("play-btn").classList.add("fa-play");
+ const playBtn = document.getElementById("play-btn");
+ if (playBtn) {
+ playBtn.classList.remove("fa-pause");
+ playBtn.classList.add("fa-play");
+ }
}
export function rewindPlayback() {
+ const previousStep = appState.currentStep;
appState.currentStep = 0;
if (!appState.isPlaying) {
if (timerDisplay) timerDisplay.textContent = '00:00:00';
- document
- .querySelectorAll(".step.playing")
- .forEach((s) => s.classList.remove("playing"));
+ highlightStep(previousStep - 1, false);
+ highlightStep(previousStep, false);
}
}
export function togglePlayback() {
+  initializeAudioContext(); // Ensure the audio context was started by a user gesture
if (appState.isPlaying) {
- stopPlayback();
+    // Pause playback, but do not reset the position
+ clearInterval(appState.playbackIntervalId);
+ appState.playbackIntervalId = null;
+ appState.isPlaying = false;
+ document.getElementById("play-btn").classList.remove("fa-pause");
+ document.getElementById("play-btn").classList.add("fa-play");
} else {
startPlayback();
}
diff --git a/assets/js/creations/file.js b/assets/js/creations/file.js
index e31e345..dd21b72 100644
--- a/assets/js/creations/file.js
+++ b/assets/js/creations/file.js
@@ -2,7 +2,7 @@
import { appState, loadAudioForTrack } from "./state.js";
import { getTotalSteps } from "./utils.js";
import { renderApp, getSamplePathMap } from "./ui.js";
-import { NOTE_LENGTH, TICKS_PER_BAR } from "./config.js";
+import { DEFAULT_PAN, DEFAULT_VOLUME, NOTE_LENGTH, TICKS_PER_BAR } from "./config.js";
import {
initializeAudioContext,
getAudioContext,
@@ -19,9 +19,7 @@ export async function handleFileLoad(file) {
name.toLowerCase().endsWith(".mmp")
);
if (!projectFile)
- throw new Error(
- "Não foi possível encontrar um arquivo .mmp dentro do .mmpz"
- );
+ throw new Error("Não foi possível encontrar um arquivo .mmp dentro do .mmpz");
xmlContent = await zip.files[projectFile].async("string");
} else {
xmlContent = await file.text();
@@ -39,77 +37,159 @@ export async function parseMmpContent(xmlString) {
const xmlDoc = parser.parseFromString(xmlString, "application/xml");
appState.originalXmlDoc = xmlDoc;
- const newTracks = [];
+ let newTracks = [];
const head = xmlDoc.querySelector("head");
if (head) {
document.getElementById("bpm-input").value = head.getAttribute("bpm") || 140;
- document.getElementById("bars-input").value = head.getAttribute("num_bars") || 1;
document.getElementById("compasso-a-input").value = head.getAttribute("timesig_numerator") || 4;
document.getElementById("compasso-b-input").value = head.getAttribute("timesig_denominator") || 4;
}
- const sampleTrackElements = xmlDoc.querySelectorAll(
- 'instrument[name="audiofileprocessor"]'
- );
+ const allBBTrackNodes = Array.from(xmlDoc.querySelectorAll('song > trackcontainer[type="song"] > track[type="1"]'));
+ if (allBBTrackNodes.length === 0) {
+ appState.tracks = []; renderApp(); return;
+ }
+
+  // --- START OF FINAL ORDERING FIX ---
+  // The list of BB track NAMES is sorted in ASCENDING order (the correct, chronological order).
+ const sortedBBTrackNameNodes = [...allBBTrackNodes].sort((a, b) => {
+ const bbtcoA = a.querySelector('bbtco');
+ const bbtcoB = b.querySelector('bbtco');
+ const posA = bbtcoA ? parseInt(bbtcoA.getAttribute('pos'), 10) : Infinity;
+ const posB = bbtcoB ? parseInt(bbtcoB.getAttribute('pos'), 10) : Infinity;
+    return posA - posB; // Ascending order
+ });
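+  // BB tracks without a bbtco element fall back to pos = Infinity, so they sort to the end
+  // and only patterns actually placed in the song drive the chronological order.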
+
+ const dataSourceTrack = allBBTrackNodes[0];
+ appState.currentBeatBasslineName = dataSourceTrack.getAttribute("name") || "Beat/Bassline";
+ const bbTrackContainer = dataSourceTrack.querySelector('bbtrack > trackcontainer');
+ if (!bbTrackContainer) {
+ appState.tracks = []; renderApp(); return;
+ }
+
+ const instrumentTracks = bbTrackContainer.querySelectorAll('track[type="0"]');
const pathMap = getSamplePathMap();
-
- sampleTrackElements.forEach((instrumentNode) => {
- const afpNode = instrumentNode.querySelector("audiofileprocessor");
- const instrumentTrackNode = instrumentNode.parentElement;
- const trackNode = instrumentTrackNode.parentElement;
- if (!afpNode || !instrumentTrackNode || !trackNode) return;
-
- const audioContext = getAudioContext();
- const mainGainNode = getMainGainNode();
+
+ newTracks = Array.from(instrumentTracks).map(trackNode => {
+ const instrumentNode = trackNode.querySelector("instrument");
+ const instrumentTrackNode = trackNode.querySelector("instrumenttrack");
+ if (!instrumentNode || !instrumentTrackNode) return null;
- const totalSteps = getTotalSteps();
- const newSteps = new Array(totalSteps).fill(false);
+ const trackName = trackNode.getAttribute("name");
- const ticksPerStep = 12;
+ if (instrumentNode.getAttribute("name") === 'tripleoscillator') {
+ return null;
+ }
- trackNode.querySelectorAll("note").forEach((noteNode) => {
- const pos = parseInt(noteNode.getAttribute("pos"), 10);
- const stepIndex = Math.round(pos / ticksPerStep);
- if (stepIndex < totalSteps) {
- newSteps[stepIndex] = true;
- }
+ const allPatternsNodeList = trackNode.querySelectorAll("pattern");
+      // The list of pattern CONTENTS is sorted in DESCENDING order to match.
+ const allPatternsArray = Array.from(allPatternsNodeList).sort((a, b) => {
+ const posA = parseInt(a.getAttribute('pos'), 10) || 0;
+ const posB = parseInt(b.getAttribute('pos'), 10) || 0;
+      return posB - posA; // Descending order
});
+    // --- END OF FINAL ORDERING FIX ---
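+    // Hypothetical illustration of the pairing: with three BB tracks, the name list above is
+    // ordered by ascending bbtco pos (e.g. 0, 192, 384) while this content list is ordered by
+    // descending pattern pos; the map below then matches name i with pattern i, which is the
+    // correspondence this fix assumes.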
- const srcAttribute = afpNode.getAttribute("src");
- const filename = srcAttribute.split("/").pop();
- const finalSamplePath = pathMap[filename] || `src/samples/${srcAttribute}`;
+ const patterns = sortedBBTrackNameNodes.map((bbTrack, index) => {
+ const patternNode = allPatternsArray[index];
+ const bbTrackName = bbTrack.getAttribute("name") || `Pattern ${index + 1}`;
- const newTrack = {
+ if (!patternNode) {
+ const firstPattern = allPatternsArray[0];
+ const stepsLength = firstPattern ? parseInt(firstPattern.getAttribute("steps"), 10) || 16 : 16;
+ return { name: bbTrackName, steps: new Array(stepsLength).fill(false), pos: 0 };
+ }
+
+ const patternSteps = parseInt(patternNode.getAttribute("steps"), 10) || 16;
+ const steps = new Array(patternSteps).fill(false);
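+      // The value 12 assumes the LMMS default of 192 ticks per bar and 16 steps per bar (192 / 16 = 12).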
+ const ticksPerStep = 12;
+
+ patternNode.querySelectorAll("note").forEach((noteNode) => {
+ const noteLocalPos = parseInt(noteNode.getAttribute("pos"), 10);
+ const stepIndex = Math.round(noteLocalPos / ticksPerStep);
+ if (stepIndex < patternSteps) {
+ steps[stepIndex] = true;
+ }
+ });
+
+ return {
+ name: bbTrackName,
+ steps: steps,
+ pos: parseInt(patternNode.getAttribute("pos"), 10) || 0
+ };
+ });
+
+ const hasNotes = patterns.some(p => p.steps.includes(true));
+ if (!hasNotes) return null;
+
+ const afpNode = instrumentNode.querySelector("audiofileprocessor");
+ const sampleSrc = afpNode ? afpNode.getAttribute("src") : null;
+ let finalSamplePath = null;
+ if (sampleSrc) {
+ const filename = sampleSrc.split("/").pop();
+ if (pathMap[filename]) {
+ finalSamplePath = pathMap[filename];
+ } else {
+ let cleanSrc = sampleSrc;
+ if (cleanSrc.startsWith('samples/')) {
+ cleanSrc = cleanSrc.substring('samples/'.length);
+ }
+ finalSamplePath = `src/samples/${cleanSrc}`;
+ }
+ }
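+  // e.g. (hypothetical) src="drums/kick01.ogg" resolves to "src/samples/drums/kick01.ogg",
+  // unless getSamplePathMap() already maps "kick01.ogg" to another location.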
+
+ const volFromFile = parseFloat(instrumentTrackNode.getAttribute("vol"));
+ const panFromFile = parseFloat(instrumentTrackNode.getAttribute("pan"));
+ const firstPatternWithNotesIndex = patterns.findIndex(p => p.steps.includes(true));
+
+ return {
id: Date.now() + Math.random(),
- name: filename || trackNode.getAttribute("name"),
+ name: trackName,
samplePath: finalSamplePath,
- audioBuffer: null,
- steps: newSteps,
- volume: parseFloat(instrumentTrackNode.getAttribute("vol")) / 100,
- pan: parseFloat(instrumentTrackNode.getAttribute("pan")) / 100,
- gainNode: audioContext.createGain(),
- pannerNode: audioContext.createStereoPanner(),
+ patterns: patterns,
+ activePatternIndex: firstPatternWithNotesIndex !== -1 ? firstPatternWithNotesIndex : 0,
+ volume: !isNaN(volFromFile) ? volFromFile / 100 : DEFAULT_VOLUME,
+ pan: !isNaN(panFromFile) ? panFromFile / 100 : DEFAULT_PAN,
+ instrumentName: instrumentNode.getAttribute("name"),
+ instrumentXml: instrumentNode.innerHTML,
};
- newTrack.gainNode.connect(newTrack.pannerNode);
- newTrack.pannerNode.connect(mainGainNode);
- newTrack.gainNode.gain.value = newTrack.volume;
- newTrack.pannerNode.pan.value = newTrack.pan;
- newTracks.push(newTrack);
+ }).filter(track => track !== null);
+
+ let isFirstTrackWithNotes = true;
+ newTracks.forEach(track => {
+ const audioContext = getAudioContext();
+ track.gainNode = audioContext.createGain();
+ track.pannerNode = audioContext.createStereoPanner();
+ track.gainNode.connect(track.pannerNode);
+ track.pannerNode.connect(getMainGainNode());
+ track.gainNode.gain.value = track.volume;
+ track.pannerNode.pan.value = track.pan;
+
+ if (isFirstTrackWithNotes) {
+ const activeIdx = track.activePatternIndex || 0;
+ const activePattern = track.patterns[activeIdx];
+ if (activePattern) {
+ const firstPatternSteps = activePattern.steps.length;
+ const stepsPerBar = 16;
+ const requiredBars = Math.ceil(firstPatternSteps / stepsPerBar);
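+      // e.g. a 32-step active pattern yields Math.ceil(32 / 16) = 2 bars.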
+ document.getElementById("bars-input").value = requiredBars > 0 ? requiredBars : 1;
+ isFirstTrackWithNotes = false;
+ }
+ }
});
try {
const trackLoadPromises = newTracks.map(track => loadAudioForTrack(track));
await Promise.all(trackLoadPromises);
- console.log("Todos os áudios do projeto foram carregados.");
} catch (error) {
console.error("Ocorreu um erro ao carregar os áudios do projeto:", error);
}
appState.tracks = newTracks;
+ appState.activeTrackId = appState.tracks[0]?.id || null;
renderApp();
- console.log("Projeto carregado com sucesso!", appState);
}
export function generateMmpFile() {
@@ -120,8 +200,80 @@ export function generateMmpFile() {
}
}
+function createTrackXml(track) {
+ if (track.patterns.length === 0) return "";
+
+ const ticksPerStep = 12;
+ const lmmsVolume = Math.round(track.volume * 100);
+ const lmmsPan = Math.round(track.pan * 100);
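+  // Inverse of the load-time conversion: the internal 0..1 volume and -1..1 pan are scaled
+  // back to the percentage-style values stored in the .mmp attributes.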
+
+ const patternsXml = track.patterns.map(pattern => {
+ const patternNotes = pattern.steps.map((isActive, index) => {
+ if (isActive) {
+ const notePos = Math.round(index * ticksPerStep);
+ return `
Feito com MMPCreator no https://alice.ufsj.edu.br/MMPSearch/creator
+Feito com MMPCreator
]]> `;
  downloadFile(mmpContent, "novo_projeto.mmp");
}
-function modifyAndSaveExistingMmp() {
-  console.log("Modificando arquivo .mmp existente...");
-  const xmlDoc = appState.originalXmlDoc.cloneNode(true);
-  const head = xmlDoc.querySelector("head");
-  if (head) {
-    head.setAttribute("bpm", document.getElementById("bpm-input").value);
-    head.setAttribute("num_bars", document.getElementById("bars-input").value);
-    head.setAttribute(
-      "timesig_numerator",
-      document.getElementById("compasso-a-input").value
-    );
-    head.setAttribute(
-      "timesig_denominator",
-      document.getElementById("compasso-b-input").value
-    );
-  }
-  const bbTrackContainer = xmlDoc.querySelector("bbtrack > trackcontainer");
-  if (bbTrackContainer) {
-    const oldSampleTracks = bbTrackContainer.querySelectorAll(
-      'instrument[name="audiofileprocessor"]'
-    );
-    oldSampleTracks.forEach((node) => node.closest("track").remove());
-    const tracksXml = appState.tracks
-      .map((track) => createTrackXml(track))
-      .join("");
-    const tempDoc = new DOMParser().parseFromString(
-      `