// 128 lines · 4.0 KiB · JavaScript (hosting-page metadata, kept as a comment)
// js/state.js
|
|
import { DEFAULT_VOLUME, DEFAULT_PAN } from "./config.js";
|
|
import {
|
|
initializeAudioContext,
|
|
getAudioContext,
|
|
getMainGainNode,
|
|
} from "./audio.js";
|
|
import { renderApp } from "./ui.js";
|
|
import { getTotalSteps } from "./utils.js";
|
|
|
|
// Central mutable application state, shared by all modules.
// NOTE(review): exported as `let` — other modules must mutate it in place,
// not reassign it (imported bindings cannot be reassigned from outside).
export let appState = {
  tracks: [],               // track objects: { id, name, samplePath, audioBuffer, patterns, volume, pan, gainNode, pannerNode }
  activeTrackId: null,      // id of the currently selected track, or null when there are no tracks
  activePatternIndex: 0,    // pattern index is GLOBAL again (moved back from per-track state)
  isPlaying: false,         // whether the sequencer transport is running
  playbackIntervalId: null, // setInterval handle for the playback loop, null when stopped
  currentStep: 0,           // step cursor of the playback loop
  metronomeEnabled: false,  // whether the click track is audible
  originalXmlDoc: null,     // last imported XML document, kept for re-export
  masterVolume: DEFAULT_VOLUME, // master output gain
  masterPan: DEFAULT_PAN,       // master stereo pan (-1 .. 1)
};
|
|
|
|
/**
 * Fetches and decodes the sample referenced by `track.samplePath`, storing
 * the decoded result on `track.audioBuffer` (mutates the track in place).
 *
 * On any failure (fetch error, non-OK response, decode error) the error is
 * logged and `track.audioBuffer` is reset to null so the track stays usable
 * (it simply plays nothing).
 *
 * @param {Object} track - Track whose `samplePath` points at the sample file.
 * @returns {Promise<Object>} The same track object.
 */
export async function loadAudioForTrack(track) {
  if (!track.samplePath) return track;
  try {
    // FIX: the original captured the context once and, when it was not yet
    // initialized, called initializeAudioContext() but kept using the stale
    // null reference — crashing at decodeAudioData. Re-read it after init.
    let audioContext = getAudioContext();
    if (!audioContext) {
      initializeAudioContext();
      audioContext = getAudioContext();
    }
    const response = await fetch(track.samplePath);
    if (!response.ok) throw new Error(`Erro ao buscar o sample: ${response.statusText}`);
    const arrayBuffer = await response.arrayBuffer();
    track.audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
  } catch (error) {
    console.error(`Falha ao carregar áudio para a trilha ${track.name}:`, error);
    track.audioBuffer = null; // keep the track alive, just silent
  }
  return track;
}
|
|
|
|
/**
 * Appends a new, empty track to the application state and re-renders the UI.
 *
 * The new track mirrors the pattern layout of the first existing track so
 * that all tracks stay in sync; when it is the very first track it starts
 * with a single empty pattern. Also builds and wires the track's audio
 * chain: gain -> panner -> master gain.
 */
export function addTrackToState() {
  initializeAudioContext();
  const ctx = getAudioContext();
  const masterGain = getMainGainNode();
  const totalSteps = getTotalSteps();
  const template = appState.tracks[0];

  // Copy the pattern layout (names, lengths, positions) with all steps off.
  const patterns = template
    ? template.patterns.map(({ name, steps, pos }) => ({
        name,
        steps: new Array(steps.length).fill(false),
        pos,
      }))
    : [{ name: "Pattern 1", steps: new Array(totalSteps).fill(false), pos: 0 }];

  const gainNode = ctx.createGain();
  const pannerNode = ctx.createStereoPanner();

  const newTrack = {
    id: Date.now(), // NOTE(review): ms timestamp — could collide on rapid adds
    name: "novo instrumento",
    samplePath: null,
    audioBuffer: null,
    patterns,
    // activePatternIndex intentionally absent: the pattern index is global.
    volume: DEFAULT_VOLUME,
    pan: DEFAULT_PAN,
    gainNode,
    pannerNode,
  };

  // Wire the per-track chain and apply the initial levels.
  gainNode.connect(pannerNode);
  pannerNode.connect(masterGain);
  gainNode.gain.value = newTrack.volume;
  pannerNode.pan.value = newTrack.pan;

  appState.tracks.push(newTrack);
  if (!appState.activeTrackId) {
    appState.activeTrackId = newTrack.id;
  }
  renderApp();
}
|
|
|
|
/**
 * Removes the most recently added track and re-renders the UI.
 * Does nothing when there are no tracks.
 */
export function removeLastTrackFromState() {
  if (appState.tracks.length === 0) return;

  const removed = appState.tracks.pop();
  // If the removed track was selected, fall back to the first remaining one.
  if (appState.activeTrackId === removed.id) {
    appState.activeTrackId = appState.tracks[0]?.id || null;
  }
  renderApp();
}
|
|
|
|
/**
 * Assigns a new sample to a track, reloads its audio buffer, and updates the
 * track's name label in the DOM (without a full re-render).
 *
 * @param {number|string} trackId - Track id (loose match: may arrive as a
 *   string from a dataset attribute).
 * @param {string} samplePath - Path/URL of the new sample file.
 */
export async function updateTrackSample(trackId, samplePath) {
  const track = appState.tracks.find((t) => t.id == trackId);
  if (!track) return;

  track.samplePath = samplePath;
  track.name = samplePath.split("/").pop(); // file name becomes the display name
  track.audioBuffer = null; // drop the stale buffer before reloading
  await loadAudioForTrack(track);

  // Patch only the name label instead of re-rendering the whole app.
  const nameEl = document.querySelector(`.track-lane[data-track-id="${trackId}"] .track-name`);
  if (nameEl) {
    nameEl.textContent = track.name;
  }
}
|
|
|
|
/**
 * Toggles one step of a track's currently active pattern on/off.
 *
 * The pattern index is GLOBAL (appState.activePatternIndex): every track
 * edits the same pattern slot.
 *
 * @param {number|string} trackId - Track id (loose match for dataset values).
 * @param {number} stepIndex - Zero-based step to flip.
 */
export function toggleStepState(trackId, stepIndex) {
  const track = appState.tracks.find((t) => t.id == trackId);
  if (!track || !track.patterns || track.patterns.length === 0) return;

  const pattern = track.patterns[appState.activePatternIndex];
  if (pattern && stepIndex < pattern.steps.length) {
    pattern.steps[stepIndex] = !pattern.steps[stepIndex];
  }
}
|
|
|
|
/**
 * Sets a track's volume, clamped to [0, 1.5] (150% allows a little headroom
 * above unity gain), and applies it to the track's gain node.
 *
 * @param {number|string} trackId - Track id (loose match for dataset values).
 * @param {number} volume - Requested gain value.
 */
export function updateTrackVolume(trackId, volume) {
  const track = appState.tracks.find((t) => t.id == trackId);
  if (!track) return;

  const clamped = Math.min(1.5, Math.max(0, volume));
  track.volume = clamped;
  if (track.gainNode) {
    // Schedule at "now" so the change is applied immediately.
    track.gainNode.gain.setValueAtTime(clamped, getAudioContext().currentTime);
  }
}
|
|
|
|
/**
 * Sets a track's stereo pan, clamped to [-1, 1], and applies it to the
 * track's panner node.
 *
 * @param {number|string} trackId - Track id (loose match for dataset values).
 * @param {number} pan - Requested pan position (-1 = left, 1 = right).
 */
export function updateTrackPan(trackId, pan) {
  const track = appState.tracks.find((t) => t.id == trackId);
  if (!track) return;

  const clamped = Math.min(1, Math.max(-1, pan));
  track.pan = clamped;
  if (track.pannerNode) {
    // Schedule at "now" so the change is applied immediately.
    track.pannerNode.pan.setValueAtTime(clamped, getAudioContext().currentTime);
  }
}