mmpSearch/assets/js/creations/state.js

// js/state.js
import { DEFAULT_VOLUME, DEFAULT_PAN } from "./config.js";
import {
  initializeAudioContext,
  getAudioContext,
  getMainGainNode,
} from "./audio.js";
import { renderApp, renderAudioEditor } from "./ui.js";
import { getTotalSteps } from "./utils.js";
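// Central, mutable application state. The exported helpers below mutate it
// in place and, in most cases, trigger a re-render via renderApp() or
// renderAudioEditor().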
export let appState = {
  tracks: [],
  audioTracks: [],
  activeTrackId: null,
  activePatternIndex: 0,
  isPlaying: false,
  isAudioEditorPlaying: false,
  activeAudioSources: [],
  audioEditorStartTime: 0,
  audioEditorAnimationId: null,
  audioEditorPlaybackTime: 0,
  playbackIntervalId: null,
  currentStep: 0,
  metronomeEnabled: false,
  originalXmlDoc: null,
  currentBeatBasslineName: "New Project",
  masterVolume: DEFAULT_VOLUME,
  masterPan: DEFAULT_PAN,
};
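/**
 * Fetches and decodes the sample referenced by track.samplePath into
 * track.audioBuffer. On failure the buffer is reset to null so playback
 * code can skip the track instead of crashing.
 */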
export async function loadAudioForTrack(track) {
  if (!track.samplePath) return track;
  try {
    // Make sure the AudioContext exists before decoding; re-fetch it after
    // lazy initialization so we never call decodeAudioData on null.
    let audioContext = getAudioContext();
    if (!audioContext) {
      initializeAudioContext();
      audioContext = getAudioContext();
    }
    const response = await fetch(track.samplePath);
    if (!response.ok) throw new Error(`Failed to fetch sample: ${response.statusText}`);
    const arrayBuffer = await response.arrayBuffer();
    track.audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
  } catch (error) {
    console.error(`Failed to load audio for track ${track.name}:`, error);
    track.audioBuffer = null;
  }
  return track;
}
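/**
 * Creates an audio-editor track for the given sample, wires its
 * gain -> panner -> master chain, and kicks off the async sample load.
 */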
export function addAudioTrack(samplePath) {
  initializeAudioContext();
  const audioContext = getAudioContext();
  const mainGainNode = getMainGainNode();
  const newAudioTrack = {
    id: Date.now() + Math.random(),
    name: samplePath.split("/").pop(),
    samplePath: samplePath,
    audioBuffer: null,
    volume: DEFAULT_VOLUME,
    pan: DEFAULT_PAN,
    isMuted: false,
    isSoloed: false, // starts un-soloed
    gainNode: audioContext.createGain(),
    pannerNode: audioContext.createStereoPanner(),
  };
  newAudioTrack.gainNode.connect(newAudioTrack.pannerNode);
  newAudioTrack.pannerNode.connect(mainGainNode);
  newAudioTrack.gainNode.gain.value = newAudioTrack.volume;
  newAudioTrack.pannerNode.pan.value = newAudioTrack.pan;
  appState.audioTracks.push(newAudioTrack);
  loadAudioForTrack(newAudioTrack).then(() => {
    renderAudioEditor();
  });
}
// The editor button is now wired to solo rather than mute.
export function toggleAudioTrackSolo(trackId) {
  // Loose equality: trackId may arrive as a string from the DOM.
  const track = appState.audioTracks.find(t => t.id == trackId);
  if (track) {
    track.isSoloed = !track.isSoloed;
    renderAudioEditor(); // re-render to show the new color
  }
}
// Mute is kept for future use, but it is not connected to the button.
export function toggleAudioTrackMute(trackId) {
  const track = appState.audioTracks.find(t => t.id == trackId);
  if (track) {
    track.isMuted = !track.isMuted;
    renderAudioEditor();
  }
}
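/**
 * Adds a new sequencer track. The pattern layout is cloned from the first
 * existing track (with empty steps) so all tracks stay in sync; otherwise a
 * single blank pattern spanning the current song length is created.
 */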
export function addTrackToState() {
  initializeAudioContext();
  const audioContext = getAudioContext();
  const mainGainNode = getMainGainNode();
  const totalSteps = getTotalSteps();
  const referenceTrack = appState.tracks[0];
  const newTrack = {
    id: Date.now(),
    name: "new instrument",
    samplePath: null,
    audioBuffer: null,
    patterns: referenceTrack
      ? referenceTrack.patterns.map(p => ({ name: p.name, steps: new Array(p.steps.length).fill(false), pos: p.pos }))
      : [{ name: "Pattern 1", steps: new Array(totalSteps).fill(false), pos: 0 }],
    activePatternIndex: 0,
    volume: DEFAULT_VOLUME,
    pan: DEFAULT_PAN,
    gainNode: audioContext.createGain(),
    pannerNode: audioContext.createStereoPanner(),
  };
  newTrack.gainNode.connect(newTrack.pannerNode);
  newTrack.pannerNode.connect(mainGainNode);
  newTrack.gainNode.gain.value = newTrack.volume;
  newTrack.pannerNode.pan.value = newTrack.pan;
  appState.tracks.push(newTrack);
  renderApp();
}
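// Removes the most recently added sequencer track, if any.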
export function removeLastTrackFromState() {
  if (appState.tracks.length > 0) {
    appState.tracks.pop();
    renderApp();
  }
}
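/**
 * Points an existing track at a new sample, renames it after the file,
 * and reloads the audio buffer before re-rendering.
 */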
export async function updateTrackSample(trackId, samplePath) {
  const track = appState.tracks.find((t) => t.id == trackId);
  if (track) {
    track.samplePath = samplePath;
    track.name = samplePath.split("/").pop();
    track.audioBuffer = null;
    await loadAudioForTrack(track);
    renderApp();
  }
}
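// Toggles one step of the track's active pattern. Note that no re-render
// is triggered here; the caller is expected to update the step cell itself.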
export function toggleStepState(trackId, stepIndex) {
  const track = appState.tracks.find((t) => t.id == trackId);
  if (track && track.patterns && track.patterns.length > 0) {
    const activePattern = track.patterns[track.activePatternIndex];
    if (activePattern && activePattern.steps.length > stepIndex) {
      activePattern.steps[stepIndex] = !activePattern.steps[stepIndex];
    }
  }
}
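// Sets a track's volume, clamped to [0, 1.5] (values above 1 boost the
// signal). Works for both sequencer and audio-editor tracks.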
export function updateTrackVolume(trackId, volume) {
  const track = appState.tracks.find((t) => t.id == trackId) || appState.audioTracks.find((t) => t.id == trackId);
  if (track) {
    const clampedVolume = Math.max(0, Math.min(1.5, volume));
    track.volume = clampedVolume;
    if (track.gainNode) {
      track.gainNode.gain.setValueAtTime(clampedVolume, getAudioContext().currentTime);
    }
  }
}
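// Sets a track's stereo pan, clamped to [-1, 1] (full left to full right).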
export function updateTrackPan(trackId, pan) {
  const track = appState.tracks.find((t) => t.id == trackId) || appState.audioTracks.find((t) => t.id == trackId);
  if (track) {
    const clampedPan = Math.max(-1, Math.min(1, pan));
    track.pan = clampedPan;
    if (track.pannerNode) {
      track.pannerNode.pan.setValueAtTime(clampedPan, getAudioContext().currentTime);
    }
  }
}
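
// A minimal usage sketch from a hypothetical call site (e.g. a toolbar
// handler in ui.js; the element ID below is an assumption, not part of
// this module):
//
//   import { addTrackToState, updateTrackSample, updateTrackVolume } from "./state.js";
//
//   document.querySelector("#add-track").addEventListener("click", () => {
//     addTrackToState(); // blank track, pattern layout cloned from track 0
//   });
//
//   // Later, once the user picks a sample for the new track:
//   //   await updateTrackSample(trackId, samplePath);
//   //   updateTrackVolume(trackId, 0.8);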