// js/audio.js
import { appState } from "./state.js";
import { highlightStep, updateAudioEditorUI, updatePlayheadVisual, resetPlayheadVisual } from "./ui.js";
import { getTotalSteps } from "./utils.js";
import { PIXELS_PER_STEP } from "./config.js";

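// Shared Web Audio nodes, created lazily by initializeAudioContext():
// the master chain is mainGainNode -> masterPannerNode -> destination.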
let audioContext;
let mainGainNode;
let masterPannerNode;

const timerDisplay = document.getElementById('timer-display');

export function getAudioContext() {
  return audioContext;
}

export function getMainGainNode() {
  return mainGainNode;
}

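// Creates the AudioContext and master gain/pan chain on first use, and resumes a
// suspended context (most browsers keep contexts suspended until a user gesture).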
export function initializeAudioContext() {
  if (!audioContext) {
    audioContext = new (window.AudioContext || window.webkitAudioContext)();
    mainGainNode = audioContext.createGain();
    masterPannerNode = audioContext.createStereoPanner();

    mainGainNode.connect(masterPannerNode);
    masterPannerNode.connect(audioContext.destination);
  }
  if (audioContext.state === "suspended") {
    audioContext.resume();
  }
}

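// Master output controls; new values take effect immediately at the current audio-clock time.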
export function updateMasterVolume(volume) {
  if (mainGainNode) {
    mainGainNode.gain.setValueAtTime(volume, audioContext.currentTime);
  }
}

export function updateMasterPan(pan) {
  if (masterPannerNode) {
    masterPannerNode.pan.setValueAtTime(pan, audioContext.currentTime);
  }
}

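// Formats elapsed milliseconds as MM:SS:CC (minutes, seconds, centiseconds) for the transport timer.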
function formatTime(milliseconds) {
  const totalSeconds = Math.floor(milliseconds / 1000);
  const minutes = Math.floor(totalSeconds / 60).toString().padStart(2, '0');
  const seconds = (totalSeconds % 60).toString().padStart(2, '0');
  const centiseconds = Math.floor((milliseconds % 1000) / 10).toString().padStart(2, '0');
  return `${minutes}:${seconds}:${centiseconds}`;
}

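// Short metronome click: a 50 ms sine blip at 1000 Hz on the downbeat, 800 Hz on other
// beats, faded out with an exponential ramp to avoid a hard cut.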
export function playMetronomeSound(isDownbeat) {
  initializeAudioContext();
  const oscillator = audioContext.createOscillator();
  const gainNode = audioContext.createGain();
  const frequency = isDownbeat ? 1000 : 800;
  oscillator.frequency.setValueAtTime(frequency, audioContext.currentTime);
  oscillator.type = "sine";
  gainNode.gain.setValueAtTime(1, audioContext.currentTime);
  gainNode.gain.exponentialRampToValueAtTime(0.00001, audioContext.currentTime + 0.05);
  oscillator.connect(gainNode);
  gainNode.connect(mainGainNode);
  oscillator.start(audioContext.currentTime);
  oscillator.stop(audioContext.currentTime + 0.05);
}

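// Plays one sample hit. Uses the track's decoded AudioBuffer routed through its own gain
// node when available; otherwise falls back to a plain HTMLAudioElement built from filePath.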
export function playSample(filePath, trackId) {
  initializeAudioContext();
  if (!filePath) return;

  const track = trackId ? appState.tracks.find((t) => t.id == trackId) : null;

  if (!track || !track.audioBuffer) {
    const audio = new Audio(filePath);
    audio.play();
    return;
  }

  const source = audioContext.createBufferSource();
  source.buffer = track.audioBuffer;

  if (track.gainNode) {
    source.connect(track.gainNode);
  } else {
    source.connect(mainGainNode);
  }

  source.start(0);
}

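// Sequencer callback, fired once per step (a quarter of a beat). Clears the previous step
// highlight, updates the timer readout, plays the metronome on beat boundaries, triggers
// every track whose active pattern has the current step armed, then advances and wraps the
// step counter.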
function tick() {
  const totalSteps = getTotalSteps();
  if (totalSteps === 0 || !appState.isPlaying) {
    stopPlayback();
    return;
  }

  const lastStepIndex = appState.currentStep === 0 ? totalSteps - 1 : appState.currentStep - 1;
  highlightStep(lastStepIndex, false);

  const bpm = parseInt(document.getElementById("bpm-input").value, 10) || 120;
  const stepInterval = (60 * 1000) / (bpm * 4);
  const currentTime = appState.currentStep * stepInterval;
  if (timerDisplay) {
    timerDisplay.textContent = formatTime(currentTime);
  }

  if (appState.metronomeEnabled) {
    const noteValue = parseInt(document.getElementById("compasso-b-input").value, 10) || 4;
    const stepsPerBeat = 16 / noteValue;
    if (appState.currentStep % stepsPerBeat === 0) {
      playMetronomeSound(appState.currentStep % (stepsPerBeat * 4) === 0);
    }
  }

  appState.tracks.forEach((track) => {
    if (!track.patterns || track.patterns.length === 0) return;

    const activePattern = track.patterns[appState.activePatternIndex];

    if (activePattern && activePattern.steps[appState.currentStep] && track.samplePath) {
      playSample(track.samplePath, track.id);
    }
  });

  highlightStep(appState.currentStep, true);
  appState.currentStep = (appState.currentStep + 1) % totalSteps;
}

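// Starts the sequencer. The step interval is derived from the BPM input (four steps per
// beat), the play button icon flips to pause, one tick runs immediately so the first step
// sounds without delay, and subsequent ticks are scheduled with setInterval.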
export function startPlayback() {
  if (appState.isPlaying || appState.tracks.length === 0) return;
  initializeAudioContext();

  if (appState.currentStep === 0) {
    rewindPlayback();
  }

  const bpm = parseInt(document.getElementById("bpm-input").value, 10) || 120;
  const stepInterval = (60 * 1000) / (bpm * 4);

  if (appState.playbackIntervalId) clearInterval(appState.playbackIntervalId);

  appState.isPlaying = true;
  document.getElementById("play-btn").classList.remove("fa-play");
  document.getElementById("play-btn").classList.add("fa-pause");

  tick();
  appState.playbackIntervalId = setInterval(tick, stepInterval);
}

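// Stops the sequencer: clears the interval and step highlights, resets the step counter
// and timer display, and restores the play icon.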
export function stopPlayback() {
  if (appState.playbackIntervalId) {
    clearInterval(appState.playbackIntervalId);
  }
  appState.playbackIntervalId = null;
  appState.isPlaying = false;

  document.querySelectorAll('.step.playing').forEach(s => s.classList.remove('playing'));

  appState.currentStep = 0;

  if (timerDisplay) timerDisplay.textContent = '00:00:00';

  const playBtn = document.getElementById("play-btn");
  if (playBtn) {
    playBtn.classList.remove("fa-pause");
    playBtn.classList.add("fa-play");
  }
}

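// Moves the sequencer back to step 0; when stopped, also clears the last highlighted step
// and resets the timer display.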
export function rewindPlayback() {
  const lastStep = appState.currentStep > 0 ? appState.currentStep - 1 : getTotalSteps() - 1;
  appState.currentStep = 0;
  if (!appState.isPlaying) {
    if (timerDisplay) timerDisplay.textContent = '00:00:00';
    highlightStep(lastStep, false);
  }
}

export function togglePlayback() {
  initializeAudioContext();
  if (appState.isPlaying) {
    stopPlayback();
  } else {
    appState.currentStep = 0;
    startPlayback();
  }
}

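// requestAnimationFrame loop that drives the audio-editor playhead. Elapsed audio-clock
// time is converted to pixels via PIXELS_PER_STEP and the current BPM; the position wraps
// around the longest clip when looping, otherwise playback stops at the end of that clip.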
function animationLoop() {
  if (!appState.isAudioEditorPlaying || !audioContext) return;

  const bpm = parseInt(document.getElementById("bpm-input").value, 10) || 120;
  const stepsPerSecond = (bpm / 60) * 4;
  const pixelsPerSecond = stepsPerSecond * PIXELS_PER_STEP;

  let totalElapsedTime = (audioContext.currentTime - appState.audioEditorStartTime) + appState.audioEditorPlaybackTime;

  const maxDuration = appState.audioTracks.reduce((max, track) =>
    (track.audioBuffer && track.audioBuffer.duration > max) ? track.audioBuffer.duration : max, 0
  );

  if (appState.isAudioEditorLoopEnabled && maxDuration > 0) {
    totalElapsedTime = totalElapsedTime % maxDuration;
  } else if (totalElapsedTime >= maxDuration && maxDuration > 0) {
    stopAudioEditorPlayback();
    appState.audioEditorPlaybackTime = 0;
    resetPlayheadVisual();
    return;
  }

  const newPositionPx = totalElapsedTime * pixelsPerSecond;
  updatePlayheadVisual(newPositionPx);
  appState.audioEditorAnimationId = requestAnimationFrame(animationLoop);
}

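// Starts audio-editor playback from the saved playback offset: one AudioBufferSourceNode
// per eligible track (decoded buffer, mute/solo flags permitting), all started against the
// same audio-clock timestamp so they stay in sync, then the playhead animation kicks off.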
export function startAudioEditorPlayback() {
  if (appState.isAudioEditorPlaying || appState.audioTracks.length === 0) return;
  initializeAudioContext();

  appState.isAudioEditorPlaying = true;
  appState.activeAudioSources = [];
  updateAudioEditorUI();

  const startTime = audioContext.currentTime;
  appState.audioEditorStartTime = startTime;

  appState.audioTracks.forEach(track => {
    if (track.audioBuffer && !track.isMuted && track.isSoloed) {
      if (appState.audioEditorPlaybackTime >= track.audioBuffer.duration) return;

      const source = audioContext.createBufferSource();
      source.buffer = track.audioBuffer;
      source.loop = appState.isAudioEditorLoopEnabled;
      source.connect(track.gainNode);
      source.start(startTime, appState.audioEditorPlaybackTime);
      appState.activeAudioSources.push(source);
    }
  });

  if (appState.activeAudioSources.length > 0) {
    if (appState.audioEditorAnimationId) {
      cancelAnimationFrame(appState.audioEditorAnimationId);
    }
    animationLoop();
  } else {
    appState.isAudioEditorPlaying = false;
    updateAudioEditorUI();
  }
}

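// Stops audio-editor playback: saves the elapsed position so playback can resume from the
// same spot, cancels the playhead animation, and stops every active source node.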
export function stopAudioEditorPlayback() {
  if (!appState.isAudioEditorPlaying) return;

  let totalElapsedTime = (audioContext.currentTime - appState.audioEditorStartTime) + appState.audioEditorPlaybackTime;

  const maxDuration = appState.audioTracks.reduce((max, track) =>
    (track.audioBuffer && track.audioBuffer.duration > max) ? track.audioBuffer.duration : max, 0
  );

  // Always apply the modulo when saving the elapsed time:
  // if looping was off, totalElapsedTime is below maxDuration and the modulo is a no-op;
  // if looping was on, it wraps the value back to the correct visual position.
  if (maxDuration > 0) {
    appState.audioEditorPlaybackTime = totalElapsedTime % maxDuration;
  } else {
    appState.audioEditorPlaybackTime = totalElapsedTime;
  }

  if (appState.audioEditorAnimationId) {
    cancelAnimationFrame(appState.audioEditorAnimationId);
    appState.audioEditorAnimationId = null;
  }

  appState.activeAudioSources.forEach(source => {
    try {
      source.stop(0);
    } catch (e) { /* ignore sources that have already stopped */ }
  });

  appState.activeAudioSources = [];
  appState.isAudioEditorPlaying = false;
  updateAudioEditorUI();
}

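// Jumps the audio editor to a new time (in seconds): pauses if needed, stores the new
// offset, repositions the playhead in pixels, and resumes if playback was running.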
export function seekAudioEditor(newTime) {
  const wasPlaying = appState.isAudioEditorPlaying;
  if (wasPlaying) {
    stopAudioEditorPlayback();
  }
  appState.audioEditorPlaybackTime = newTime;
  const bpm = parseInt(document.getElementById("bpm-input").value, 10) || 120;
  const stepsPerSecond = (bpm / 60) * 4;
  const pixelsPerSecond = stepsPerSecond * PIXELS_PER_STEP;
  const newPositionPx = newTime * pixelsPerSecond;
  updatePlayheadVisual(newPositionPx);
  if (wasPlaying) {
    startAudioEditorPlayback();
  }
}

export function restartAudioEditorIfPlaying() {
  if (appState.isAudioEditorPlaying) {
    stopAudioEditorPlayback();
    startAudioEditorPlayback();
  }
}