mmpSearch/assets/js/creations/audio/audio_state.js

// js/audio/audio_state.js
import { DEFAULT_VOLUME, DEFAULT_PAN } from "../config.js";
import { renderAudioEditor } from "./audio_ui.js";
import { getMainGainNode, getAudioContext } from "../audio.js";
import * as Tone from "https://esm.sh/tone";
export let audioState = {
  tracks: [],
  clips: [],
  // Playback times moved from audio_audio.js into the global state
  audioEditorSeekTime: 0,
  audioEditorLogicalTime: 0,
  audioEditorStartTime: 0,
  audioEditorAnimationId: null,
  audioEditorPlaybackTime: 0,
  isAudioEditorLoopEnabled: false,
};
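// Shape reference (a sketch, inferred from how the fields are used below):
// a track is { id, name } and a clip is roughly
// { id, trackId, sourcePath, name, buffer, startTimeInSeconds, offset,
//   durationInSeconds, originalDuration, pitch, volume, pan,
//   gainNode, pannerNode, player }.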
// ==== SNAPSHOT: export the current state (tracks + clips) ====
export function getAudioSnapshot() {
  // If your canonical state lives in audioState.*, use it as-is;
  // if it lives in appState.audio.*, swap the references below.
  const tracks = (audioState.tracks || []).map(t => ({
    id: t.id, name: t.name
  }));
  const clips = (audioState.clips || []).map(c => ({
    id: c.id,
    trackId: c.trackId,
    name: c.name,
    sourcePath: c.sourcePath || null, // asset URL (must be reachable by the receiver)
    startTimeInSeconds: c.startTimeInSeconds || 0,
    durationInSeconds: c.durationInSeconds || (c.buffer?.duration || 0),
    offset: c.offset || 0,
    pitch: c.pitch || 0,
    volume: c.volume ?? 1,
    pan: c.pan ?? 0,
    originalDuration: c.originalDuration || (c.buffer?.duration || 0),
  }));
  return { tracks, clips };
}
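// Usage sketch (assumption): the snapshot is a plain serializable object, so a
// caller can persist or broadcast it as JSON. `sendToPeers` is a hypothetical transport hook.
function examplePublishSnapshot(sendToPeers) {
  const snapshot = getAudioSnapshot();
  sendToPeers(JSON.stringify(snapshot));
}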
// ==== SNAPSHOT: apply a received state ====
export async function applyAudioSnapshot(snapshot) {
  if (!snapshot) return;
  // Apply tracks (keep the sender's ids/names)
  if (Array.isArray(snapshot.tracks) && snapshot.tracks.length) {
    audioState.tracks = snapshot.tracks.map(t => ({ id: t.id, name: t.name }));
  }
  // Insert clips using the SAME ids as the sender (idempotent)
  if (Array.isArray(snapshot.clips)) {
    for (const c of snapshot.clips) {
      // Skip clips that already exist (idempotency)
      if (audioState.clips.some(x => String(x.id) === String(c.id))) continue;
      // Reuse the regular creation function (it now accepts an id and a name).
      // Signature: addAudioClipToTimeline(samplePath, trackId, start, clipId, name)
      addAudioClipToTimeline(c.sourcePath, c.trackId, c.startTimeInSeconds, c.id, c.name);
      // Apply the remaining properties (duration/offset/pitch/volume/pan) to the same id
      const idx = audioState.clips.findIndex(x => String(x.id) === String(c.id));
      if (idx >= 0) {
        const clip = audioState.clips[idx];
        clip.durationInSeconds = c.durationInSeconds;
        clip.offset = c.offset;
        clip.pitch = c.pitch;
        clip.volume = c.volume;
        clip.pan = c.pan;
        clip.originalDuration = c.originalDuration;
        // Reflect the values in the Tone nodes that were already created
        if (clip.gainNode) clip.gainNode.gain.value = clip.volume ?? 1;
        if (clip.pannerNode) clip.pannerNode.pan.value = clip.pan ?? 0;
      }
    }
  }
  // Re-render the whole editor
  renderAudioEditor();
}
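// Usage sketch (assumption): the receiving side parses the JSON payload and applies it.
// Repeated deliveries are safe because existing clip ids are skipped.
async function exampleReceiveSnapshot(jsonPayload) {
  const snapshot = JSON.parse(jsonPayload);
  await applyAudioSnapshot(snapshot);
}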
export function initializeAudioState() {
  // Dispose any Tone nodes left over from the previous session before resetting
  audioState.clips.forEach(clip => {
    if (clip.pannerNode) clip.pannerNode.dispose();
    if (clip.gainNode) clip.gainNode.dispose();
  });
  Object.assign(audioState, {
    tracks: [],
    clips: [],
    audioEditorSeekTime: 0,
    audioEditorLogicalTime: 0,
    audioEditorStartTime: 0,
    audioEditorAnimationId: null,
    audioEditorPlaybackTime: 0,
    isAudioEditorLoopEnabled: false,
  });
}
export async function loadAudioForClip(clip) {
  // If we already have a buffer (from a bounce or a paste), skip the fetch
  if (clip.buffer) {
    // Make sure both durations are populated
    if (clip.originalDuration === 0) clip.originalDuration = clip.buffer.duration;
    if (clip.durationInSeconds === 0) clip.durationInSeconds = clip.buffer.duration;
    return clip;
  }
  if (!clip.sourcePath) return clip;
  const audioCtx = getAudioContext();
  if (!audioCtx) {
    console.error("AudioContext not available to load audio.");
    return clip;
  }
  try {
    const response = await fetch(clip.sourcePath);
    if (!response.ok) throw new Error(`Failed to fetch audio: ${clip.sourcePath}`);
    const arrayBuffer = await response.arrayBuffer();
    const audioBuffer = await audioCtx.decodeAudioData(arrayBuffer);
    clip.buffer = audioBuffer;
    // Only fill the timeline duration if it was never set
    if (clip.durationInSeconds === 0) {
      clip.durationInSeconds = audioBuffer.duration;
    }
    // Keep the real buffer duration for stretch calculations
    clip.originalDuration = audioBuffer.duration;
  } catch (error) {
    console.error(`Failed to load audio for clip ${clip.name}:`, error);
  }
  return clip;
}
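// Usage sketch: decode every clip that still has no buffer, then refresh the UI.
// Assumes the clips were already registered in audioState (e.g. via a snapshot).
async function examplePreloadAllClips() {
  await Promise.all(audioState.clips.map(clip => loadAudioForClip(clip)));
  renderAudioEditor();
}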
// Id helper (fallback when the sender does not provide one)
function genClipId() {
  return (crypto?.randomUUID?.() || `clip_${Date.now()}_${Math.floor(Math.random() * 1e6)}`);
}
// Accepts a clipId and clipName coming from the sender; stays compatible with older call sites.
export function addAudioClipToTimeline(samplePath, trackId = 1, startTime = 0, clipIdOrName = null, nameOrBuffer = null, maybeBuffer = null) {
  // Compatibility: new callers pass (filePath, trackId, start, clipId, name),
  // but older ones called (filePath, trackId, start) or (filePath, trackId, start, name, buffer).
  let incomingId = null;
  let clipName = null;
  let existingBuffer = null;
  // Heuristic: if clipIdOrName looks like a UUID or a "clip_" id, treat it as an id; otherwise it is a name
  if (typeof clipIdOrName === 'string' && (clipIdOrName.startsWith('clip_') || clipIdOrName.length >= 16)) {
    incomingId = clipIdOrName;
    clipName = (typeof nameOrBuffer === 'string') ? nameOrBuffer : null;
    existingBuffer = maybeBuffer || (nameOrBuffer && typeof nameOrBuffer !== 'string' ? nameOrBuffer : null);
  } else {
    // Legacy signature: the 4th argument was the name
    clipName = (typeof clipIdOrName === 'string') ? clipIdOrName : null;
    existingBuffer = (nameOrBuffer && typeof nameOrBuffer !== 'string') ? nameOrBuffer : null;
  }
  const finalId = incomingId || genClipId();
  // Idempotency: if the id already exists, do not duplicate the clip
  if (audioState.clips.some(c => String(c.id) === String(finalId))) {
    return;
  }
  const newClip = {
    id: finalId,
    trackId: trackId,
    sourcePath: samplePath, // May be null when existingBuffer is provided
    name: clipName || (samplePath ? String(samplePath).split('/').pop() : 'Bounced Clip'),
    startTimeInSeconds: startTime,
    offset: 0,
    durationInSeconds: 0,
    originalDuration: 0,
    pitch: 0,
    volume: DEFAULT_VOLUME,
    pan: DEFAULT_PAN,
    buffer: existingBuffer || null,
    player: null,
  };
  // Linear volume (0-1)
  newClip.gainNode = new Tone.Gain(DEFAULT_VOLUME);
  newClip.pannerNode = new Tone.Panner(DEFAULT_PAN);
  // Wire everything into the Tone graph (same context)
  newClip.gainNode.connect(newClip.pannerNode);
  newClip.pannerNode.connect(getMainGainNode());
  audioState.clips.push(newClip);
  // loadAudioForClip handles the 'existingBuffer' case
  loadAudioForClip(newClip).then(() => {
    renderAudioEditor();
  });
}
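// Usage sketch: the two call signatures the argument heuristic above is meant to cover.
// The paths and ids here are illustrative, not real assets.
function exampleAddClips() {
  // Legacy style: (filePath, trackId, start) — an id is generated internally.
  addAudioClipToTimeline("assets/audio/kick.wav", 1, 0);
  // Snapshot style: (filePath, trackId, start, clipId, name) — the sender's id is reused.
  addAudioClipToTimeline("assets/audio/kick.wav", 1, 4, "clip_1700000000000_123456", "Kick");
}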
export function updateAudioClipProperties(clipId, properties) {
  const clip = audioState.clips.find(c => String(c.id) == String(clipId));
  if (clip) {
    Object.assign(clip, properties);
  }
}
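// Usage sketch: merge arbitrary clip fields in place, e.g. after a drag on the timeline.
function exampleMoveClip(clipId, newStart) {
  updateAudioClipProperties(clipId, { startTimeInSeconds: newStart });
}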
export function sliceAudioClip(clipId, sliceTimeInTimeline) {
  const originalClip = audioState.clips.find(c => String(c.id) == String(clipId));
  if (!originalClip ||
      sliceTimeInTimeline <= originalClip.startTimeInSeconds ||
      sliceTimeInTimeline >= (originalClip.startTimeInSeconds + originalClip.durationInSeconds)) {
    console.warn("Invalid cut: outside the clip bounds.");
    return;
  }
  const originalOffset = originalClip.offset || 0;
  const cutPointInClip = sliceTimeInTimeline - originalClip.startTimeInSeconds;
  const newClip = {
    id: genClipId(),
    trackId: originalClip.trackId,
    sourcePath: originalClip.sourcePath,
    name: originalClip.name,
    buffer: originalClip.buffer,
    startTimeInSeconds: sliceTimeInTimeline,
    offset: originalOffset + cutPointInClip,
    durationInSeconds: originalClip.durationInSeconds - cutPointInClip,
    // Propagate the original duration so stretch calculations keep working
    originalDuration: originalClip.originalDuration,
    pitch: originalClip.pitch,
    volume: originalClip.volume,
    pan: originalClip.pan,
    gainNode: new Tone.Gain(originalClip.volume),
    pannerNode: new Tone.Panner(originalClip.pan),
    player: null
  };
  newClip.gainNode.connect(newClip.pannerNode);
  newClip.pannerNode.connect(getMainGainNode());
  // Shorten the original clip up to the cut point
  originalClip.durationInSeconds = cutPointInClip;
  audioState.clips.push(newClip);
  console.log("Clip sliced. Original:", originalClip, "New:", newClip);
}
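// Usage sketch: slice a clip at the current editor playhead. Assumes
// audioEditorPlaybackTime holds the playhead position in seconds.
function exampleSliceAtPlayhead(clipId) {
  sliceAudioClip(clipId, audioState.audioEditorPlaybackTime);
}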
export function updateClipVolume(clipId, volume) {
  const clip = audioState.clips.find((c) => String(c.id) == String(clipId));
  if (clip) {
    const clampedVolume = Math.max(0, Math.min(1.5, volume));
    clip.volume = clampedVolume;
    if (clip.gainNode) {
      clip.gainNode.gain.value = clampedVolume;
    }
  }
}
export function updateClipPan(clipId, pan) {
  const clip = audioState.clips.find((c) => String(c.id) == String(clipId));
  if (clip) {
    const clampedPan = Math.max(-1, Math.min(1, pan));
    clip.pan = clampedPan;
    if (clip.pannerNode) {
      clip.pannerNode.pan.value = clampedPan;
    }
  }
}
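// Usage sketch (assumption): wiring range inputs to the per-clip gain/pan setters.
// `volumeSlider` and `panSlider` are hypothetical <input type="range"> elements.
function exampleBindClipControls(clipId, volumeSlider, panSlider) {
  volumeSlider.addEventListener("input", () => updateClipVolume(clipId, parseFloat(volumeSlider.value)));
  panSlider.addEventListener("input", () => updateClipPan(clipId, parseFloat(panSlider.value)));
}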
export function addAudioTrackLane() {
  const newTrackName = `Pista de Áudio ${audioState.tracks.length + 1}`;
  audioState.tracks.push({ id: Date.now(), name: newTrackName });
}
export function removeAudioClip(clipId) {
  const clipIndex = audioState.clips.findIndex(c => String(c.id) == String(clipId));
  if (clipIndex === -1) return false; // Return false when the clip was not found
  const clip = audioState.clips[clipIndex];
  // 1. Tear down the clip's Tone.js nodes
  if (clip.gainNode) {
    try { clip.gainNode.disconnect(); } catch {}
    try { clip.gainNode.dispose(); } catch {}
  }
  if (clip.pannerNode) {
    try { clip.pannerNode.disconnect(); } catch {}
    try { clip.pannerNode.dispose(); } catch {}
  }
  // 2. Remove the clip from the state array
  audioState.clips.splice(clipIndex, 1);
  // 3. Return true so the caller (controller) can react
  return true;
}
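// Usage sketch: the caller decides whether to re-render based on the boolean result.
function exampleDeleteClip(clipId) {
  if (removeAudioClip(clipId)) {
    renderAudioEditor();
  }
}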