From 24cb9d1e0236ea6e83c086b9a184bbe730f92e24 Mon Sep 17 00:00:00 2001 From: JotaChina Date: Sat, 27 Dec 2025 23:21:58 -0300 Subject: [PATCH] =?UTF-8?q?ativar/desativar=20tracks=20de=20=C3=A1udio=20n?= =?UTF-8?q?a=20playlist,=20knobs=20funcionais?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- assets/js/creations/audio/audio_ui.js | 138 +++++++++++- assets/js/creations/pattern/pattern_audio.js | 218 ++++++++++++++++--- 2 files changed, 323 insertions(+), 33 deletions(-) diff --git a/assets/js/creations/audio/audio_ui.js b/assets/js/creations/audio/audio_ui.js index da38db1b..22621dd9 100755 --- a/assets/js/creations/audio/audio_ui.js +++ b/assets/js/creations/audio/audio_ui.js @@ -20,6 +20,73 @@ import { getSecondsPerStep, } from "../utils.js"; import { sendAction, sendActionSafe } from "../socket.js"; +import { setPatternTrackMute, setPatternTrackVolume, setPatternTrackPan } from "../pattern/pattern_state.js"; +import { initializeAudioContext } from "../audio.js"; + +// --- Knob helpers (mesmo comportamento do pattern_ui.js) --- +const _KNOB_MIN_DEG = -135; +const _KNOB_MAX_DEG = 135; +const _MAX_TRACK_VOLUME = 1.5; + +function _clampKnob(n, min, max) { + const x = Number(n); + if (!Number.isFinite(x)) return min; + return Math.min(max, Math.max(min, x)); +} + +function _valueToDeg(control, value) { + if (control === "pan") { + const v = _clampKnob(value, -1, 1); + const norm = (v + 1) / 2; + return _KNOB_MIN_DEG + norm * (_KNOB_MAX_DEG - _KNOB_MIN_DEG); + } + const v = _clampKnob(value, 0, _MAX_TRACK_VOLUME); + const norm = v / _MAX_TRACK_VOLUME; + return _KNOB_MIN_DEG + norm * (_KNOB_MAX_DEG - _KNOB_MIN_DEG); +} + +function _setKnobIndicator(knobEl, control, value) { + const ind = knobEl.querySelector(".knob-indicator"); + if (!ind) return; + ind.style.transform = `rotate(${_valueToDeg(control, value)}deg)`; +} + +function _attachKnobDrag(knobEl, { control, getCurrent, setLocal, commit }) { + if (!knobEl) return; + + knobEl.addEventListener("mousedown", (e) => { + e.preventDefault(); + e.stopPropagation(); + initializeAudioContext(); + + const startY = e.clientY; + const startVal = Number(getCurrent?.() ?? 0); + const sensitivity = control === "pan" ? 0.01 : 0.005; + + let lastVal = startVal; + + const onMove = (ev) => { + const delta = (startY - ev.clientY) * sensitivity; + let v = startVal + delta; + + if (control === "pan") v = _clampKnob(v, -1, 1); + else v = _clampKnob(v, 0, _MAX_TRACK_VOLUME); + + lastVal = v; + setLocal?.(v); + _setKnobIndicator(knobEl, control, v); + }; + + const onUp = () => { + window.removeEventListener("mousemove", onMove); + window.removeEventListener("mouseup", onUp); + commit?.(lastVal); + }; + + window.addEventListener("mousemove", onMove); + window.addEventListener("mouseup", onUp); + }); +} // ===================================================== // Playlist Patterns (Bassline clips) - Drag/Resize/Delete @@ -512,8 +579,13 @@ export function renderAudioEditor() {
-            <div class="knob">VOL</div>
-            <div class="knob">PAN</div>
+            <div class="knob" data-control="volume" data-track-id="${trackData.id}">
+              <div class="knob-indicator"></div>
+              VOL
+            </div>
+            <div class="knob" data-control="pan" data-track-id="${trackData.id}">
+              <div class="knob-indicator"></div>
+              PAN
+            </div>
@@ -523,6 +595,68 @@ export function renderAudioEditor() { `; newTrackContainer.appendChild(audioTrackLane); + + // MUTE/VOL/PAN na playlist (principalmente p/ bassline/pattern lanes) + const muteBtn = audioTrackLane.querySelector(".track-mute"); + if (muteBtn) { + const isMutedNow = !!(trackData.isMuted || trackData.muted); + muteBtn.classList.toggle("muted", isMutedNow); + + muteBtn.addEventListener("click", (e) => { + e.preventDefault(); + e.stopPropagation(); + initializeAudioContext(); + + const cur = !!(trackData.isMuted || trackData.muted); + const next = !cur; + + setPatternTrackMute(trackData.id, next); + sendAction({ type: "SET_PATTERN_TRACK_MUTE", trackId: trackData.id, isMuted: next }); + + renderAudioEditor(); + restartAudioEditorIfPlaying?.(); + }); + } + + // Só aplica mixer se existir no Pattern State + const patternTrack = (appState.pattern?.tracks || []).find( + (t) => String(t.id) === String(trackData.id) + ); + + if (patternTrack) { + // Knobs (VOL/PAN) + audioTrackLane.querySelectorAll(".knob").forEach((knobEl) => { + const control = knobEl.dataset.control; + const trackId = knobEl.dataset.trackId; + + const getT = () => (appState.pattern?.tracks || []).find((x) => String(x.id) === String(trackId)); + const initial = control === "pan" ? (getT()?.pan ?? 0) : (getT()?.volume ?? 1); + _setKnobIndicator(knobEl, control, initial); + + _attachKnobDrag(knobEl, { + control, + getCurrent: () => (control === "pan" ? (getT()?.pan ?? 0) : (getT()?.volume ?? 1)), + setLocal: (v) => { + if (control === "pan") setPatternTrackPan(trackId, v); + else setPatternTrackVolume(trackId, v); + }, + commit: (v) => { + if (control === "pan") sendAction({ type: "SET_PATTERN_TRACK_PAN", trackId, pan: v }); + else sendAction({ type: "SET_PATTERN_TRACK_VOLUME", trackId, volume: v }); + }, + }); + }); + } else { + // opcional: desabilitar visualmente knobs/mute em lanes de áudio + audioTrackLane.querySelectorAll(".track-mute, .knob").forEach((el) => { + el.style.opacity = "0.35"; + el.style.pointerEvents = "none"; + }); + } + + // Knobs: igual pattern_ui (drag vertical) + // -> ao arrastar: setPatternTrackVolume/Pan() + // -> ao soltar: sendAction SET_PATTERN_TRACK_VOLUME/PAN const timelineContainer = audioTrackLane.querySelector(".timeline-container"); const grid = timelineContainer.querySelector(".spectrogram-view-grid"); diff --git a/assets/js/creations/pattern/pattern_audio.js b/assets/js/creations/pattern/pattern_audio.js index 280d158d..d4dd5a73 100755 --- a/assets/js/creations/pattern/pattern_audio.js +++ b/assets/js/creations/pattern/pattern_audio.js @@ -4,7 +4,7 @@ import * as Tone from "https://esm.sh/tone"; import { appState } from "../state.js"; import { highlightStep } from "./pattern_ui.js"; import { getTotalSteps } from "../utils.js"; -import { initializeAudioContext } from "../audio.js"; +import { initializeAudioContext, getMainGainNode } from "../audio.js"; import { TripleOscillator } from "../../audio/plugins/TripleOscillator.js"; import { Nes } from "../../audio/plugins/Nes.js"; @@ -12,6 +12,50 @@ import { SuperSaw } from "../../audio/plugins/SuperSaw.js"; import { Lb302 } from "../../audio/plugins/Lb302.js"; import { Kicker } from "../../audio/plugins/Kicker.js"; +const MAX_VOL = 1.5; +const clamp = (n, a, b) => Math.max(a, Math.min(b, Number(n) || 0)); + +function getBasslineByPatternIndex(pi) { + return (appState.pattern.tracks || []).find(t => t.type === "bassline" && Number(t.patternIndex) === Number(pi)); +} + +function getSongMix(track, patternIndex) { + track._songMix ??= 
{}; + if (track._songMix[patternIndex]) return track._songMix[patternIndex]; + + const instVol = new Tone.Volume(0); + const instPan = new Tone.Panner(0); + const pattVol = new Tone.Volume(0); + const pattPan = new Tone.Panner(0); + + instVol.connect(instPan); + instPan.connect(pattVol); + pattVol.connect(pattPan); + pattPan.connect(getMainGainNode()); + + track._songMix[patternIndex] = { instVol, instPan, pattVol, pattPan }; + return track._songMix[patternIndex]; +} + +function refreshSongMixFor(track, patternIndex) { + const b = getBasslineByPatternIndex(patternIndex); + const mix = getSongMix(track, patternIndex); + + const instMuted = !!(track.isMuted || track.muted); + const pattMuted = !!(b?.isMuted || b?.muted); + + const instV = clamp(track.volume ?? 1, 0, MAX_VOL); + const pattV = clamp(b?.volume ?? 1, 0, MAX_VOL); + + mix.instVol.volume.value = (instMuted || instV === 0) ? -Infinity : Tone.gainToDb(instV); + mix.pattVol.volume.value = (pattMuted || pattV === 0) ? -Infinity : Tone.gainToDb(pattV); + + mix.instPan.pan.value = clamp(track.pan ?? 0, -1, 1); + mix.pattPan.pan.value = clamp(b?.pan ?? 0, -1, 1); + + return mix; +} + function getActivePatternForTrack(track) { const idx = appState.pattern?.activePatternIndex ?? track.activePatternIndex ?? 0; return track.patterns?.[idx] ?? null; @@ -130,26 +174,67 @@ export function playSample(filePath, trackId) { } } +function getSongStepPlayer(track, patternIndex) { + if (track.type !== "sampler" || !track.buffer) return null; -function playSamplerNoteAtTime(track, midi, time, durationSec) { - if (!track?.buffer || !track.volumeNode) return; + track._songStepPlayer ??= {}; + if (track._songStepPlayer[patternIndex]) return track._songStepPlayer[patternIndex]; + + const mix = refreshSongMixFor(track, patternIndex); + const p = new Tone.Player(track.buffer); + p.connect(mix.instVol); + + track._songStepPlayer[patternIndex] = p; + return p; +} + +function _getPluginKey(track) { + return String( + track.pluginName || + track.instrumentName || + track.instrument?.constructor?.name || + "" + ).toLowerCase(); +} + +function getSongInstrument(track, patternIndex) { + if (track.type !== "plugin") return null; + + track._songInstrument ??= {}; + if (track._songInstrument[patternIndex]) return track._songInstrument[patternIndex]; + + const key = _getPluginKey(track); + const Cls = PLUGIN_CLASSES[key]; + if (!Cls) { + console.warn("[Song] Plugin não encontrado:", key, "track:", track.name); + return null; + } + + const mix = refreshSongMixFor(track, patternIndex); + const inst = new Cls(null, track.params || track.pluginData || {}); + inst.connect(mix.instVol); + + track._songInstrument[patternIndex] = inst; + return inst; +} + +function playSamplerNoteAtTime(track, midi, time, durationSec, destinationNode = null) { + if (!track?.buffer) return; const base = track.baseNote ?? 
60; - const semitones = (midi - base); + const semitones = midi - base; const rate = Math.pow(2, semitones / 12); const player = new Tone.Player(track.buffer); player.playbackRate = rate; - player.connect(track.volumeNode); + + // destino: song bus (mix.instVol) OU cadeia normal do pattern editor + const dest = destinationNode || track.volumeNode || getMainGainNode(); + player.connect(dest); player.start(time); - // se quiser respeitar duração (bem básico) - if (durationSec && durationSec > 0) { - player.stop(time + durationSec); - } - - // limpeza + if (durationSec && durationSec > 0) player.stop(time + durationSec); player.onstop = () => player.dispose(); } @@ -587,7 +672,7 @@ export function startSongPatternPlaybackOnTransport() { // Patterns ativas neste tick (pelas basslines/playlist clips) const basslineTracks = appState.pattern.tracks.filter( - (t) => t.type === "bassline" && !t.isMuted + (t) => t.type === "bassline" && !(t.isMuted || t.muted) ); const activePatternHits = []; @@ -600,12 +685,36 @@ export function startSongPatternPlaybackOnTransport() { activePatternHits.push({ patternIndex: b.patternIndex, localStep }); } + // Quais patternIndex estão ativas AGORA (tem clip tocando neste tick) + const activePatternSet = new Set(activePatternHits.map(h => Number(h.patternIndex))); + + // Sempre atualiza buses já existentes e “fecha o gate” quando a pattern não está ativa + for (const tr of appState.pattern.tracks || []) { + if (tr.type === "bassline") continue; + + const mixMap = tr._songMix; + if (!mixMap) continue; + + for (const piStr of Object.keys(mixMap)) { + const pi = Number(piStr); + + // atualiza vol/pan/mute do instrumento + vol/pan/mute da pattern + const mix = refreshSongMixFor(tr, pi); + + // 🔑 gate: se essa patternIndex NÃO está ativa neste tick, corta IMEDIATAMENTE + if (!activePatternSet.has(pi)) { + mix.pattVol.volume.value = -Infinity; + } + } + } + + // Agora sim: se não tem hits, sai (mas já “gateou” o sustain) if (activePatternHits.length === 0) return; // Dispara tracks reais (samplers/plugins) for (const track of appState.pattern.tracks) { if (track.type === "bassline") continue; - if (track.muted) continue; + if (track.muted || track.isMuted) continue; for (const hit of activePatternHits) { const patt = track.patterns?.[hit.patternIndex]; @@ -641,13 +750,19 @@ export function startSongPatternPlaybackOnTransport() { ? (Math.floor(tickInPattern / LMMS_TICKS_PER_STEP) % pattLenSteps) : hit.localStep; - // ✅ 1) PLUGIN com piano roll (notes) + // ✅ 1) PLUGIN com piano roll (notes) — USAR instrumento por patternIndex if ( track.type === "plugin" && - track.instrument && Array.isArray(patt.notes) && patt.notes.length > 0 ) { + // cria/recupera a instância do plugin para esta patternIndex + const inst = getSongInstrument(track, hit.patternIndex); + if (!inst) continue; + + // garante que o mix/bus exista e esteja atualizado + refreshSongMixFor(track, hit.patternIndex); + const stepStartTick = tickInPattern; const stepEndTick = stepStartTick + LMMS_TICKS_PER_STEP; @@ -677,7 +792,7 @@ export function startSongPatternPlaybackOnTransport() { const freq = Tone.Frequency(midi, "midi").toFrequency(); try { - track.instrument.triggerAttackRelease(freq, durSec, t2, vel); + inst.triggerAttackRelease(freq, durSec, t2, vel); } catch (e) { console.warn("[Playlist] Falha ao tocar plugin note:", track.name, e); } @@ -717,7 +832,8 @@ export function startSongPatternPlaybackOnTransport() { const lenTicks = rawLen < 0 ? 
LMMS_TICKS_PER_STEP : Math.max(rawLen, LMMS_TICKS_PER_STEP); const durSec = Math.max(0.01, ticksToSec(lenTicks, stepIntervalSec)); - playSamplerNoteAtTime(track, Number(n.key) || 0, t2, durSec); + const mix = refreshSongMixFor(track, hit.patternIndex); + playSamplerNoteAtTime(track, Number(n.key) || 0, t2, durSec, mix.instVol); } continue; // não cai na lógica de steps @@ -727,22 +843,33 @@ export function startSongPatternPlaybackOnTransport() { if (!patt.steps) continue; if (patt.steps[stepInPattern]) { - if (track.type === "sampler" && track.player) { - try { - // ✅ retrigger LMMS-like: não “some” quando sample é longo - if (typeof track.player.restart === "function") { - track.player.restart(time); - } else { - if (track.player.state === "started") track.player.stop(time); - track.player.start(time); + const mix = refreshSongMixFor(track, hit.patternIndex); + + // sampler step -> player por patternIndex + if (track.type === "sampler" && track.buffer) { + const p = getSongStepPlayer(track, hit.patternIndex); + if (p) { + try { + if (typeof p.restart === "function") { + p.restart(time); + } else { + if (p.state === "started") p.stop(time); + p.start(time); + } + } catch (e) { + console.warn("[Playlist] Falha ao retrigger step:", track.name, e); } - } catch (e) { - console.warn("[Playlist] Falha ao retrigger sample:", track.name, e); } - } else if (track.type === "plugin" && track.instrument) { - try { - track.instrument.triggerAttackRelease("C5", "16n", time); - } catch {} + } + + // plugin step -> instrumento por patternIndex + else if (track.type === "plugin") { + const inst = getSongInstrument(track, hit.patternIndex); + if (inst) { + try { + inst.triggerAttackRelease("C5", "16n", time); + } catch {} + } } } } @@ -752,6 +879,35 @@ export function startSongPatternPlaybackOnTransport() { export function stopSongPatternPlaybackOnTransport() { if (songPatternScheduleId === null) return; + for (const tr of appState.pattern.tracks || []) { + // mix nodes + if (tr._songMix) { + for (const pi of Object.keys(tr._songMix)) { + const m = tr._songMix[pi]; + try { m.instVol?.dispose(); } catch {} + try { m.instPan?.dispose(); } catch {} + try { m.pattVol?.dispose(); } catch {} + try { m.pattPan?.dispose(); } catch {} + } + tr._songMix = null; + } + + // plugin por pattern + if (tr._songInstrument) { + for (const pi of Object.keys(tr._songInstrument)) { + try { tr._songInstrument[pi]?.dispose?.(); } catch {} + } + tr._songInstrument = null; + } + + // step players por pattern + if (tr._songStepPlayer) { + for (const pi of Object.keys(tr._songStepPlayer)) { + try { tr._songStepPlayer[pi]?.dispose?.(); } catch {} + } + tr._songStepPlayer = null; + } + } try { Tone.Transport.clear(songPatternScheduleId); } catch {}
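For context on the routing that getSongMix()/refreshSongMixFor() set up above: each (instrument track, patternIndex) pair gets its own Volume → Panner → Volume → Panner chain, where the first pair carries the instrument's own volume/pan, the second pair carries the bassline (pattern) track's volume/pan, and a mute or zero volume "closes the gate" by driving the corresponding Volume node to -Infinity dB. The sketch below is a minimal standalone illustration of that idea, not code from this patch: the helper names makeSongBus and applyMix are invented for the example, and it routes to Tone.getDestination() instead of the app's getMainGainNode().

// Standalone sketch (illustrative names; not part of the patch).
import * as Tone from "https://esm.sh/tone";

const MAX_VOL = 1.5; // same ceiling as the patch's MAX_VOL
const clamp = (n, a, b) => Math.max(a, Math.min(b, Number(n) || 0));

// Build one per-pattern bus: instrument volume/pan feeds the pattern volume/pan,
// which feeds the destination (the app would use getMainGainNode() here).
function makeSongBus(destination = Tone.getDestination()) {
  const instVol = new Tone.Volume(0);
  const instPan = new Tone.Panner(0);
  const pattVol = new Tone.Volume(0);
  const pattPan = new Tone.Panner(0);
  instVol.chain(instPan, pattVol, pattPan, destination);
  return { instVol, instPan, pattVol, pattPan };
}

// Apply mixer state: mute (or zero gain) gates a stage at -Infinity dB,
// otherwise the linear 0..1.5 gain is converted to dB for the Volume node.
function applyMix(bus, { instVolume = 1, instPan = 0, instMuted = false,
                         pattVolume = 1, pattPan = 0, pattMuted = false } = {}) {
  const iv = clamp(instVolume, 0, MAX_VOL);
  const pv = clamp(pattVolume, 0, MAX_VOL);
  bus.instVol.volume.value = instMuted || iv === 0 ? -Infinity : Tone.gainToDb(iv);
  bus.pattVol.volume.value = pattMuted || pv === 0 ? -Infinity : Tone.gainToDb(pv);
  bus.instPan.pan.value = clamp(instPan, -1, 1);
  bus.pattPan.pan.value = clamp(pattPan, -1, 1);
}

// Usage: route a synth through the bus, then mute the pattern stage —
// the same effect as the per-tick gate in the song scheduler.
// (In a browser, call `await Tone.start()` from a user gesture first.)
const bus = makeSongBus();
const synth = new Tone.Synth().connect(bus.instVol);
applyMix(bus, { instVolume: 1.2, pattVolume: 1, instPan: -0.3 });
synth.triggerAttackRelease("C4", "8n");
applyMix(bus, { pattMuted: true });

Keeping mixer state as linear gain (0..1.5, LMMS-style) and converting to dB only at the Volume node is what lets the same stored value drive both the knob rotation in audio_ui.js and the audible level in pattern_audio.js.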