enable/disable audio tracks in the playlist, working knobs

This commit is contained in:
JotaChina 2025-12-27 23:21:58 -03:00
parent a3b432ae29
commit 24cb9d1e02
2 changed files with 323 additions and 33 deletions
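For reference, the three playlist mixer socket actions wired up in this commit have the following shapes (a minimal sketch based on the sendAction calls in the diff below; the trackId value is a placeholder and the server-side handling of these actions is not shown here):

// Hypothetical trackId; volume is clamped to 0..1.5 and pan to -1..1 by the knob helpers below.
sendAction({ type: "SET_PATTERN_TRACK_MUTE", trackId: "track-1", isMuted: true });
sendAction({ type: "SET_PATTERN_TRACK_VOLUME", trackId: "track-1", volume: 1.0 });
sendAction({ type: "SET_PATTERN_TRACK_PAN", trackId: "track-1", pan: -0.25 });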

View File

@ -20,6 +20,73 @@ import {
getSecondsPerStep,
} from "../utils.js";
import { sendAction, sendActionSafe } from "../socket.js";
import { setPatternTrackMute, setPatternTrackVolume, setPatternTrackPan } from "../pattern/pattern_state.js";
import { initializeAudioContext } from "../audio.js";
// --- Knob helpers (same behavior as pattern_ui.js) ---
const _KNOB_MIN_DEG = -135;
const _KNOB_MAX_DEG = 135;
const _MAX_TRACK_VOLUME = 1.5;
function _clampKnob(n, min, max) {
const x = Number(n);
if (!Number.isFinite(x)) return min;
return Math.min(max, Math.max(min, x));
}
function _valueToDeg(control, value) {
if (control === "pan") {
const v = _clampKnob(value, -1, 1);
const norm = (v + 1) / 2;
return _KNOB_MIN_DEG + norm * (_KNOB_MAX_DEG - _KNOB_MIN_DEG);
}
const v = _clampKnob(value, 0, _MAX_TRACK_VOLUME);
const norm = v / _MAX_TRACK_VOLUME;
return _KNOB_MIN_DEG + norm * (_KNOB_MAX_DEG - _KNOB_MIN_DEG);
}
function _setKnobIndicator(knobEl, control, value) {
const ind = knobEl.querySelector(".knob-indicator");
if (!ind) return;
ind.style.transform = `rotate(${_valueToDeg(control, value)}deg)`;
}
function _attachKnobDrag(knobEl, { control, getCurrent, setLocal, commit }) {
if (!knobEl) return;
knobEl.addEventListener("mousedown", (e) => {
e.preventDefault();
e.stopPropagation();
initializeAudioContext();
const startY = e.clientY;
const startVal = Number(getCurrent?.() ?? 0);
const sensitivity = control === "pan" ? 0.01 : 0.005;
let lastVal = startVal;
const onMove = (ev) => {
const delta = (startY - ev.clientY) * sensitivity;
let v = startVal + delta;
if (control === "pan") v = _clampKnob(v, -1, 1);
else v = _clampKnob(v, 0, _MAX_TRACK_VOLUME);
lastVal = v;
setLocal?.(v);
_setKnobIndicator(knobEl, control, v);
};
const onUp = () => {
window.removeEventListener("mousemove", onMove);
window.removeEventListener("mouseup", onUp);
commit?.(lastVal);
};
window.addEventListener("mousemove", onMove);
window.addEventListener("mouseup", onUp);
});
}
// =====================================================
// Playlist Patterns (Bassline clips) - Drag/Resize/Delete
@ -512,8 +579,13 @@ export function renderAudioEditor() {
<div class="track-mute"></div> <div class="track-mute"></div>
</div> </div>
<div class="track-controls"> <div class="track-controls">
<div class="knob-container"> <div class="knob" data-control="volume"><div class="knob-indicator"></div></div> <span>VOL</span> </div> <div class="knob-container"> <div class="knob" data-control="volume" data-track-id="${trackData.id}"><div class="knob-indicator"></div></div> <span>VOL</span> </div>
<div class="knob-container"> <div class="knob" data-control="pan"><div class="knob-indicator"></div></div> <span>PAN</span> </div> <div class="knob-container">
<div class="knob" data-control="pan" data-track-id="${trackData.id}">
<div class="knob-indicator"></div>
</div>
<span>PAN</span>
</div>
</div> </div>
</div> </div>
<div class="timeline-container"> <div class="timeline-container">
@ -524,6 +596,68 @@ export function renderAudioEditor() {
newTrackContainer.appendChild(audioTrackLane);
// MUTE/VOL/PAN in the playlist (mainly for bassline/pattern lanes)
const muteBtn = audioTrackLane.querySelector(".track-mute");
if (muteBtn) {
const isMutedNow = !!(trackData.isMuted || trackData.muted);
muteBtn.classList.toggle("muted", isMutedNow);
muteBtn.addEventListener("click", (e) => {
e.preventDefault();
e.stopPropagation();
initializeAudioContext();
const cur = !!(trackData.isMuted || trackData.muted);
const next = !cur;
setPatternTrackMute(trackData.id, next);
sendAction({ type: "SET_PATTERN_TRACK_MUTE", trackId: trackData.id, isMuted: next });
renderAudioEditor();
restartAudioEditorIfPlaying?.();
});
}
// Only apply the mixer if the track exists in the Pattern State
const patternTrack = (appState.pattern?.tracks || []).find(
(t) => String(t.id) === String(trackData.id)
);
if (patternTrack) {
// Knobs (VOL/PAN)
audioTrackLane.querySelectorAll(".knob").forEach((knobEl) => {
const control = knobEl.dataset.control;
const trackId = knobEl.dataset.trackId;
const getT = () => (appState.pattern?.tracks || []).find((x) => String(x.id) === String(trackId));
const initial = control === "pan" ? (getT()?.pan ?? 0) : (getT()?.volume ?? 1);
_setKnobIndicator(knobEl, control, initial);
_attachKnobDrag(knobEl, {
control,
getCurrent: () => (control === "pan" ? (getT()?.pan ?? 0) : (getT()?.volume ?? 1)),
setLocal: (v) => {
if (control === "pan") setPatternTrackPan(trackId, v);
else setPatternTrackVolume(trackId, v);
},
commit: (v) => {
if (control === "pan") sendAction({ type: "SET_PATTERN_TRACK_PAN", trackId, pan: v });
else sendAction({ type: "SET_PATTERN_TRACK_VOLUME", trackId, volume: v });
},
});
});
} else {
// optional: visually disable the knobs/mute on audio lanes
audioTrackLane.querySelectorAll(".track-mute, .knob").forEach((el) => {
el.style.opacity = "0.35";
el.style.pointerEvents = "none";
});
}
// Knobs: same as pattern_ui (vertical drag)
// -> while dragging: setPatternTrackVolume/Pan()
// -> on release: sendAction SET_PATTERN_TRACK_VOLUME/PAN
const timelineContainer = audioTrackLane.querySelector(".timeline-container");
const grid = timelineContainer.querySelector(".spectrogram-view-grid");

View File

@ -4,7 +4,7 @@ import * as Tone from "https://esm.sh/tone";
import { appState } from "../state.js";
import { highlightStep } from "./pattern_ui.js";
import { getTotalSteps } from "../utils.js";
import { initializeAudioContext, getMainGainNode } from "../audio.js";
import { TripleOscillator } from "../../audio/plugins/TripleOscillator.js";
import { Nes } from "../../audio/plugins/Nes.js";
@ -12,6 +12,50 @@ import { SuperSaw } from "../../audio/plugins/SuperSaw.js";
import { Lb302 } from "../../audio/plugins/Lb302.js";
import { Kicker } from "../../audio/plugins/Kicker.js";
const MAX_VOL = 1.5;
const clamp = (n, a, b) => Math.max(a, Math.min(b, Number(n) || 0));
function getBasslineByPatternIndex(pi) {
return (appState.pattern.tracks || []).find(t => t.type === "bassline" && Number(t.patternIndex) === Number(pi));
}
function getSongMix(track, patternIndex) {
track._songMix ??= {};
if (track._songMix[patternIndex]) return track._songMix[patternIndex];
const instVol = new Tone.Volume(0);
const instPan = new Tone.Panner(0);
const pattVol = new Tone.Volume(0);
const pattPan = new Tone.Panner(0);
instVol.connect(instPan);
instPan.connect(pattVol);
pattVol.connect(pattPan);
pattPan.connect(getMainGainNode());
track._songMix[patternIndex] = { instVol, instPan, pattVol, pattPan };
return track._songMix[patternIndex];
}
function refreshSongMixFor(track, patternIndex) {
const b = getBasslineByPatternIndex(patternIndex);
const mix = getSongMix(track, patternIndex);
const instMuted = !!(track.isMuted || track.muted);
const pattMuted = !!(b?.isMuted || b?.muted);
const instV = clamp(track.volume ?? 1, 0, MAX_VOL);
const pattV = clamp(b?.volume ?? 1, 0, MAX_VOL);
mix.instVol.volume.value = (instMuted || instV === 0) ? -Infinity : Tone.gainToDb(instV);
mix.pattVol.volume.value = (pattMuted || pattV === 0) ? -Infinity : Tone.gainToDb(pattV);
mix.instPan.pan.value = clamp(track.pan ?? 0, -1, 1);
mix.pattPan.pan.value = clamp(b?.pan ?? 0, -1, 1);
return mix;
}
function getActivePatternForTrack(track) {
const idx = appState.pattern?.activePatternIndex ?? track.activePatternIndex ?? 0;
return track.patterns?.[idx] ?? null;
@ -130,26 +174,67 @@ export function playSample(filePath, trackId) {
}
}
function getSongStepPlayer(track, patternIndex) {
if (track.type !== "sampler" || !track.buffer) return null;
track._songStepPlayer ??= {};
if (track._songStepPlayer[patternIndex]) return track._songStepPlayer[patternIndex];
const mix = refreshSongMixFor(track, patternIndex);
const p = new Tone.Player(track.buffer);
p.connect(mix.instVol);
track._songStepPlayer[patternIndex] = p;
return p;
}
function _getPluginKey(track) {
return String(
track.pluginName ||
track.instrumentName ||
track.instrument?.constructor?.name ||
""
).toLowerCase();
}
function getSongInstrument(track, patternIndex) {
if (track.type !== "plugin") return null;
track._songInstrument ??= {};
if (track._songInstrument[patternIndex]) return track._songInstrument[patternIndex];
const key = _getPluginKey(track);
const Cls = PLUGIN_CLASSES[key];
if (!Cls) {
console.warn("[Song] Plugin não encontrado:", key, "track:", track.name);
return null;
}
const mix = refreshSongMixFor(track, patternIndex);
const inst = new Cls(null, track.params || track.pluginData || {});
inst.connect(mix.instVol);
track._songInstrument[patternIndex] = inst;
return inst;
}
function playSamplerNoteAtTime(track, midi, time, durationSec, destinationNode = null) {
if (!track?.buffer) return;
const base = track.baseNote ?? 60;
const semitones = midi - base;
const rate = Math.pow(2, semitones / 12);
const player = new Tone.Player(track.buffer);
player.playbackRate = rate;
// destination: the song bus (mix.instVol) OR the pattern editor's normal chain
const dest = destinationNode || track.volumeNode || getMainGainNode();
player.connect(dest);
player.start(time);
if (durationSec && durationSec > 0) player.stop(time + durationSec);
player.onstop = () => player.dispose();
}
@ -587,7 +672,7 @@ export function startSongPatternPlaybackOnTransport() {
// Patterns active on this tick (via the basslines/playlist clips)
const basslineTracks = appState.pattern.tracks.filter(
(t) => t.type === "bassline" && !(t.isMuted || t.muted)
);
const activePatternHits = [];
@ -600,12 +685,36 @@ export function startSongPatternPlaybackOnTransport() {
activePatternHits.push({ patternIndex: b.patternIndex, localStep });
}
// Which patternIndex values are active RIGHT NOW (i.e. have a clip playing on this tick)
const activePatternSet = new Set(activePatternHits.map(h => Number(h.patternIndex)));
// Always update the existing buses and "close the gate" when the pattern is not active
for (const tr of appState.pattern.tracks || []) {
if (tr.type === "bassline") continue;
const mixMap = tr._songMix;
if (!mixMap) continue;
for (const piStr of Object.keys(mixMap)) {
const pi = Number(piStr);
// update the instrument's vol/pan/mute + the pattern's vol/pan/mute
const mix = refreshSongMixFor(tr, pi);
// 🔑 gate: if this patternIndex is NOT active on this tick, cut it IMMEDIATELY
if (!activePatternSet.has(pi)) {
mix.pattVol.volume.value = -Infinity;
}
}
}
// Only now: if there are no hits, bail out (the sustain has already been gated)
if (activePatternHits.length === 0) return;
// Trigger the real tracks (samplers/plugins)
for (const track of appState.pattern.tracks) {
if (track.type === "bassline") continue;
if (track.muted || track.isMuted) continue;
for (const hit of activePatternHits) {
const patt = track.patterns?.[hit.patternIndex];
@ -641,13 +750,19 @@ export function startSongPatternPlaybackOnTransport() {
? (Math.floor(tickInPattern / LMMS_TICKS_PER_STEP) % pattLenSteps)
: hit.localStep;
// ✅ 1) PLUGIN with piano roll (notes) - USE the per-patternIndex instrument
if (
track.type === "plugin" &&
Array.isArray(patt.notes) &&
patt.notes.length > 0
) {
// create/reuse the plugin instance for this patternIndex
const inst = getSongInstrument(track, hit.patternIndex);
if (!inst) continue;
// make sure the mix/bus exists and is up to date
refreshSongMixFor(track, hit.patternIndex);
const stepStartTick = tickInPattern;
const stepEndTick = stepStartTick + LMMS_TICKS_PER_STEP;
@ -677,7 +792,7 @@ export function startSongPatternPlaybackOnTransport() {
const freq = Tone.Frequency(midi, "midi").toFrequency();
try {
inst.triggerAttackRelease(freq, durSec, t2, vel);
} catch (e) {
console.warn("[Playlist] Failed to play plugin note:", track.name, e);
}
@ -717,7 +832,8 @@ export function startSongPatternPlaybackOnTransport() {
const lenTicks = rawLen < 0 ? LMMS_TICKS_PER_STEP : Math.max(rawLen, LMMS_TICKS_PER_STEP);
const durSec = Math.max(0.01, ticksToSec(lenTicks, stepIntervalSec));
const mix = refreshSongMixFor(track, hit.patternIndex);
playSamplerNoteAtTime(track, Number(n.key) || 0, t2, durSec, mix.instVol);
}
continue; // do not fall through to the step logic
@ -727,31 +843,71 @@ export function startSongPatternPlaybackOnTransport() {
if (!patt.steps) continue;
if (patt.steps[stepInPattern]) {
const mix = refreshSongMixFor(track, hit.patternIndex);
// sampler step -> per-patternIndex player
if (track.type === "sampler" && track.buffer) {
const p = getSongStepPlayer(track, hit.patternIndex);
if (p) {
try {
if (typeof p.restart === "function") {
p.restart(time);
} else {
if (p.state === "started") p.stop(time);
p.start(time);
}
} catch (e) {
console.warn("[Playlist] Failed to retrigger step:", track.name, e);
}
}
}
// plugin step -> per-patternIndex instrument
else if (track.type === "plugin") {
const inst = getSongInstrument(track, hit.patternIndex);
if (inst) {
try {
inst.triggerAttackRelease("C5", "16n", time);
} catch {}
}
}
}
}
}
}, "16n");
}
export function stopSongPatternPlaybackOnTransport() {
if (songPatternScheduleId === null) return;
for (const tr of appState.pattern.tracks || []) {
// mix nodes
if (tr._songMix) {
for (const pi of Object.keys(tr._songMix)) {
const m = tr._songMix[pi];
try { m.instVol?.dispose(); } catch {}
try { m.instPan?.dispose(); } catch {}
try { m.pattVol?.dispose(); } catch {}
try { m.pattPan?.dispose(); } catch {}
}
tr._songMix = null;
}
// plugin instances per pattern
if (tr._songInstrument) {
for (const pi of Object.keys(tr._songInstrument)) {
try { tr._songInstrument[pi]?.dispose?.(); } catch {}
}
tr._songInstrument = null;
}
// step players per pattern
if (tr._songStepPlayer) {
for (const pi of Object.keys(tr._songStepPlayer)) {
try { tr._songStepPlayer[pi]?.dispose?.(); } catch {}
}
tr._songStepPlayer = null;
}
}
try {
Tone.Transport.clear(songPatternScheduleId);
} catch {}