sequential frame decoding support

- Server now has a configurable MAX_WS_BUFFER_BYTES (default 2097152 bytes, i.e. 2 MiB) and skips JPEG frames
    when the WebSocket send buffer is backed up, instead of queueing stale frames on the socket (server/index.js:30,
    server/index.js:1439).
  - Browser frame handling now decodes frames sequentially, drops late frames against the audio
    clock, caps pending/decoded queues, and draws only the latest due frame per animation tick
    (public/app.js:280, public/app.js:381).
  - Relay/split normal EOF closes are no longer mislabeled as client_disconnect, which should
    make logs around ffmpeg decode warnings less misleading (server/index.js:797, server/
    index.js:1071).
  - Documented MAX_WS_BUFFER_BYTES in README, Compose, and AGENTS.
This commit is contained in:
2026-05-04 00:00:34 -07:00
parent a3429dee85
commit 13b1d768dc
5 changed files with 218 additions and 52 deletions

View File

@@ -23,11 +23,18 @@ const elements = {
};
const context = elements.canvas.getContext('2d', { alpha: false });
const FRAME_LATE_GRACE_SECONDS = 0.25;
const MAX_PENDING_FRAME_QUEUE_SECONDS = 2;
const MAX_DECODED_FRAME_QUEUE_SECONDS = 3;
const MIN_PENDING_FRAME_QUEUE = 12;
const MIN_DECODED_FRAME_QUEUE = 24;
const state = {
generation: 0,
session: null,
websocket: null,
pendingFrames: [],
decodingFrames: false,
frames: [],
currentBitmap: null,
raf: 0,
@@ -270,23 +277,20 @@ async function playAudio() {
}
}
async function handleFramePacket(packet, generation) {
function handleFramePacket(packet, generation) {
if (!(packet instanceof ArrayBuffer) || packet.byteLength <= 8) {
return;
}
const timestamp = new DataView(packet, 0, 8).getFloat64(0, true);
const blob = new Blob([packet.slice(8)], { type: 'image/jpeg' });
const bitmap = await decodeImage(blob);
if (generation !== state.generation) {
releaseImage(bitmap);
if (generation !== state.generation || isLateFrame(timestamp)) {
return;
}
state.frames.push({ timestamp, bitmap });
state.frameCount += 1;
trimFrameQueue();
state.pendingFrames.push({ timestamp, jpeg: packet.slice(8), generation });
trimPendingFrameQueue();
void pumpFrameDecodeQueue();
}
function handleControlMessage(rawMessage) {
@@ -332,26 +336,34 @@ function drawReadyFrames() {
return;
}
dropLateDecodedFrames();
const frameLeadSeconds = 1 / Math.max(1, state.session.options.fps);
const targetTime = elements.audio.currentTime + frameLeadSeconds;
let drew = false;
let frameToDraw = null;
while (state.frames.length > 0 && state.frames[0].timestamp <= targetTime) {
const frame = state.frames.shift();
if (state.currentBitmap) {
releaseImage(state.currentBitmap);
if (frameToDraw) {
releaseImage(frameToDraw.bitmap);
}
state.currentBitmap = frame.bitmap;
drawBitmap(frame.bitmap);
drew = true;
frameToDraw = frame;
}
if (drew) {
elements.loader.hidden = true;
clearPlayerMessage();
if (!frameToDraw) {
return;
}
if (state.currentBitmap) {
releaseImage(state.currentBitmap);
}
state.currentBitmap = frameToDraw.bitmap;
drawBitmap(frameToDraw.bitmap);
elements.loader.hidden = true;
clearPlayerMessage();
}
function drawBitmap(bitmap) {
@@ -366,9 +378,70 @@ function drawBitmap(bitmap) {
context.drawImage(bitmap, 0, 0, elements.canvas.width, elements.canvas.height);
}
/**
 * Drains state.pendingFrames one frame at a time, decoding each JPEG payload
 * into a bitmap and appending it to state.frames for later drawing.
 *
 * Re-entrancy is guarded by state.decodingFrames so only a single pump loop
 * runs at once. A frame is discarded (and any decoded bitmap released) if its
 * generation no longer matches the active session or if it has already fallen
 * behind the audio clock — checked both before and after the async decode,
 * since staleness can change while awaiting. If frames remain once the loop
 * winds down, another pump is scheduled on a fresh macrotask.
 */
async function pumpFrameDecodeQueue() {
  if (state.decodingFrames) {
    return;
  }
  state.decodingFrames = true;
  try {
    while (state.pendingFrames.length > 0) {
      dropLatePendingFrames();
      const next = state.pendingFrames.shift();
      if (!next) {
        return;
      }
      // Staleness must be re-evaluated after the await below as well.
      const isStale = () =>
        next.generation !== state.generation || isLateFrame(next.timestamp);
      if (isStale()) {
        continue;
      }
      let decoded;
      try {
        decoded = await decodeImage(new Blob([next.jpeg], { type: 'image/jpeg' }));
      } catch {
        // Undecodable frame: drop it and keep the pump running.
        continue;
      }
      if (isStale()) {
        releaseImage(decoded);
        continue;
      }
      state.frames.push({ timestamp: next.timestamp, bitmap: decoded });
      state.frameCount += 1;
      trimFrameQueue();
    }
  } finally {
    state.decodingFrames = false;
    if (state.pendingFrames.length > 0) {
      window.setTimeout(() => {
        void pumpFrameDecodeQueue();
      }, 0);
    }
  }
}
/**
 * Bounds the undecoded-frame backlog: first sheds frames already behind the
 * audio clock, then discards the oldest entries if the queue still exceeds
 * the time-based cap.
 */
function trimPendingFrameQueue() {
  dropLatePendingFrames();
  const limit = getFrameQueueLimit(MAX_PENDING_FRAME_QUEUE_SECONDS, MIN_PENDING_FRAME_QUEUE);
  const excess = state.pendingFrames.length - limit;
  if (excess > 0) {
    // Oldest frames are the least useful — drop from the front.
    state.pendingFrames.splice(0, excess);
  }
}
function trimFrameQueue() {
const fps = state.session?.options.fps ?? 24;
const maxQueuedFrames = Math.max(60, Math.ceil(fps * 8));
dropLateDecodedFrames();
const maxQueuedFrames = getFrameQueueLimit(MAX_DECODED_FRAME_QUEUE_SECONDS, MIN_DECODED_FRAME_QUEUE);
const overflow = state.frames.length - maxQueuedFrames;
if (overflow <= 0) {
@@ -382,6 +455,50 @@ function trimFrameQueue() {
}
}
/**
 * Removes the leading run of pending (undecoded) frames whose timestamps have
 * already fallen behind the audio clock. Pending frames hold raw JPEG bytes,
 * so nothing needs to be released.
 */
function dropLatePendingFrames() {
  const firstOnTime = state.pendingFrames.findIndex(
    (frame) => !isLateFrame(frame.timestamp),
  );
  if (firstOnTime === -1) {
    // Every queued frame is late.
    state.pendingFrames.length = 0;
  } else if (firstOnTime > 0) {
    state.pendingFrames.splice(0, firstOnTime);
  }
}
/**
 * Removes the leading run of decoded frames that have fallen behind the audio
 * clock, releasing each bitmap so the browser can reclaim its memory.
 */
function dropLateDecodedFrames() {
  const firstOnTime = state.frames.findIndex(
    (frame) => !isLateFrame(frame.timestamp),
  );
  const lateCount = firstOnTime === -1 ? state.frames.length : firstOnTime;
  if (lateCount === 0) {
    return;
  }
  for (const late of state.frames.splice(0, lateCount)) {
    releaseImage(late.bitmap);
  }
}
/**
 * Converts a time budget into a frame-count cap for a queue.
 *
 * @param {number} seconds - How many seconds of video the queue may hold.
 * @param {number} minimum - Floor on the returned cap, so low frame rates
 *   still keep a workable number of frames queued.
 * @returns {number} Maximum number of frames to keep.
 */
function getFrameQueueLimit(seconds, minimum) {
  // Assume 24 fps until a session with explicit options is active.
  const fps = state.session?.options.fps ?? 24;
  const fromDuration = Math.ceil(fps * seconds);
  return Math.max(minimum, fromDuration);
}
/**
 * Reports whether a frame timestamp has fallen behind the audio playback
 * clock by more than the grace window.
 *
 * @param {number} timestamp - Frame presentation time in seconds.
 * @returns {boolean} True only while audio is actively driving the clock
 *   (session present, not seeking, not paused, media data available) and the
 *   frame trails it by more than FRAME_LATE_GRACE_SECONDS.
 */
function isLateFrame(timestamp) {
  const audio = elements.audio;
  // Lateness is only meaningful while the audio clock is actually advancing.
  const clockRunning =
    Boolean(state.session) && !state.isSeeking && !audio.paused && audio.readyState !== 0;
  if (!clockRunning) {
    return false;
  }
  let currentTime = audio.currentTime;
  if (!Number.isFinite(currentTime)) {
    currentTime = 0;
  }
  return timestamp < currentTime - FRAME_LATE_GRACE_SECONDS;
}
function stopSession({ showEntry: shouldShowEntry = true } = {}) {
state.generation += 1;
state.session = null;
@@ -421,6 +538,8 @@ function stopSession({ showEntry: shouldShowEntry = true } = {}) {
}
function clearFrameQueue() {
state.pendingFrames = [];
for (const frame of state.frames) {
releaseImage(frame.bitmap);
}