ds-physics 0.16.0 (81 tests): - v0.14: proximity queries, physics regions - v0.15: event hooks, transform hierarchy, timeline - v0.16: deterministic seed/checksum, collision manifolds, distance+hinge constraints ds-stream 0.16.0 (143 tests): - v0.14: FrameCompressor (RLE), MultiClientSync, BandwidthThrottle, FrameType::Compressed - v0.15: AdaptiveBitrate, MetricsSnapshot, FramePipeline - v0.16: FrameEncryptor (XOR), StreamMigration, FrameDedup, PriorityQueue ds-stream-wasm 0.16.0 (54 tests): - v0.14: RLE compress/decompress, sync drift, bandwidth limiting - v0.15: adaptive quality, metrics snapshot, frame transforms - v0.16: encrypt/decrypt frames, migration handoff, frame dedup ds-screencast 0.16.0: - v0.14: --roi, /clients, --compress, --migrate-on-crash - v0.15: --adaptive-bitrate, /metrics, --viewport-transform, --cdn-push - v0.16: /tabs, --encrypt-key, --watermark, --graceful-shutdown
917 lines
36 KiB
JavaScript
917 lines
36 KiB
JavaScript
#!/usr/bin/env node
|
||
/**
|
||
* DreamStack Screencast — CDP Capture Agent v0.5.0
|
||
*
|
||
* Streams any web page to DreamStack panels via Chrome DevTools Protocol.
|
||
* Outputs frames using the ds-stream binary protocol (16-byte header).
|
||
*
|
||
* Usage:
|
||
* node capture.js [url] [options]
|
||
*
|
||
* Options:
|
||
* --headless Run Chrome in headless mode
|
||
* --fps=N Max frames per second (default: 30)
|
||
* --quality=N JPEG/WebP quality 1-100 (default: 75)
|
||
* --width=N Viewport width (default: 800)
|
||
* --height=N Viewport height (default: 1280)
|
||
* --ws-port=N WebSocket server port (default: 9300)
|
||
* --monitor-port=N Monitor HTTP port (default: 9301)
|
||
* --cdp-port=N Chrome DevTools port (default: 9222)
|
||
* --format=FORMAT Image format: jpeg or webp (default: jpeg)
|
||
* --tabs=N Number of tabs to capture (default: 1)
|
||
* --audio Enable audio capture (requires Chrome audio)
|
||
*
|
||
* Examples:
|
||
* node capture.js http://localhost:3000
|
||
* node capture.js https://react.dev --headless --fps=30 --format=webp
|
||
* node capture.js http://localhost:5173 --width=1024 --height=768 --tabs=3
|
||
*/
|
||
|
||
// Node core modules
const { spawn } = require('child_process');
const fs = require('fs'); // used by --error-log and --record write streams
const http = require('http');

// Third-party
const CDP = require('chrome-remote-interface');
const { WebSocketServer } = require('ws');
|
||
// ─── Parse CLI Args ───
/**
 * Read a `--name=value` CLI option, falling back to `defaultVal`.
 * Always returns a string; numeric callers parseInt/parseFloat the result.
 * Splits on the FIRST '=' only, so values that themselves contain '='
 * (URLs with query strings, base64 keys) are returned intact — the old
 * `split('=')[1]` silently truncated them.
 */
function getArg(name, defaultVal) {
  const prefix = `--${name}=`;
  const found = process.argv.find(a => a.startsWith(prefix));
  return found ? found.slice(prefix.length) : String(defaultVal);
}
|
||
|
||
// ─── Config ───
// All options are resolved once at startup. getArg() always returns strings,
// so numeric options are parsed here.
// FIXME(review): TARGET_URL takes argv[2] verbatim — `node capture.js --headless`
// (flag first, no URL) makes TARGET_URL equal '--headless'; confirm intended.
const TARGET_URL = process.argv[2] || 'http://localhost:3000';
const WIDTH = parseInt(getArg('width', 800));                 // viewport width (px)
const HEIGHT = parseInt(getArg('height', 1280));              // viewport height (px)
const WS_PORT = parseInt(getArg('ws-port', 9300));            // frame WebSocket server port
const MONITOR_PORT = parseInt(getArg('monitor-port', 9301));  // monitor HTTP port
const CDP_PORT = parseInt(getArg('cdp-port', 9222));          // Chrome DevTools port
const QUALITY = parseInt(getArg('quality', 75));              // JPEG/WebP quality 1-100
const MAX_FPS = parseInt(getArg('fps', 30));                  // capture rate cap
const IMAGE_FORMAT = getArg('format', 'jpeg'); // 'jpeg' or 'webp'
const TAB_COUNT = parseInt(getArg('tabs', 1));                // tabs to capture
const HEADLESS = process.argv.includes('--headless');
const ENABLE_AUDIO = process.argv.includes('--audio');
const RECORD_FILE = getArg('record', '');                     // .dsrec output path; '' = off
const AUTH_TOKEN = getArg('auth-token', '');                  // '' disables WS auth
const MULTI_URLS = getArg('urls', '').split(',').filter(Boolean); // multi-tab URL list
const MAX_FPS_IDLE = parseInt(getArg('max-fps-idle', '5'));   // fps cap while idle
const DEBUG_OVERLAY = process.argv.includes('--debug-overlay');
const RESTART_ON_CRASH = process.argv.includes('--restart-on-crash');
const ERROR_LOG_FILE = getArg('error-log', '');               // JSONL error log path; '' = off
const PROFILE = getArg('profile', '');                        // named preset, see PROFILES below
const STATS_INTERVAL = 5000; // ms between stats log lines
|
||
|
||
// v0.12: Capture profiles — named quality/fps/format presets selected via --profile=NAME.
const PROFILES = {
  low: { quality: 30, fps: 10, format: 'jpeg' },
  medium: { quality: 55, fps: 24, format: 'jpeg' },
  high: { quality: 75, fps: 30, format: 'webp' },
  ultra: { quality: 90, fps: 60, format: 'webp' },
};
if (PROFILE && PROFILES[PROFILE]) {
  const p = PROFILES[PROFILE];
  // Override globals (these are const — re-assign via var in profile block)
  // FIXME(review): the selected profile is only LOGGED here, never applied —
  // QUALITY / MAX_FPS / IMAGE_FORMAT above are `const` and are not overridden,
  // so --profile currently has no effect on capture. Fixing this requires
  // declaring those three with `let` and assigning them here.
  console.log(`[Profile] Applying '${PROFILE}': Q=${p.quality} FPS=${p.fps} ${p.format}`);
}
|
||
|
||
// v0.12: Error recovery log — append-only JSONL stream, one entry per logError() call.
// Requires `fs` to be imported at the top of the file.
let errorLogStream = null;
if (ERROR_LOG_FILE) {
  // flags 'a': append so repeated runs accumulate into one log file.
  errorLogStream = fs.createWriteStream(ERROR_LOG_FILE, { flags: 'a' });
  console.log(`[ErrorLog] Writing to ${ERROR_LOG_FILE}`);
}
|
||
/**
 * Record a structured error: append one JSON line to the error log (when
 * enabled via --error-log) and always echo `[category] message` to stderr.
 * Extra fields are merged into the JSON record.
 */
function logError(category, message, extra = {}) {
  const record = {
    ts: new Date().toISOString(),
    category,
    message,
    ...extra,
  };
  if (errorLogStream) {
    errorLogStream.write(JSON.stringify(record) + '\n');
  }
  console.error(`[${category}] ${message}`);
}
|
||
|
||
// v0.11: Connection pool metrics (surfaced via /health and /metrics)
let totalConnections = 0;       // connections accepted since process start
let peakConcurrent = 0;         // high-water mark of simultaneous clients
let reconnectCount = 0;         // reconnects (reported; incremented elsewhere — TODO confirm)
const channelStats = new Map(); // channel → { frames, bytes, seq, lastStatTime, statFrames, statBytes }

// v0.13: Session recording (in-memory frames served by /replay)
const SESSION_RECORD_DIR = getArg('session-record', '');
const MAX_RECORD_FRAMES = parseInt(getArg('max-record-frames', '1000'));
// NOTE(review): sessionFrames is read by /replay but nothing in this file
// appears to push to it — confirm the recorder lives elsewhere.
const sessionFrames = [];
let currentTargetUrl = TARGET_URL; // mutable: /hot-reload can repoint the capture
|
||
|
||
// v0.14: ROI, compression, migration
// ROI is x,y,w,h (exactly 4 numbers) or empty for the full viewport.
const ROI = getArg('roi', '').split(',').map(Number).filter(n => !isNaN(n));
const COMPRESS_FRAMES = process.argv.includes('--compress');
const MIGRATE_ON_CRASH = process.argv.includes('--migrate-on-crash');
// ws → { id, seq, ack, bytes, connectedAt } — served by the /clients endpoint.
const clientTracker = new Map();

// v0.15: Adaptive, viewport transform, CDN
const ADAPTIVE_BITRATE = process.argv.includes('--adaptive-bitrate');
const VIEWPORT_TRANSFORM = parseFloat(getArg('viewport-transform', '1.0')); // viewport scale factor
const CDN_PUSH_URL = getArg('cdn-push', ''); // '' disables CDN push

// v0.16: Encryption, watermark, graceful shutdown, tabs
const ENCRYPT_KEY = getArg('encrypt-key', '');  // '' disables frame encryption
const WATERMARK_TEXT = getArg('watermark', ''); // '' disables watermark
const GRACEFUL_SHUTDOWN = process.argv.includes('--graceful-shutdown');
const tabRegistry = new Map(); // tabId -> { url, active, cdpClient } — served by /tabs
|
||
|
||
// v0.5: Recording file stream (.dsrec container: magic + 24-byte metadata
// header, then [u64 LE timestamp-µs][u32 LE length][frame] records).
let recordStream = null;
let recordFrameCount = 0;
if (RECORD_FILE) {
  // File magic: 'D' 'S' followed by container version 0x01.
  const DSREC_MAGIC = Buffer.from([0x44, 0x53, 0x01]);
  recordStream = fs.createWriteStream(RECORD_FILE); // fs is required at the top of the file
  recordStream.write(DSREC_MAGIC);
  // Reserve 24 bytes for metadata (will be patched on close)
  recordStream.write(Buffer.alloc(24));
  console.log(`[Record] Writing to ${RECORD_FILE}`);
}
|
||
|
||
// ─── ds-stream Protocol Constants ───
// Frame types and header flags of the 16-byte ds-stream binary header.
const FRAME_COMPRESSED_PIXELS = 0x02; // payload = 1 format byte + encoded image
const FLAG_COMPRESSED = 0x04;
const FLAG_KEYFRAME = 0x02;
const FLAG_INPUT = 0x08; // marks client→server input frames
const HEADER_SIZE = 16;  // bytes

// Input types (ds-stream protocol)
const INPUT_POINTER = 0x01;  // mouse move
const INPUT_PTR_DOWN = 0x02;
const INPUT_PTR_UP = 0x03;
const INPUT_KEY_DOWN = 0x10;
const INPUT_KEY_UP = 0x11;
const INPUT_TOUCH = 0x20;
const INPUT_TOUCH_END = 0x21;
const INPUT_SCROLL = 0x50;

// Compressed pixel format byte (first payload byte of a CompressedPixels frame)
const FMT_WEBP = 1;
const FMT_JPEG = 2;

// v0.4: ACK and Audio frame types
const FRAME_ACK = 0x0E;  // client acknowledgement carrying RTT for adaptive quality
const FRAME_AUDIO = 0x08;

const clients = new Map(); // channel → Set<ws>
let globalFrameCount = 0;  // frames captured across all channels
let globalBytesSent = 0;   // bytes broadcast across all channels
const t0 = Date.now();     // process start; all frame timestamps are relative to this
|
||
|
||
// ─── v0.4: Adaptive Quality Controller ───
// Keeps a rolling window of client-reported round-trip times and maps their
// average onto one of three delivery tiers: Full / Reduced / Minimal.
class AdaptiveQuality {
  constructor() {
    this.rttHistory = [];  // most recent RTT samples, in ms
    this.maxHistory = 30;  // rolling-window size
    this.currentTier = 0;  // 0=Full, 1=Reduced, 2=Minimal
  }

  /** Record one ACK round-trip time and re-evaluate the tier. */
  recordAck(rttMs) {
    this.rttHistory.push(rttMs);
    while (this.rttHistory.length > this.maxHistory) {
      this.rttHistory.shift();
    }
    this.currentTier = this.computeTier();
  }

  /** Average the window: <50 ms → tier 0, <150 ms → tier 1, else tier 2. */
  computeTier() {
    const samples = this.rttHistory;
    if (samples.length === 0) return 0;
    let sum = 0;
    for (const rtt of samples) sum += rtt;
    const avg = sum / samples.length;
    if (avg < 50) return 0;
    return avg < 150 ? 1 : 2;
  }

  // Encode quality for the current tier (0 on Minimal; the caller treats 0 as
  // "keep the previous quality" — see the ACK handler).
  get quality() { return [75, 40, 0][this.currentTier]; }
  // Frames dropped between sends; 999999 effectively pauses the Minimal tier.
  get skipFrames() { return [0, 2, 999999][this.currentTier]; }
  // Human-readable tier label used in logs and /health.
  get tierName() { return ['Full', 'Reduced', 'Minimal'][this.currentTier]; }
}

const aq = new AdaptiveQuality();
|
||
|
||
// ─── ds-stream Frame Builder ───
/**
 * Serialize one ds-stream frame: a 16-byte little-endian header followed by
 * the payload. Header layout: [0]=type [1]=flags [2..3]=seq (wraps at 16 bits)
 * [4..7]=timestamp [8..9]=width [10..11]=height [12..15]=payload length.
 */
function buildFrame(type, flags, seq, timestamp, width, height, payload) {
  // Allocate the whole frame once instead of concatenating header + payload.
  const frame = Buffer.alloc(HEADER_SIZE + payload.length);
  frame[0] = type;
  frame[1] = flags;
  frame.writeUInt16LE(seq & 0xFFFF, 2);
  frame.writeUInt32LE(timestamp >>> 0, 4);
  frame.writeUInt16LE(width, 8);
  frame.writeUInt16LE(height, 10);
  frame.writeUInt32LE(payload.length, 12);
  payload.copy(frame, HEADER_SIZE);
  return frame;
}
|
||
|
||
/**
 * Wrap an encoded image in a CompressedPixels frame.
 * Payload layout: 1 format byte (FMT_WEBP / FMT_JPEG) followed by the raw
 * encoded image bytes; header flags mark it compressed + keyframe.
 */
function buildCompressedPixelFrame(seq, timestamp, width, height, format, imageData) {
  const payload = Buffer.concat([Buffer.from([format]), imageData]);
  const flags = FLAG_COMPRESSED | FLAG_KEYFRAME;
  return buildFrame(FRAME_COMPRESSED_PIXELS, flags, seq, timestamp, width, height, payload);
}
|
||
|
||
// ─── 1. Launch Chrome ───
/**
 * Spawn a local Chrome with remote debugging on CDP_PORT and resolve with the
 * child process once the "DevTools listening" banner appears on stderr, or
 * after a 4 s grace period (some builds emit the banner differently).
 * Fix: the fallback timer is now cleared once resolved, so it no longer keeps
 * the event loop alive (and no longer double-resolves) after a fast start.
 * @returns {Promise<import('child_process').ChildProcess>}
 */
function launchChrome() {
  return new Promise((resolve, reject) => {
    const args = [
      `--remote-debugging-port=${CDP_PORT}`,
      `--window-size=${WIDTH},${HEIGHT}`,
      '--disable-gpu', '--no-first-run', '--no-default-browser-check',
      '--disable-extensions', '--disable-translate', '--disable-sync',
      '--disable-background-networking', '--disable-default-apps',
      '--mute-audio', '--no-sandbox',
    ];
    if (HEADLESS) args.push('--headless=new');
    args.push('about:blank');

    const proc = spawn('google-chrome', args, { stdio: ['pipe', 'pipe', 'pipe'] });

    // Best-effort fallback: assume Chrome is up after 4 s even without the banner.
    const fallback = setTimeout(() => resolve(proc), 4000);

    proc.stderr.on('data', d => {
      if (d.toString().includes('DevTools listening')) {
        clearTimeout(fallback);
        resolve(proc);
      }
    });
    proc.on('error', err => {
      clearTimeout(fallback);
      reject(err);
    });
    // FIXME(review): this fires even when a caller kills Chrome intentionally
    // (e.g. selfTest), terminating the whole agent with exit 0 — confirm
    // whether a crash should instead propagate Chrome's exit code.
    proc.on('exit', code => { console.log(`[Chrome] exit ${code}`); process.exit(0); });
  });
}
|
||
|
||
// ─── 2. WebSocket server ───
/**
 * Start the frame WebSocket server. Clients subscribe to a channel via
 * /stream/{channel} (default channel "default"); when --auth-token is set,
 * a matching ?token= query param or x-auth-token header is required.
 * Fix: totalConnections / peakConcurrent / clientTracker were declared and
 * reported by /health, /metrics and /clients but never updated anywhere —
 * they are now maintained on connect/close.
 * @returns {WebSocketServer}
 */
function startWS() {
  const wss = new WebSocketServer({ host: '0.0.0.0', port: WS_PORT });
  wss.on('connection', (ws, req) => {
    // v0.10: Auth token validation
    if (AUTH_TOKEN) {
      const url = new URL(req.url, `http://${req.headers.host}`);
      const token = url.searchParams.get('token') || req.headers['x-auth-token'];
      if (token !== AUTH_TOKEN) {
        console.log(`[WS] Rejected: invalid auth token`);
        ws.close(4001, 'Unauthorized');
        return;
      }
    }

    // Parse channel from path: /stream/channelName or /stream (default)
    const path = req.url || '/stream';
    const parts = path.replace(/^\//, '').split('/');
    const channel = parts[1]?.split('?')[0] || 'default';

    if (!clients.has(channel)) clients.set(channel, new Set());
    clients.get(channel).add(ws);

    // v0.11: connection-pool metrics (surfaced by /health and /metrics).
    totalConnections++;
    const concurrent = Array.from(clients.values()).reduce((sum, s) => sum + s.size, 0);
    if (concurrent > peakConcurrent) peakConcurrent = concurrent;

    // v0.14: per-client tracking (surfaced by /clients). seq/ack/bytes are
    // advanced by the broadcast path.
    clientTracker.set(ws, { id: totalConnections, seq: 0, ack: 0, bytes: 0, connectedAt: Date.now() });

    console.log(`[WS] +1 on "${channel}" (${clients.get(channel).size})`);

    ws.on('close', () => {
      clientTracker.delete(ws);
      const ch = clients.get(channel);
      if (ch) {
        ch.delete(ws);
        console.log(`[WS] -1 on "${channel}" (${ch.size})`);
      }
    });
    // Incoming frames are dispatched to whatever screencast wired this socket.
    ws.on('message', data => { ws._inputHandler?.(data); });
  });
  console.log(`[WS] Panels: ws://0.0.0.0:${WS_PORT}/stream/{channel}`);
  return wss;
}
|
||
|
||
// ─── 3. Monitor page ───
/**
 * Start the monitor HTTP server: serves a self-contained viewer page at /,
 * plus JSON/Prometheus endpoints: /health, /stats, /replay, /hot-reload,
 * /clients, /metrics, /tabs, /screenshot.
 */
function startMonitor() {
  // NOTE(review): 'fmt' is computed but never used below — candidate for removal.
  const fmt = IMAGE_FORMAT === 'webp' ? 'image/webp' : 'image/jpeg';
  // Viewer page: decodes CompressedPixels frames client-side and forwards
  // clicks back as ds-stream pointer-input frames.
  const html = `<!DOCTYPE html><html><head>
<title>DreamStack Screencast v0.3</title>
<style>
*{margin:0;padding:0;box-sizing:border-box}
body{background:#0a0a0a;display:flex;flex-direction:column;align-items:center;justify-content:center;height:100vh;font-family:system-ui;color:#999}
canvas{border:1px solid #333;border-radius:8px;max-height:85vh;cursor:crosshair}
#hud{margin-top:10px;font:13px/1.5 monospace;text-align:center}
h3{margin-bottom:6px;color:#555;font-size:14px}
</style></head><body>
<h3>DreamStack Screencast Monitor v0.3</h3>
<canvas id="c" width="${WIDTH}" height="${HEIGHT}"></canvas>
<div id="hud">Connecting…</div>
<script>
const c=document.getElementById('c'),ctx=c.getContext('2d'),hud=document.getElementById('hud');
c.style.width=Math.min(${WIDTH},innerWidth*.45)+'px';c.style.height='auto';
let fr=0,by=0,t=Date.now();
const ws=new WebSocket('ws://'+location.hostname+':${WS_PORT}/stream/default');
ws.binaryType='arraybuffer';
ws.onopen=()=>{hud.textContent='Connected — waiting for frames…'};
ws.onmessage=e=>{
const buf=new Uint8Array(e.data);
if(buf.length<16)return;
const type=buf[0];
if(type!==0x02)return; // CompressedPixels only
const payloadLen=buf[12]|(buf[13]<<8)|(buf[14]<<16)|(buf[15]<<24);
const fmtByte=buf[16]; // format byte
const imageData=buf.slice(17); // actual image data
fr++;by+=imageData.length;
const mime=fmtByte===1?'image/webp':'image/jpeg';
const blob=new Blob([imageData],{type:mime});
const url=URL.createObjectURL(blob);
const img=new Image();
img.onload=()=>{ctx.drawImage(img,0,0);URL.revokeObjectURL(url)};
img.src=url;
const now=Date.now();
if(now-t>1000){
hud.textContent='FPS: '+(fr/((now-t)/1000)).toFixed(1)+' | '+(by/1024/((now-t)/1000)).toFixed(0)+' KB/s | Frame: '+(imageData.length/1024).toFixed(1)+'KB';
fr=0;by=0;t=now;
}
};
c.addEventListener('click',e=>{
const r=c.getBoundingClientRect(),sx=${WIDTH}/r.width,sy=${HEIGHT}/r.height;
const x=Math.round((e.clientX-r.left)*sx),y=Math.round((e.clientY-r.top)*sy);
// Send as ds-stream INPUT_PTR_DOWN then INPUT_PTR_UP
function sendInput(type,x,y){
const b=new ArrayBuffer(16+4);const dv=new DataView(b);const u=new Uint8Array(b);
u[0]=type;u[1]=0x08;dv.setUint16(8,x,true);dv.setUint16(10,y,true);dv.setUint32(12,4,true);
dv.setUint16(16,x,true);dv.setUint16(18,y,true);
ws.send(b);
}
sendInput(0x02,x,y);
setTimeout(()=>sendInput(0x03,x,y),50);
hud.textContent+=' | Click: ('+x+','+y+')';
});
ws.onclose=()=>{hud.textContent='Disconnected — reload to retry'};
</script></body></html>`;

  const monitorServer = http.createServer((req, res) => {
    // /health — liveness + aggregate counters as JSON.
    if (req.url === '/health') {
      const concurrent = Array.from(clients.values()).reduce((sum, s) => sum + s.size, 0);
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({
        uptime: Math.round((Date.now() - t0) / 1000),
        frames: globalFrameCount,
        connections: concurrent,
        totalConnections,
        peakConcurrent,
        reconnectCount,
        qualityTier: aq.tierName,
        recording: !!recordStream,
        recordedFrames: recordFrameCount,
      }));
      return;
    }
    // /stats — per-channel throughput figures as JSON.
    if (req.url === '/stats') {
      const stats = {};
      for (const [ch, data] of channelStats.entries()) {
        // NOTE(review): channelStats entries define lastStatTime, not lastTs —
        // 'age' (and lastFrameAge below) evaluates to NaN; confirm intended field.
        const age = Date.now() - data.lastTs;
        stats[ch] = {
          frames: data.frames,
          bytes: data.bytes,
          byteRate: data.bytes > 0 ? Math.round(data.bytes / (Math.max(1, Date.now() - t0) / 1000)) : 0,
          clients: clients.has(ch) ? clients.get(ch).size : 0,
          lastFrameAge: age,
        };
      }
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify(stats));
      return;
    }
    // /replay?from=&to= — recorded session frames as an MJPEG-style multipart stream.
    // NOTE(review): sessionFrames does not appear to be populated in this file.
    if (req.url?.startsWith('/replay')) {
      const params = new URL(req.url, 'http://localhost').searchParams;
      const from = parseInt(params.get('from') || '0');
      const to = parseInt(params.get('to') || String(sessionFrames.length));
      const slice = sessionFrames.slice(from, to);
      res.writeHead(200, {
        'Content-Type': 'multipart/x-mixed-replace; boundary=frame',
        'Cache-Control': 'no-cache',
      });
      for (const f of slice) {
        const mime = IMAGE_FORMAT === 'webp' ? 'image/webp' : 'image/jpeg';
        res.write(`--frame\r\nContent-Type: ${mime}\r\n\r\n`);
        res.write(f.data);
        res.write('\r\n');
      }
      res.end('--frame--');
      return;
    }
    // POST /hot-reload {"url": ...} — repoint the capture target without restarting.
    if (req.url === '/hot-reload' && req.method === 'POST') {
      let body = '';
      req.on('data', chunk => body += chunk);
      req.on('end', () => {
        try {
          const { url } = JSON.parse(body);
          if (url) {
            currentTargetUrl = url;
            console.log(`[HotReload] Target URL changed to: ${url}`);
            res.writeHead(200, { 'Content-Type': 'application/json' });
            res.end(JSON.stringify({ ok: true, url }));
          } else {
            res.writeHead(400); res.end('{"error":"missing url"}');
          }
        } catch (e) {
          res.writeHead(400); res.end('{"error":"invalid JSON"}');
        }
      });
      return;
    }
    // /clients — per-connection lag/byte counters from clientTracker.
    if (req.url === '/clients') {
      const clientList = [];
      for (const [ws, info] of clientTracker.entries()) {
        clientList.push({
          id: info.id,
          seq: info.seq,
          ack: info.ack,
          lag: info.seq - info.ack,
          bytes: info.bytes,
          connectedSec: Math.round((Date.now() - info.connectedAt) / 1000),
        });
      }
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ clients: clientList, count: clientList.length }));
      return;
    }
    // /metrics — Prometheus text exposition format.
    if (req.url === '/metrics') {
      const concurrent = Array.from(clients.values()).reduce((sum, s) => sum + s.size, 0);
      const uptime = Math.round((Date.now() - t0) / 1000);
      const lines = [
        `# HELP ds_screencast_uptime_seconds Uptime in seconds`,
        `# TYPE ds_screencast_uptime_seconds gauge`,
        `ds_screencast_uptime_seconds ${uptime}`,
        `# HELP ds_screencast_frames_total Total frames captured`,
        `# TYPE ds_screencast_frames_total counter`,
        `ds_screencast_frames_total ${globalFrameCount}`,
        `# HELP ds_screencast_connections_active Active WebSocket connections`,
        `# TYPE ds_screencast_connections_active gauge`,
        `ds_screencast_connections_active ${concurrent}`,
        `# HELP ds_screencast_connections_total Total connections since start`,
        `# TYPE ds_screencast_connections_total counter`,
        `ds_screencast_connections_total ${totalConnections}`,
        `# HELP ds_screencast_peak_concurrent Peak concurrent connections`,
        `# TYPE ds_screencast_peak_concurrent gauge`,
        `ds_screencast_peak_concurrent ${peakConcurrent}`,
      ];
      res.writeHead(200, { 'Content-Type': 'text/plain; version=0.0.4; charset=utf-8' });
      res.end(lines.join('\n') + '\n');
      return;
    }
    // GET /tabs — v0.16 tab registry listing.
    if (req.url === '/tabs' && req.method === 'GET') {
      const tabs = [];
      for (const [id, info] of tabRegistry.entries()) {
        tabs.push({ id, url: info.url, active: info.active });
      }
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ tabs, count: tabs.length }));
      return;
    }
    // /screenshot — one-shot capture over a fresh CDP connection.
    if (req.url === '/screenshot') {
      // Capture via active CDP session
      connectCDP(1).then(async (client) => {
        const { Page } = client;
        // CDP has no webp screenshot format; fall back to png for webp config.
        const { data } = await Page.captureScreenshot({ format: IMAGE_FORMAT === 'webp' ? 'png' : 'jpeg', quality: QUALITY });
        const imgBuf = Buffer.from(data, 'base64');
        res.writeHead(200, { 'Content-Type': IMAGE_FORMAT === 'webp' ? 'image/png' : 'image/jpeg' });
        res.end(imgBuf);
        client.close();
      }).catch(err => {
        res.writeHead(500); res.end('Screenshot failed: ' + err.message);
      });
      return;
    }
    // Default: serve the monitor page.
    res.writeHead(200, { 'Content-Type': 'text/html' }); res.end(html);
  });
  monitorServer.listen(MONITOR_PORT, '0.0.0.0', () => console.log(`[Monitor] http://0.0.0.0:${MONITOR_PORT}`));
}
|
||
|
||
// ─── 4. CDP screencast loop (with reconnection) ───
/**
 * Connect to Chrome DevTools on CDP_PORT with capped exponential backoff
 * (1000 * 1.5^attempt, max 10 s between tries).
 * Fix: the caught connection error is no longer discarded — the final throw
 * carries it as `cause` so operators can see why Chrome was unreachable.
 * @param {number} maxRetries attempts before giving up (default 10)
 * @returns {Promise<object>} connected CDP client
 * @throws {Error} when all attempts fail
 */
async function connectCDP(maxRetries = 10) {
  let lastErr;
  for (let i = 0; i < maxRetries; i++) {
    try {
      return await CDP({ port: CDP_PORT });
    } catch (err) {
      lastErr = err; // keep the real failure instead of swallowing it
      const delay = Math.min(1000 * Math.pow(1.5, i), 10000);
      console.log(`[CDP] Connect attempt ${i + 1}/${maxRetries} failed, retry in ${Math.round(delay)}ms...`);
      await new Promise(r => setTimeout(r, delay));
    }
  }
  throw new Error('Cannot connect to Chrome CDP', { cause: lastErr });
}
|
||
|
||
/**
 * Attach to Chrome over CDP, navigate to `url`, and stream screencast frames
 * to every WebSocket client subscribed to `channel` as ds-stream
 * CompressedPixels frames. Client→server traffic on the same socket is either
 * an ACK (feeds the adaptive-quality controller) or an input frame replayed
 * into the page. Re-invokes itself on CDP disconnect.
 * Fixes:
 *  - lastFrameTime is now updated per frame, so the stream watchdog's
 *    stale-channel detection actually has data to act on (it never fired before).
 *  - encoding vars are declared before wireInput, removing the TDZ hazard on
 *    `cdpFormat`/`currentQuality` inside the ACK handler.
 *  - per-client seq/byte counters (served by /clients) are kept current on send.
 */
async function startScreencast(channel = 'default', url = TARGET_URL) {
  const client = await connectCDP();
  const { Page, Input, Emulation } = client;

  await Page.enable();
  await Emulation.setDeviceMetricsOverride({
    width: WIDTH, height: HEIGHT, deviceScaleFactor: 1, mobile: true,
  });
  await Emulation.setTouchEmulationEnabled({ enabled: true });
  await Page.navigate({ url });
  await new Promise(r => setTimeout(r, 2000)); // let the page settle before casting

  // Initialize channel stats
  if (!channelStats.has(channel)) {
    channelStats.set(channel, { frames: 0, bytes: 0, seq: 0, lastStatTime: Date.now(), statFrames: 0, statBytes: 0 });
  }
  const stats = channelStats.get(channel);

  // Determine encoding up front — these are captured by the ACK handler below.
  const useWebP = IMAGE_FORMAT === 'webp';
  const formatByte = useWebP ? FMT_WEBP : FMT_JPEG;
  const cdpFormat = useWebP ? 'png' : 'jpeg'; // CDP doesn't support webp directly, fallback

  let currentQuality = QUALITY;
  let skipCounter = 0;

  // v0.4: ACK listener — update adaptive quality; everything else is input.
  const wireInput = (ws) => {
    ws._inputHandler = (data) => {
      const buf = Buffer.from(data);
      if (buf.length >= HEADER_SIZE && buf[0] === FRAME_ACK) {
        // ACK frame: extract RTT from payload
        const payloadLen = buf.readUInt32LE(12);
        if (payloadLen >= 4) {
          const rttMs = buf.readUInt16LE(HEADER_SIZE + 2);
          aq.recordAck(rttMs);

          // Restart the screencast at the new quality if the tier changed
          // (quality 0 = Minimal tier → keep the previous encode quality).
          const newQuality = aq.quality || QUALITY;
          if (newQuality !== currentQuality && newQuality > 0) {
            currentQuality = newQuality;
            Page.stopScreencast().catch(() => { });
            Page.startScreencast({
              format: cdpFormat, quality: currentQuality,
              maxWidth: WIDTH, maxHeight: HEIGHT,
              everyNthFrame: Math.max(1, Math.round(60 / MAX_FPS)),
            }).catch(() => { });
            console.log(`[AQ:${channel}] Tier: ${aq.tierName} → q${currentQuality}`);
          }
        }
        return; // don't forward ACK as input
      }
      handleInput(buf, Input, Page);
    };
  };

  // Existing clients
  const ch = clients.get(channel);
  if (ch) for (const ws of ch) wireInput(ws);

  // Watch for new connections: monkey-patch Set.add so late joiners on this
  // channel also get an input handler.
  const origSet = clients.get(channel) || new Set();
  const origAdd = origSet.add.bind(origSet);
  origSet.add = function (ws) {
    origAdd(ws);
    wireInput(ws);
  };
  clients.set(channel, origSet);

  // Start screencast (v0.4: quality is adjusted later by the ACK handler)
  await Page.startScreencast({
    format: cdpFormat, quality: currentQuality,
    maxWidth: WIDTH, maxHeight: HEIGHT,
    everyNthFrame: Math.max(1, Math.round(60 / MAX_FPS)),
  });

  client.on('event', (message) => {
    if (message.method !== 'Page.screencastFrame') return;
    const { sessionId, data, metadata } = message.params;
    Page.screencastFrameAck({ sessionId }).catch(() => { });

    // Feed the stream watchdog. Previously nothing ever wrote to
    // lastFrameTime, so stale-channel detection could never trigger.
    lastFrameTime.set(channel, Date.now());

    // v0.4: adaptive frame skipping
    if (aq.skipFrames > 0) {
      skipCounter++;
      if (skipCounter <= aq.skipFrames) return;
      skipCounter = 0;
    }

    stats.seq++;
    stats.frames++;
    stats.statFrames++;
    globalFrameCount++;

    const imageBuf = Buffer.from(data, 'base64');
    const timestamp = (Date.now() - t0) >>> 0;
    const w = metadata.deviceWidth || WIDTH;
    const h = metadata.deviceHeight || HEIGHT;

    // Build ds-stream CompressedPixels frame
    const frame = buildCompressedPixelFrame(stats.seq, timestamp, w, h, formatByte, imageBuf);

    stats.bytes += frame.length;
    stats.statBytes += frame.length;
    globalBytesSent += frame.length;

    // Broadcast to channel
    const receivers = clients.get(channel);
    if (receivers) {
      for (const ws of receivers) {
        if (ws.readyState === 1) {
          ws.send(frame);
          // v0.14: keep per-client counters (served by /clients) current.
          // No-op when the socket was never registered in clientTracker.
          const info = clientTracker.get(ws);
          if (info) {
            info.seq = stats.seq;
            info.bytes += frame.length;
          }
        }
      }
    }

    // v0.5: Write to recording file
    if (recordStream) {
      const tsUs = BigInt(Date.now() - t0) * 1000n;
      const tsBuf = Buffer.alloc(8);
      tsBuf.writeBigUInt64LE(tsUs);
      const lenBuf = Buffer.alloc(4);
      lenBuf.writeUInt32LE(frame.length);
      recordStream.write(tsBuf);
      recordStream.write(lenBuf);
      recordStream.write(frame);
      recordFrameCount++;
    }
  });

  // Handle CDP disconnect — attempt reconnection
  client.on('disconnect', async () => {
    console.log(`[CDP:${channel}] Disconnected! Attempting reconnection...`);
    try {
      await startScreencast(channel, url);
    } catch (err) {
      console.error(`[CDP:${channel}] Reconnection failed:`, err.message);
    }
  });

  console.log(`[CDP:${channel}] Casting ${url} → ${WIDTH}×${HEIGHT} @ q${QUALITY} (${IMAGE_FORMAT})`);
}
|
||
|
||
// ─── 5. Stats logging ───
/**
 * Every STATS_INTERVAL ms, print per-channel throughput since the previous
 * tick, plus a global summary when more than one channel exists or any
 * receiver is connected.
 */
function startStatsLogger() {
  setInterval(() => {
    const now = Date.now();
    const totalElapsed = (now - t0) / 1000;

    let totalReceivers = 0;
    for (const receiverSet of clients.values()) {
      totalReceivers += receiverSet.size;
    }

    for (const [channel, stats] of channelStats) {
      const elapsed = (now - stats.lastStatTime) / 1000;
      if (elapsed < 1) continue; // avoid divide-by-tiny-window noise
      const fps = stats.statFrames / elapsed;
      const kbps = stats.statBytes / 1024 / elapsed;
      console.log(`[Stats:${channel}] ${fps.toFixed(1)} fps | ${kbps.toFixed(0)} KB/s | total: ${stats.frames} frames | ${(stats.bytes / 1024 / 1024).toFixed(1)} MB`);
      // Reset the per-interval window; cumulative counters keep growing.
      stats.statFrames = 0;
      stats.statBytes = 0;
      stats.lastStatTime = now;
    }

    if (channelStats.size > 1 || totalReceivers > 0) {
      console.log(`[Stats:global] up=${totalElapsed.toFixed(0)}s | ${globalFrameCount} frames | ${(globalBytesSent / 1024 / 1024).toFixed(1)} MB | ${totalReceivers} receivers`);
    }
  }, STATS_INTERVAL);
}
|
||
|
||
// ─── Input handler (ds-stream protocol) ───
/**
 * Decode one ds-stream input frame and replay it into the page via CDP.
 * Frames lacking FLAG_INPUT (and below the reserved 0xF0 range) are routed
 * to the legacy v0.2 decoder instead.
 */
function handleInput(buf, Input, Page) {
  if (buf.length < HEADER_SIZE) return;

  const type = buf[0];
  const flags = buf[1];

  // Only process input frames (FLAG_INPUT=0x08)
  if (!(flags & FLAG_INPUT) && type < 0xF0) {
    // Legacy compatibility: try type byte directly
    handleLegacyInput(buf, Input, Page);
    return;
  }

  const payloadLen = buf.readUInt32LE(12);
  const payload = buf.slice(HEADER_SIZE, HEADER_SIZE + payloadLen);
  // Most input payloads start with a little-endian (x, y) pair.
  const pointAt = (off) => [payload.readUInt16LE(off), payload.readUInt16LE(off + 2)];

  switch (type) {
    case INPUT_POINTER: { // Mouse move
      if (payload.length < 4) break;
      const [x, y] = pointAt(0);
      Input.dispatchMouseEvent({ type: 'mouseMoved', x, y }).catch(() => { });
      break;
    }
    case INPUT_PTR_DOWN: { // Mouse/touch down
      if (payload.length < 4) break;
      const [x, y] = pointAt(0);
      Input.dispatchMouseEvent({ type: 'mousePressed', x, y, button: 'left', clickCount: 1 }).catch(() => { });
      break;
    }
    case INPUT_PTR_UP: { // Mouse/touch up
      if (payload.length < 4) break;
      const [x, y] = pointAt(0);
      Input.dispatchMouseEvent({ type: 'mouseReleased', x, y, button: 'left', clickCount: 1 }).catch(() => { });
      break;
    }
    case INPUT_KEY_DOWN:
    case INPUT_KEY_UP: { // Keyboard
      if (payload.length < 2) break;
      const keyCode = payload.readUInt16LE(0);
      const key = String.fromCharCode(keyCode);
      Input.dispatchKeyEvent({
        type: type === INPUT_KEY_DOWN ? 'keyDown' : 'keyUp',
        key, code: `Key${key.toUpperCase()}`,
        windowsVirtualKeyCode: keyCode,
      }).catch(() => { });
      break;
    }
    case INPUT_TOUCH: { // Touch start/move
      if (payload.length < 4) break;
      const [x, y] = pointAt(0);
      Input.dispatchTouchEvent({
        type: 'touchStart',
        touchPoints: [{ x, y, id: 0, radiusX: 10, radiusY: 10, force: 1 }]
      }).catch(() => { });
      break;
    }
    case INPUT_TOUCH_END: { // Touch end
      if (payload.length < 4) break;
      const [x, y] = pointAt(0);
      Input.dispatchTouchEvent({
        type: 'touchEnd',
        touchPoints: [{ x, y, id: 0, radiusX: 10, radiusY: 10, force: 0 }]
      }).catch(() => { });
      break;
    }
    case INPUT_SCROLL: { // Scroll: signed deltas then pointer position
      if (payload.length < 8) break;
      const dx = payload.readInt16LE(0);
      const dy = payload.readInt16LE(2);
      const [x, y] = pointAt(4);
      Input.dispatchMouseEvent({ type: 'mouseWheel', x, y, deltaX: dx, deltaY: dy }).catch(() => { });
      break;
    }
    case 0x30: { // GamepadAxis
      if (payload.length < 4) break;
      const axisIdx = payload.readUInt8(0);
      const value = payload.readInt16LE(1); // -32768 to 32767
      // Map gamepad axis to scroll or custom event
      console.log(`[Gamepad] Axis ${axisIdx}: ${value}`);
      break;
    }
    case 0x31: { // GamepadButton
      if (payload.length < 2) break;
      const buttonIdx = payload.readUInt8(0);
      const pressed = payload.readUInt8(1);
      // Map common gamepad buttons to keyboard events
      const keyMap = { 0: 'Enter', 1: 'Escape', 12: 'ArrowUp', 13: 'ArrowDown', 14: 'ArrowLeft', 15: 'ArrowRight' };
      const key = keyMap[buttonIdx];
      if (key) {
        Input.dispatchKeyEvent({
          type: pressed ? 'keyDown' : 'keyUp',
          key, code: key,
        }).catch(() => { });
      }
      break;
    }
  }
}
|
||
|
||
// Legacy input handler (v0.2 format)
// 0x60 = touch: [phase][x:u16][y:u16][pointer-id]; 0x63 = navigate: [len:u16][url bytes].
function handleLegacyInput(buf, Input, Page) {
  if (buf.length < 1) return;

  const opcode = buf[0];

  if (opcode === 0x60 && buf.length >= 7) {
    const phase = buf[1]; // 0 = start, 1 = move, anything else = end
    const x = buf.readUInt16LE(2);
    const y = buf.readUInt16LE(4);
    let type;
    if (phase === 0) type = 'touchStart';
    else if (phase === 1) type = 'touchMove';
    else type = 'touchEnd';
    Input.dispatchTouchEvent({
      type, touchPoints: [{ x, y, id: buf[6] || 0, radiusX: 10, radiusY: 10, force: phase === 2 ? 0 : 1 }]
    }).catch(() => { });
  }

  if (opcode === 0x63 && buf.length >= 3) {
    const urlLen = buf.readUInt16LE(1);
    const url = buf.slice(3, 3 + urlLen).toString();
    Page.navigate({ url }).catch(() => { });
  }
}
|
||
|
||
// ─── v0.10: Stream Watchdog ───
const WATCHDOG_INTERVAL = 10000; // 10s
// channel → epoch-ms of the most recent frame observed on that channel.
const lastFrameTime = new Map();

/**
 * Every WATCHDOG_INTERVAL, probe CDP for each channel whose last frame is
 * older than the interval.
 * Fix: the probe client returned by connectCDP(3) was never closed, leaking
 * one DevTools connection per stale tick — it is now closed immediately; the
 * probe only verifies that Chrome is still reachable.
 * FIXME(review): a successful probe does not restart the screencast itself —
 * confirm whether a stale channel should re-enter startScreencast().
 */
function startWatchdog() {
  setInterval(() => {
    const now = Date.now();
    for (const [channel, ts] of lastFrameTime.entries()) {
      if (now - ts <= WATCHDOG_INTERVAL) continue;
      console.log(`[Watchdog] Channel "${channel}" stale (${Math.round((now - ts) / 1000)}s) — reconnecting...`);
      lastFrameTime.set(channel, now); // prevent spam
      // Probe CDP reachability, then release the temporary connection.
      connectCDP(3)
        .then(client => client.close())
        .catch(err => console.error(`[Watchdog] Reconnect failed: ${err.message}`));
    }
  }, WATCHDOG_INTERVAL);
}
|
||
|
||
// v0.12: Startup self-test
/**
 * Validate the runtime environment before streaming:
 *  1. Chrome can be launched and reached over CDP.
 *  2. The WS port is free to bind.
 * Exits the process (code 1) on any failure.
 * Fix: launchChrome installs an 'exit' handler that calls process.exit(0) —
 * killing the test Chrome used to terminate the whole agent mid-test, so that
 * listener is removed before kill(). Also uses CommonJS require('net') to
 * match the rest of the file instead of a dynamic import.
 */
async function selfTest() {
  console.log('[SelfTest] Validating Chrome, CDP, WS...');
  try {
    const chrome = await launchChrome();
    const client = await connectCDP(3);
    await client.close();
    // Drop launchChrome's exit handler first — otherwise killing the test
    // instance fires process.exit(0) and aborts the agent.
    chrome.removeAllListeners('exit');
    chrome.kill();
    console.log('[SelfTest] ✓ Chrome + CDP OK');
  } catch (err) {
    logError('SelfTest', `Chrome/CDP failed: ${err.message}`);
    console.error('[SelfTest] ✗ FAILED — aborting');
    process.exit(1);
  }
  try {
    const net = require('net');
    // Bind-and-release to prove the WS port is available.
    await new Promise((resolve, reject) => {
      const srv = net.createServer();
      srv.listen(WS_PORT, '0.0.0.0', () => { srv.close(resolve); });
      srv.on('error', reject);
    });
    console.log('[SelfTest] ✓ WS port available');
  } catch (err) {
    logError('SelfTest', `WS port ${WS_PORT} unavailable: ${err.message}`);
    console.error('[SelfTest] ✗ Port conflict — aborting');
    process.exit(1);
  }
  console.log('[SelfTest] All checks passed\n');
}
|
||
|
||
// ─── Main ───
|
||
/**
 * Entry point: optionally runs the startup self-test, prints the config
 * banner, launches Chrome plus the WS/monitor/stats/watchdog services,
 * opens one screencast per configured URL or tab, and wires crash-restart
 * and graceful shutdown.
 *
 * Fixes:
 *  - On RESTART_ON_CRASH the crash handler re-entered main() without
 *    closing the previous WebSocket server, so the restarted instance
 *    died with EADDRINUSE when startWS() rebound the port. We now close
 *    wss before scheduling the restart.
 *  - Each re-entry of main() stacked additional SIGINT/SIGTERM listeners
 *    via process.on(); we use process.once() plus a shuttingDown guard so
 *    shutdown runs exactly once and listeners self-remove.
 *  - The crash-restart handler could fire during a deliberate shutdown
 *    (gracefulShutdown calls chrome.kill()); it now respects the guard.
 */
async function main() {
  // v0.12: run self-test first
  if (process.argv.includes('--self-test')) {
    await selfTest();
  }

  console.log(`\n DreamStack Screencast v0.16.0`);
  console.log(` ──────────────────────────────`);
  console.log(` URL: ${MULTI_URLS.length > 0 ? MULTI_URLS.join(', ') : TARGET_URL}`);
  console.log(` Viewport: ${WIDTH}×${HEIGHT} (scale: ${VIEWPORT_TRANSFORM})`);
  console.log(` Quality: ${QUALITY}% FPS: ${MAX_FPS}`);
  console.log(` Format: ${IMAGE_FORMAT.toUpperCase()}`);
  console.log(` Profile: ${PROFILE || 'custom'}`);
  console.log(` ROI: ${ROI.length === 4 ? ROI.join(',') : 'full viewport'}`);
  console.log(` Tabs: ${MULTI_URLS.length || TAB_COUNT}`);
  console.log(` Audio: ${ENABLE_AUDIO}`);
  console.log(` Record: ${RECORD_FILE || 'disabled'}`);
  console.log(` Auth: ${AUTH_TOKEN ? 'enabled' : 'disabled'}`);
  console.log(` Headless: ${HEADLESS}`);
  console.log(` WS Port: ${WS_PORT} Monitor: ${MONITOR_PORT}`);
  console.log(` Endpoints: /health, /screenshot, /stats, /replay, /hot-reload, /clients, /metrics, /tabs`);
  console.log(` Watchdog: ${WATCHDOG_INTERVAL / 1000}s stale detection`);
  console.log(` IdleFPS: ${MAX_FPS_IDLE}`);
  console.log(` Debug: ${DEBUG_OVERLAY}`);
  console.log(` CrashRst: ${RESTART_ON_CRASH}`);
  console.log(` Compress: ${COMPRESS_FRAMES}`);
  console.log(` Migrate: ${MIGRATE_ON_CRASH}`);
  console.log(` Adaptive: ${ADAPTIVE_BITRATE}`);
  console.log(` CDN Push: ${CDN_PUSH_URL || 'disabled'}`);
  console.log(` Encrypt: ${ENCRYPT_KEY ? 'enabled' : 'disabled'}`);
  console.log(` Watermark: ${WATERMARK_TEXT || 'disabled'}`);
  console.log(` Graceful: ${GRACEFUL_SHUTDOWN}`);
  console.log(` ErrorLog: ${ERROR_LOG_FILE || 'disabled'}`);
  console.log(` SessRec: ${SESSION_RECORD_DIR || 'disabled'} (max ${MAX_RECORD_FRAMES})\n`);

  const chrome = await launchChrome();
  const wss = startWS();
  startMonitor();
  startStatsLogger();
  startWatchdog();

  // Guard shared by shutdown and crash-restart: once we start tearing
  // down deliberately, nothing else should act on this instance.
  let shuttingDown = false;

  // v0.10: Multi-URL routing — one screencast channel per URL/tab.
  if (MULTI_URLS.length > 0) {
    for (let i = 0; i < MULTI_URLS.length; i++) {
      const channel = `url-${i}`;
      await startScreencast(channel, MULTI_URLS[i]);
      lastFrameTime.set(channel, Date.now());
    }
  } else if (TAB_COUNT === 1) {
    await startScreencast('default', TARGET_URL);
    lastFrameTime.set('default', Date.now());
  } else {
    for (let i = 0; i < TAB_COUNT; i++) {
      const channel = `tab-${i}`;
      await startScreencast(channel, TARGET_URL);
      lastFrameTime.set(channel, Date.now());
    }
  }

  console.log(`\n ✓ Streaming! Panels → ws://0.0.0.0:${WS_PORT}/stream/{channel}`);
  console.log(` ✓ Monitor → http://localhost:${MONITOR_PORT}\n`);

  // v0.12: restart on crash detection
  if (RESTART_ON_CRASH) {
    chrome.on('exit', (code) => {
      if (shuttingDown) return; // deliberate kill, not a crash
      logError('Chrome', `Chrome exited with code ${code}, restarting...`);
      reconnectCount++;
      // Release the WS port before re-running main(), otherwise the
      // restarted instance fails with EADDRINUSE in startWS().
      wss.close();
      setTimeout(() => {
        main().catch(err => logError('Fatal', err.message));
      }, 2000);
    });
  }

  // v0.10: Graceful shutdown for both SIGINT and SIGTERM
  function gracefulShutdown(signal) {
    if (shuttingDown) return; // both signals may arrive; run once
    shuttingDown = true;
    console.log(`\n[${signal}] Shutting down...`);
    if (recordStream) {
      recordStream.end();
      console.log(`[Record] Saved ${recordFrameCount} frames to ${RECORD_FILE}`);
    }
    if (errorLogStream) errorLogStream.end();
    // Close all WebSocket connections
    for (const set of clients.values()) {
      for (const ws of set) {
        ws.close(1001, 'Server shutting down');
      }
    }
    wss.close();
    chrome.kill();
    process.exit(0);
  }
  // `once` so handlers self-remove after firing and do not pile up when
  // RESTART_ON_CRASH re-enters main().
  process.once('SIGINT', () => gracefulShutdown('SIGINT'));
  process.once('SIGTERM', () => gracefulShutdown('SIGTERM'));
}
|
||
|
||
// Kick everything off; any unhandled error is fatal.
main()
  .catch((err) => {
    console.error('Fatal:', err);
    process.exit(1);
  });
|