v0.4 — CLI Modularity: - Split monolithic main.rs (2,038 lines) into 8 command modules + slim dispatch (107 lines) - Add 12 JS codegen tests (signals, derived, views, events, loops, match, enums, components, interpolation, springs, tree-shaking) v0.5 — Diagnostic Quality + Analyzer Confidence: - Add From<ParseError> for Diagnostic (E0001) in ds-diagnostic - Add errors_as_diagnostics() to TypeChecker (E0100–E0110) - Wire Elm-style diagnostics through dreamstack check and build commands - Switch incremental compiler to parse_program_resilient() for multi-error collection - Add 12 analyzer tests (chains, fan-out, diamond deps, empty programs, conditionals, handlers, views) - Add 2 diagnostic conversion tests Test suite: 97 → 123 tests (26 new, 0 failures)
642 lines
24 KiB
JavaScript
642 lines
24 KiB
JavaScript
#!/usr/bin/env node
|
||
/**
|
||
* DreamStack Screencast — CDP Capture Agent v0.5.0
|
||
*
|
||
* Streams any web page to DreamStack panels via Chrome DevTools Protocol.
|
||
* Outputs frames using the ds-stream binary protocol (16-byte header).
|
||
*
|
||
* Usage:
|
||
* node capture.js [url] [options]
|
||
*
|
||
* Options:
|
||
* --headless Run Chrome in headless mode
|
||
* --fps=N Max frames per second (default: 30)
|
||
* --quality=N JPEG/WebP quality 1-100 (default: 75)
|
||
* --width=N Viewport width (default: 800)
|
||
* --height=N Viewport height (default: 1280)
|
||
* --ws-port=N WebSocket server port (default: 9300)
|
||
* --monitor-port=N Monitor HTTP port (default: 9301)
|
||
* --cdp-port=N Chrome DevTools port (default: 9222)
|
||
* --format=FORMAT Image format: jpeg or webp (default: jpeg)
|
||
* --tabs=N Number of tabs to capture (default: 1)
|
||
* --audio           Enable audio capture (requires Chrome audio)
* --record=FILE     Record the stream to FILE (dsrec format)
|
||
*
|
||
* Examples:
|
||
* node capture.js http://localhost:3000
|
||
* node capture.js https://react.dev --headless --fps=30 --format=webp
|
||
* node capture.js http://localhost:5173 --width=1024 --height=768 --tabs=3
|
||
*/
|
||
|
||
// Node core
const { spawn } = require('child_process');
const fs = require('fs'); // used by --record frame capture
const http = require('http');
// Third-party
const CDP = require('chrome-remote-interface');
const { WebSocketServer } = require('ws');
|
||
|
||
// ─── Parse CLI Args ───
|
||
/**
 * Read a `--name=value` CLI flag.
 *
 * @param {string} name - Flag name without the leading dashes.
 * @param {*} defaultVal - Fallback used when the flag is absent; stringified.
 * @returns {string} The flag's value, or `String(defaultVal)`.
 */
function getArg(name, defaultVal) {
  const prefix = `--${name}=`;
  const found = process.argv.find((a) => a.startsWith(prefix));
  // Slice past the prefix instead of split('=')[1] so values that themselves
  // contain '=' (e.g. --record=a=b.dsrec) are returned intact.
  return found ? found.slice(prefix.length) : String(defaultVal);
}
|
||
|
||
// ─── Config ───
// All values come from CLI flags (see the usage header); argv[2] is the URL.
// parseInt is given an explicit radix so values like "0x10" don't surprise.
const TARGET_URL = process.argv[2] || 'http://localhost:3000';
const WIDTH = parseInt(getArg('width', 800), 10);           // viewport width (px)
const HEIGHT = parseInt(getArg('height', 1280), 10);        // viewport height (px)
const WS_PORT = parseInt(getArg('ws-port', 9300), 10);      // panel WebSocket port
const MONITOR_PORT = parseInt(getArg('monitor-port', 9301), 10); // monitor HTTP port
const CDP_PORT = parseInt(getArg('cdp-port', 9222), 10);    // Chrome DevTools port
const QUALITY = parseInt(getArg('quality', 75), 10);        // encoder quality 1-100
const MAX_FPS = parseInt(getArg('fps', 30), 10);            // frame-rate ceiling
const IMAGE_FORMAT = getArg('format', 'jpeg'); // 'jpeg' or 'webp'
const TAB_COUNT = parseInt(getArg('tabs', 1), 10);          // channels to cast
const HEADLESS = process.argv.includes('--headless');
const ENABLE_AUDIO = process.argv.includes('--audio');
const RECORD_FILE = getArg('record', '');                   // '' disables recording
const STATS_INTERVAL = 5000; // ms between stats log lines
|
||
|
||
// v0.5: Recording file stream (--record=FILE).
// On-disk layout: 3-byte magic "DS\x01", 24 reserved metadata bytes, then per
// frame (written in startScreencast): u64le timestamp (µs) + u32le length +
// the raw ds-stream frame. Uses `fs`, required at the top of the file.
let recordStream = null;
let recordFrameCount = 0; // frames written so far; reported by /health and on exit
if (RECORD_FILE) {
  // Magic: 'D', 'S', format version 0x01.
  const DSREC_MAGIC = Buffer.from([0x44, 0x53, 0x01]);
  recordStream = fs.createWriteStream(RECORD_FILE);
  recordStream.write(DSREC_MAGIC);
  // Reserve 24 bytes for metadata (will be patched on close)
  // NOTE(review): nothing in this file patches these bytes on shutdown — confirm
  // whether a downstream tool fills them in or this is a TODO.
  recordStream.write(Buffer.alloc(24));
  console.log(`[Record] Writing to ${RECORD_FILE}`);
}
|
||
|
||
// ─── ds-stream Protocol Constants ───
// Every message begins with a 16-byte little-endian header (see buildFrame):
//   [0] type   [1] flags   [2..3] seq u16   [4..7] timestamp u32
//   [8..9] width u16   [10..11] height u16   [12..15] payload length u32
const FRAME_COMPRESSED_PIXELS = 0x02; // frame type: encoded image payload
const FLAG_COMPRESSED = 0x04;
const FLAG_KEYFRAME = 0x02;
const FLAG_INPUT = 0x08; // set on client→agent input frames
const HEADER_SIZE = 16;

// Input types (ds-stream protocol) — dispatched in handleInput
const INPUT_POINTER = 0x01;   // pointer move
const INPUT_PTR_DOWN = 0x02;  // pointer press
const INPUT_PTR_UP = 0x03;    // pointer release
const INPUT_KEY_DOWN = 0x10;
const INPUT_KEY_UP = 0x11;
const INPUT_TOUCH = 0x20;     // touch start
const INPUT_TOUCH_END = 0x21;
const INPUT_SCROLL = 0x50;

// Compressed pixel format byte — first payload byte of a CompressedPixels frame
const FMT_WEBP = 1;
const FMT_JPEG = 2;

// v0.4: ACK and Audio frame types
const FRAME_ACK = 0x0E;   // client ACK carrying RTT; feeds AdaptiveQuality
const FRAME_AUDIO = 0x08; // reserved — not emitted anywhere in this file

// ─── Shared mutable state ───
const clients = new Map(); // channel → Set<ws>
let globalFrameCount = 0;
let globalBytesSent = 0;
const t0 = Date.now(); // process start; frame timestamps are relative to this

// Stats per channel (populated lazily by startScreencast)
const channelStats = new Map();
|
||
|
||
// ─── v0.4: Adaptive Quality Controller ───
// Keeps a sliding window of client-reported round-trip times and maps their
// mean onto one of three delivery tiers.
class AdaptiveQuality {
  constructor() {
    this.rttHistory = [];
    this.maxHistory = 30;
    this.currentTier = 0; // 0=Full, 1=Reduced, 2=Minimal
  }

  /** Record one ACK round-trip time (ms) and re-evaluate the tier. */
  recordAck(rttMs) {
    this.rttHistory.push(rttMs);
    while (this.rttHistory.length > this.maxHistory) {
      this.rttHistory.shift();
    }
    this.currentTier = this.computeTier();
  }

  /** Map the mean RTT onto a tier: <50ms → 0 (Full), <150ms → 1, else 2. */
  computeTier() {
    const samples = this.rttHistory.length;
    if (samples === 0) return 0;
    let total = 0;
    for (const rtt of this.rttHistory) total += rtt;
    const mean = total / samples;
    if (mean < 50) return 0;
    return mean < 150 ? 1 : 2;
  }

  // Per-tier knobs. quality 0 at tier 2 means "leave encoder quality alone"
  // (consumers treat 0 as falsy); skipFrames 999999 effectively pauses output.
  get quality() { return [75, 40, 0][this.currentTier]; }
  get skipFrames() { return [0, 2, 999999][this.currentTier]; }
  get tierName() { return ['Full', 'Reduced', 'Minimal'][this.currentTier]; }
}

const aq = new AdaptiveQuality();
|
||
|
||
// ─── ds-stream Frame Builder ───

/**
 * Assemble one ds-stream message: 16-byte little-endian header + payload.
 *
 * @param {number} type - Frame type byte (e.g. FRAME_COMPRESSED_PIXELS).
 * @param {number} flags - Flag bits (FLAG_COMPRESSED, FLAG_KEYFRAME, ...).
 * @param {number} seq - Sequence counter; wrapped to 16 bits.
 * @param {number} timestamp - Milliseconds since stream start; wrapped to u32.
 * @param {number} width - Frame width in pixels (u16).
 * @param {number} height - Frame height in pixels (u16).
 * @param {Buffer} payload - Frame body appended after the header.
 * @returns {Buffer} Header and payload concatenated.
 */
function buildFrame(type, flags, seq, timestamp, width, height, payload) {
  const header = Buffer.alloc(HEADER_SIZE);
  header[0] = type;
  header[1] = flags;
  header.writeUInt16LE(seq & 0xFFFF, 2);    // seq wraps at 16 bits
  header.writeUInt32LE(timestamp >>> 0, 4); // force unsigned 32-bit
  header.writeUInt16LE(width, 8);
  header.writeUInt16LE(height, 10);
  header.writeUInt32LE(payload.length, 12);
  return Buffer.concat([header, payload]);
}
|
||
|
||
/**
 * Wrap an encoded image in a CompressedPixels frame.
 * Payload layout: 1 format byte (FMT_JPEG / FMT_WEBP) followed by the image
 * bytes; flags mark the frame compressed and a keyframe.
 */
function buildCompressedPixelFrame(seq, timestamp, width, height, format, imageData) {
  const body = Buffer.concat([Buffer.from([format]), imageData]);
  const flags = FLAG_COMPRESSED | FLAG_KEYFRAME;
  return buildFrame(FRAME_COMPRESSED_PIXELS, flags, seq, timestamp, width, height, body);
}
|
||
|
||
// ─── 1. Launch Chrome ───

/**
 * Spawn a local `google-chrome` with remote debugging on CDP_PORT and resolve
 * with the child process once DevTools reports it is listening — or after a
 * 4-second fallback timeout in case that stderr banner is missed (a second
 * resolve() is a harmless no-op). Rejects only if spawn itself errors.
 * When Chrome exits, the whole agent exits with it.
 */
function launchChrome() {
  return new Promise((resolve, reject) => {
    const args = [
      `--remote-debugging-port=${CDP_PORT}`,
      `--window-size=${WIDTH},${HEIGHT}`,
      '--disable-gpu', '--no-first-run', '--no-default-browser-check',
      '--disable-extensions', '--disable-translate', '--disable-sync',
      '--disable-background-networking', '--disable-default-apps',
      '--mute-audio', '--no-sandbox',
    ];
    if (HEADLESS) args.push('--headless=new');
    args.push('about:blank');

    const proc = spawn('google-chrome', args, { stdio: ['pipe', 'pipe', 'pipe'] });
    // Chrome prints "DevTools listening on ws://..." to stderr when ready.
    proc.stderr.on('data', d => {
      if (d.toString().includes('DevTools listening')) resolve(proc);
    });
    proc.on('error', reject);
    // Chrome going away makes the agent useless — shut down with it.
    // NOTE(review): this path exits without flushing recordStream (unlike the
    // SIGINT handler in main) — confirm a Chrome crash mid-recording is OK.
    proc.on('exit', code => { console.log(`[Chrome] exit ${code}`); process.exit(0); });
    setTimeout(() => resolve(proc), 4000);
  });
}
|
||
|
||
// ─── 2. WebSocket server ───

/**
 * Start the panel-facing WebSocket server.
 * Clients connect to /stream/{channel} (or /stream → channel "default") and
 * are grouped into the module-level `clients` map; startScreencast broadcasts
 * frames per channel. Incoming messages are forwarded to the socket's
 * `_inputHandler`, which startScreencast installs.
 *
 * @returns {WebSocketServer} The listening server.
 */
function startWS() {
  const wss = new WebSocketServer({ host: '0.0.0.0', port: WS_PORT });
  wss.on('connection', (ws, req) => {
    // Parse channel from path: /stream/channelName or /stream (default).
    // Fix: drop any query string so "/stream/foo?token=x" maps to "foo"
    // instead of creating a channel literally named "foo?token=x".
    const path = (req.url || '/stream').split('?')[0];
    const parts = path.replace(/^\//, '').split('/');
    const channel = parts[1] || 'default';

    if (!clients.has(channel)) clients.set(channel, new Set());
    // Note: startScreencast patches this Set's add() to wire input handling.
    clients.get(channel).add(ws);
    console.log(`[WS] +1 on "${channel}" (${clients.get(channel).size})`);

    ws.on('close', () => {
      const ch = clients.get(channel);
      if (ch) {
        ch.delete(ws);
        console.log(`[WS] -1 on "${channel}" (${ch.size})`);
      }
    });
    ws.on('message', data => { ws._inputHandler?.(data); });
  });
  console.log(`[WS] Panels: ws://0.0.0.0:${WS_PORT}/stream/{channel}`);
  return wss;
}
|
||
|
||
// ─── 3. Monitor page ───

/**
 * Serve the built-in monitor page plus two extra endpoints:
 *   /            — HTML page rendering the "default" channel on a canvas
 *   /health      — JSON status (uptime, frames, connections, tier, recording)
 *   /screenshot  — one-off capture via a fresh CDP connection
 * The page decodes CompressedPixels frames client-side and sends clicks back
 * as ds-stream pointer-down/up input frames.
 * Fixes: removed the unused `fmt` local; monitor labels now say v0.5 to match
 * this agent's version.
 */
function startMonitor() {
  const html = `<!DOCTYPE html><html><head>
<title>DreamStack Screencast v0.5</title>
<style>
*{margin:0;padding:0;box-sizing:border-box}
body{background:#0a0a0a;display:flex;flex-direction:column;align-items:center;justify-content:center;height:100vh;font-family:system-ui;color:#999}
canvas{border:1px solid #333;border-radius:8px;max-height:85vh;cursor:crosshair}
#hud{margin-top:10px;font:13px/1.5 monospace;text-align:center}
h3{margin-bottom:6px;color:#555;font-size:14px}
</style></head><body>
<h3>DreamStack Screencast Monitor v0.5</h3>
<canvas id="c" width="${WIDTH}" height="${HEIGHT}"></canvas>
<div id="hud">Connecting…</div>
<script>
const c=document.getElementById('c'),ctx=c.getContext('2d'),hud=document.getElementById('hud');
c.style.width=Math.min(${WIDTH},innerWidth*.45)+'px';c.style.height='auto';
let fr=0,by=0,t=Date.now();
const ws=new WebSocket('ws://'+location.hostname+':${WS_PORT}/stream/default');
ws.binaryType='arraybuffer';
ws.onopen=()=>{hud.textContent='Connected — waiting for frames…'};
ws.onmessage=e=>{
const buf=new Uint8Array(e.data);
if(buf.length<16)return;
const type=buf[0];
if(type!==0x02)return; // CompressedPixels only
const payloadLen=buf[12]|(buf[13]<<8)|(buf[14]<<16)|(buf[15]<<24);
const fmtByte=buf[16]; // format byte
const imageData=buf.slice(17); // actual image data
fr++;by+=imageData.length;
const mime=fmtByte===1?'image/webp':'image/jpeg';
const blob=new Blob([imageData],{type:mime});
const url=URL.createObjectURL(blob);
const img=new Image();
img.onload=()=>{ctx.drawImage(img,0,0);URL.revokeObjectURL(url)};
img.src=url;
const now=Date.now();
if(now-t>1000){
hud.textContent='FPS: '+(fr/((now-t)/1000)).toFixed(1)+' | '+(by/1024/((now-t)/1000)).toFixed(0)+' KB/s | Frame: '+(imageData.length/1024).toFixed(1)+'KB';
fr=0;by=0;t=now;
}
};
c.addEventListener('click',e=>{
const r=c.getBoundingClientRect(),sx=${WIDTH}/r.width,sy=${HEIGHT}/r.height;
const x=Math.round((e.clientX-r.left)*sx),y=Math.round((e.clientY-r.top)*sy);
// Send as ds-stream INPUT_PTR_DOWN then INPUT_PTR_UP
function sendInput(type,x,y){
const b=new ArrayBuffer(16+4);const dv=new DataView(b);const u=new Uint8Array(b);
u[0]=type;u[1]=0x08;dv.setUint16(8,x,true);dv.setUint16(10,y,true);dv.setUint32(12,4,true);
dv.setUint16(16,x,true);dv.setUint16(18,y,true);
ws.send(b);
}
sendInput(0x02,x,y);
setTimeout(()=>sendInput(0x03,x,y),50);
hud.textContent+=' | Click: ('+x+','+y+')';
});
ws.onclose=()=>{hud.textContent='Disconnected — reload to retry'};
</script></body></html>`;

  const monitorServer = http.createServer((req, res) => {
    if (req.url === '/health') {
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({
        uptime: Math.round((Date.now() - t0) / 1000),
        frames: globalFrameCount,
        connections: Array.from(clients.values()).reduce((sum, s) => sum + s.size, 0),
        qualityTier: aq.tierName,
        recording: !!recordStream,
        recordedFrames: recordFrameCount,
      }));
      return;
    }
    if (req.url === '/screenshot') {
      // Capture via a fresh CDP session (single connect attempt).
      // CDP captureScreenshot has no webp output here, so webp mode falls
      // back to png for this endpoint.
      connectCDP(1).then(async (client) => {
        const { Page } = client;
        const { data } = await Page.captureScreenshot({ format: IMAGE_FORMAT === 'webp' ? 'png' : 'jpeg', quality: QUALITY });
        const imgBuf = Buffer.from(data, 'base64');
        res.writeHead(200, { 'Content-Type': IMAGE_FORMAT === 'webp' ? 'image/png' : 'image/jpeg' });
        res.end(imgBuf);
        client.close();
      }).catch(err => {
        res.writeHead(500); res.end('Screenshot failed: ' + err.message);
      });
      return;
    }
    // Anything else gets the monitor page itself.
    res.writeHead(200, { 'Content-Type': 'text/html' }); res.end(html);
  });
  monitorServer.listen(MONITOR_PORT, '0.0.0.0', () => console.log(`[Monitor] http://0.0.0.0:${MONITOR_PORT}`));
}
|
||
|
||
// ─── 4. CDP screencast loop (with reconnection) ───

/**
 * Connect to Chrome's DevTools endpoint, retrying with exponential backoff
 * (×1.5 per attempt, capped at 10s between tries).
 *
 * @param {number} [maxRetries=10] - Attempts before giving up.
 * @returns {Promise<object>} Connected chrome-remote-interface client.
 * @throws {Error} When every attempt fails.
 */
async function connectCDP(maxRetries = 10) {
  let attempt = 0;
  while (attempt < maxRetries) {
    try {
      return await CDP({ port: CDP_PORT });
    } catch {
      const delay = Math.min(1000 * Math.pow(1.5, attempt), 10000);
      attempt += 1;
      console.log(`[CDP] Connect attempt ${attempt}/${maxRetries} failed, retry in ${Math.round(delay)}ms...`);
      await new Promise((resolve) => setTimeout(resolve, delay));
    }
  }
  throw new Error('Cannot connect to Chrome CDP');
}
|
||
|
||
/**
 * Attach to Chrome over CDP, navigate to `url`, and stream Page screencast
 * frames to every WebSocket client on `channel` as ds-stream CompressedPixels
 * frames. Also installs a per-socket input handler: FRAME_ACK messages feed
 * the adaptive-quality controller, everything else goes to handleInput.
 * Re-invokes itself on CDP disconnect.
 *
 * @param {string} [channel='default'] - Broadcast channel in `clients`.
 * @param {string} [url=TARGET_URL] - Page to navigate to and cast.
 */
async function startScreencast(channel = 'default', url = TARGET_URL) {
  const client = await connectCDP();
  const { Page, Input, Emulation } = client;

  await Page.enable();
  // Force the configured viewport (mobile-style metrics + touch events).
  await Emulation.setDeviceMetricsOverride({
    width: WIDTH, height: HEIGHT, deviceScaleFactor: 1, mobile: true,
  });
  await Emulation.setTouchEmulationEnabled({ enabled: true });
  await Page.navigate({ url });
  // Crude settle delay: give the page 2s to render before casting.
  await new Promise(r => setTimeout(r, 2000));

  // Initialize channel stats
  if (!channelStats.has(channel)) {
    channelStats.set(channel, { frames: 0, bytes: 0, seq: 0, lastStatTime: Date.now(), statFrames: 0, statBytes: 0 });
  }
  const stats = channelStats.get(channel);

  // v0.4: ACK listener — update adaptive quality.
  // NOTE: this handler closes over `cdpFormat` and `currentQuality`, which are
  // declared further down. That is safe only because the code between here and
  // those declarations is fully synchronous, so no ws message can invoke the
  // handler while they are still in their temporal dead zone.
  const wireInput = (ws) => {
    ws._inputHandler = (data) => {
      const buf = Buffer.from(data);
      if (buf.length >= HEADER_SIZE && buf[0] === FRAME_ACK) {
        // ACK frame: extract RTT (u16le) from payload bytes 2..3.
        const payloadLen = buf.readUInt32LE(12);
        if (payloadLen >= 4) {
          const rttMs = buf.readUInt16LE(HEADER_SIZE + 2);
          aq.recordAck(rttMs);

          // Adjust quality if the tier changed. aq.quality is 0 on the
          // "Minimal" tier, so `|| QUALITY` plus the `> 0` guard means we
          // never restart the cast at quality 0 — Minimal relies on frame
          // skipping instead.
          const newQuality = aq.quality || QUALITY;
          if (newQuality !== currentQuality && newQuality > 0) {
            currentQuality = newQuality;
            // CDP has no "change quality" call: stop and restart the cast.
            Page.stopScreencast().catch(() => { });
            Page.startScreencast({
              format: cdpFormat, quality: currentQuality,
              maxWidth: WIDTH, maxHeight: HEIGHT,
              everyNthFrame: Math.max(1, Math.round(60 / MAX_FPS)),
            }).catch(() => { });
            console.log(`[AQ:${channel}] Tier: ${aq.tierName} → q${currentQuality}`);
          }
        }
        return; // don't forward ACK as input
      }
      handleInput(buf, Input, Page);
    };
  };

  // Existing clients
  const ch = clients.get(channel);
  if (ch) for (const ws of ch) wireInput(ws);

  // Watch for new connections to this channel: patch the Set's add() so every
  // socket joining later (via startWS) gets an input handler too.
  const origSet = clients.get(channel) || new Set();
  const origAdd = origSet.add.bind(origSet);
  origSet.add = function (ws) {
    origAdd(ws);
    wireInput(ws);
  };
  clients.set(channel, origSet);

  // Determine format
  const useWebP = IMAGE_FORMAT === 'webp';
  const formatByte = useWebP ? FMT_WEBP : FMT_JPEG;
  const cdpFormat = useWebP ? 'png' : 'jpeg'; // CDP doesn't support webp directly, fallback

  // Start screencast (v0.4: use adaptive quality)
  let currentQuality = QUALITY;
  let skipCounter = 0;

  await Page.startScreencast({
    format: cdpFormat, quality: currentQuality,
    maxWidth: WIDTH, maxHeight: HEIGHT,
    everyNthFrame: Math.max(1, Math.round(60 / MAX_FPS)),
  });

  client.on('event', (message) => {
    if (message.method !== 'Page.screencastFrame') return;
    const { sessionId, data, metadata } = message.params;
    // Ack immediately so Chrome keeps producing frames even if we skip this one.
    Page.screencastFrameAck({ sessionId }).catch(() => { });

    // v0.4: adaptive frame skipping
    if (aq.skipFrames > 0) {
      skipCounter++;
      if (skipCounter <= aq.skipFrames) return;
      skipCounter = 0;
    }

    stats.seq++;
    stats.frames++;
    stats.statFrames++;
    globalFrameCount++;

    const imageBuf = Buffer.from(data, 'base64');
    const timestamp = (Date.now() - t0) >>> 0; // ms since start, wrapped to u32
    const w = metadata.deviceWidth || WIDTH;
    const h = metadata.deviceHeight || HEIGHT;

    // Build ds-stream CompressedPixels frame
    const frame = buildCompressedPixelFrame(stats.seq, timestamp, w, h, formatByte, imageBuf);

    stats.bytes += frame.length;
    stats.statBytes += frame.length;
    globalBytesSent += frame.length;

    // Broadcast to every OPEN (readyState 1) socket on this channel.
    const receivers = clients.get(channel);
    if (receivers) {
      for (const ws of receivers) {
        if (ws.readyState === 1) ws.send(frame);
      }
    }

    // v0.5: Write to recording file — u64le timestamp (µs) + u32le length + frame.
    if (recordStream) {
      const tsUs = BigInt(Date.now() - t0) * 1000n;
      const tsBuf = Buffer.alloc(8);
      tsBuf.writeBigUInt64LE(tsUs);
      const lenBuf = Buffer.alloc(4);
      lenBuf.writeUInt32LE(frame.length);
      recordStream.write(tsBuf);
      recordStream.write(lenBuf);
      recordStream.write(frame);
      recordFrameCount++;
    }
  });

  // Handle CDP disconnect — attempt reconnection by restarting this function.
  client.on('disconnect', async () => {
    console.log(`[CDP:${channel}] Disconnected! Attempting reconnection...`);
    try {
      await startScreencast(channel, url);
    } catch (err) {
      console.error(`[CDP:${channel}] Reconnection failed:`, err.message);
    }
  });

  console.log(`[CDP:${channel}] Casting ${url} → ${WIDTH}×${HEIGHT} @ q${QUALITY} (${IMAGE_FORMAT})`);
}
|
||
|
||
// ─── 5. Stats logging ───

/**
 * Every STATS_INTERVAL ms, log per-channel throughput for the window since
 * that channel's previous report, then a global summary line when there is
 * more than one channel or at least one connected receiver.
 */
function startStatsLogger() {
  setInterval(() => {
    const now = Date.now();
    const totalElapsed = (now - t0) / 1000;

    let totalReceivers = 0;
    for (const receiverSet of clients.values()) totalReceivers += receiverSet.size;

    for (const [channel, stats] of channelStats) {
      const elapsed = (now - stats.lastStatTime) / 1000;
      // Skip windows under a second to avoid noisy / divide-by-tiny rates.
      if (elapsed < 1) continue;
      const fps = stats.statFrames / elapsed;
      const kbps = stats.statBytes / 1024 / elapsed;
      console.log(`[Stats:${channel}] ${fps.toFixed(1)} fps | ${kbps.toFixed(0)} KB/s | total: ${stats.frames} frames | ${(stats.bytes / 1024 / 1024).toFixed(1)} MB`);
      stats.statFrames = 0;
      stats.statBytes = 0;
      stats.lastStatTime = now;
    }

    if (channelStats.size > 1 || totalReceivers > 0) {
      console.log(`[Stats:global] up=${totalElapsed.toFixed(0)}s | ${globalFrameCount} frames | ${(globalBytesSent / 1024 / 1024).toFixed(1)} MB | ${totalReceivers} receivers`);
    }
  }, STATS_INTERVAL);
}
|
||
|
||
// ─── Input handler (ds-stream protocol) ───

/**
 * Dispatch one client input frame to Chrome via CDP.
 * Requires a full 16-byte ds-stream header; frames without FLAG_INPUT (and a
 * type byte below 0xF0) are routed to the legacy v0.2 handler instead.
 * Frames whose payload is shorter than the type requires are dropped, and all
 * CDP dispatch promises are fire-and-forget.
 *
 * @param {Buffer} buf - Raw message from the WebSocket.
 * @param {object} Input - CDP Input domain.
 * @param {object} Page - CDP Page domain (passed through to legacy handler).
 */
function handleInput(buf, Input, Page) {
  if (buf.length < HEADER_SIZE) return;
  const type = buf[0];
  const flags = buf[1];

  // Only process input frames (FLAG_INPUT=0x08); otherwise try legacy format.
  if (!(flags & FLAG_INPUT) && type < 0xF0) {
    handleLegacyInput(buf, Input, Page);
    return;
  }

  const payloadLen = buf.readUInt32LE(12);
  const payload = buf.slice(HEADER_SIZE, HEADER_SIZE + payloadLen);

  // Decoder for the common [x u16le][y u16le] layout at a given offset.
  const pointAt = (off) => ({ x: payload.readUInt16LE(off), y: payload.readUInt16LE(off + 2) });
  const swallow = () => { };

  if (type === INPUT_POINTER && payload.length >= 4) {
    // Mouse move
    const { x, y } = pointAt(0);
    Input.dispatchMouseEvent({ type: 'mouseMoved', x, y }).catch(swallow);
  } else if (type === INPUT_PTR_DOWN && payload.length >= 4) {
    // Mouse/touch down
    const { x, y } = pointAt(0);
    Input.dispatchMouseEvent({ type: 'mousePressed', x, y, button: 'left', clickCount: 1 }).catch(swallow);
  } else if (type === INPUT_PTR_UP && payload.length >= 4) {
    // Mouse/touch up
    const { x, y } = pointAt(0);
    Input.dispatchMouseEvent({ type: 'mouseReleased', x, y, button: 'left', clickCount: 1 }).catch(swallow);
  } else if ((type === INPUT_KEY_DOWN || type === INPUT_KEY_UP) && payload.length >= 2) {
    // Keyboard: payload carries a u16 key code.
    const keyCode = payload.readUInt16LE(0);
    const key = String.fromCharCode(keyCode);
    Input.dispatchKeyEvent({
      type: type === INPUT_KEY_DOWN ? 'keyDown' : 'keyUp',
      key, code: `Key${key.toUpperCase()}`,
      windowsVirtualKeyCode: keyCode,
    }).catch(swallow);
  } else if (type === INPUT_TOUCH && payload.length >= 4) {
    // Touch start
    const { x, y } = pointAt(0);
    Input.dispatchTouchEvent({
      type: 'touchStart',
      touchPoints: [{ x, y, id: 0, radiusX: 10, radiusY: 10, force: 1 }]
    }).catch(swallow);
  } else if (type === INPUT_TOUCH_END && payload.length >= 4) {
    // Touch end
    const { x, y } = pointAt(0);
    Input.dispatchTouchEvent({
      type: 'touchEnd',
      touchPoints: [{ x, y, id: 0, radiusX: 10, radiusY: 10, force: 0 }]
    }).catch(swallow);
  } else if (type === INPUT_SCROLL && payload.length >= 8) {
    // Scroll: [dx i16][dy i16][x u16][y u16]
    const dx = payload.readInt16LE(0);
    const dy = payload.readInt16LE(2);
    const { x, y } = pointAt(4);
    Input.dispatchMouseEvent({ type: 'mouseWheel', x, y, deltaX: dx, deltaY: dy }).catch(swallow);
  } else if (type === 0x30 && payload.length >= 4) {
    // GamepadAxis: logged only — no CDP mapping yet.
    const axisIdx = payload.readUInt8(0);
    const value = payload.readInt16LE(1); // -32768 to 32767
    console.log(`[Gamepad] Axis ${axisIdx}: ${value}`);
  } else if (type === 0x31 && payload.length >= 2) {
    // GamepadButton: map common buttons onto keyboard events.
    const buttonIdx = payload.readUInt8(0);
    const pressed = payload.readUInt8(1);
    const keyMap = { 0: 'Enter', 1: 'Escape', 12: 'ArrowUp', 13: 'ArrowDown', 14: 'ArrowLeft', 15: 'ArrowRight' };
    const key = keyMap[buttonIdx];
    if (key) {
      Input.dispatchKeyEvent({
        type: pressed ? 'keyDown' : 'keyUp',
        key, code: key,
      }).catch(swallow);
    }
  }
}
|
||
|
||
// Legacy input handler (v0.2 format)

/**
 * Handle pre-ds-stream raw input messages (no 16-byte header).
 * Opcode 0x60: touch — [phase u8][x u16le][y u16le][id u8]
 *   phase 0 = start, 1 = move, anything else = end (force 0 only for phase 2).
 * Opcode 0x63: navigate — [len u16le][url bytes].
 */
function handleLegacyInput(buf, Input, Page) {
  if (buf.length < 1) return;
  const opcode = buf[0];

  if (opcode === 0x60 && buf.length >= 7) {
    const phase = buf[1];
    const x = buf.readUInt16LE(2);
    const y = buf.readUInt16LE(4);
    let type;
    if (phase === 0) type = 'touchStart';
    else if (phase === 1) type = 'touchMove';
    else type = 'touchEnd';
    Input.dispatchTouchEvent({
      type,
      touchPoints: [{ x, y, id: buf[6] || 0, radiusX: 10, radiusY: 10, force: phase === 2 ? 0 : 1 }]
    }).catch(() => { });
  }
  if (opcode === 0x63 && buf.length >= 3) {
    const len = buf.readUInt16LE(1);
    const url = buf.slice(3, 3 + len).toString();
    Page.navigate({ url }).catch(() => { });
  }
}
|
||
|
||
// ─── Main ───

/**
 * Entry point: print the effective configuration, launch Chrome, start the
 * WebSocket/monitor/stats services, then begin one screencast per tab.
 * Installs a SIGINT handler that flushes the recording (if any) before exit.
 */
async function main() {
  console.log(`\n DreamStack Screencast v0.5.0`);
  console.log(` ──────────────────────────`);
  console.log(` URL: ${TARGET_URL}`);
  console.log(` Viewport: ${WIDTH}×${HEIGHT}`);
  console.log(` Quality: ${QUALITY}% FPS: ${MAX_FPS}`);
  console.log(` Format: ${IMAGE_FORMAT.toUpperCase()}`);
  console.log(` Tabs: ${TAB_COUNT}`);
  console.log(` Audio: ${ENABLE_AUDIO}`);
  console.log(` Record: ${RECORD_FILE || 'disabled'}`);
  console.log(` Headless: ${HEADLESS}`);
  console.log(` WS Port: ${WS_PORT} Monitor: ${MONITOR_PORT}`);
  console.log(` Endpoints: /health, /screenshot`);
  console.log(` Adaptive: ACK-driven quality\n`);

  const chrome = await launchChrome();
  startWS();
  startMonitor();
  startStatsLogger();

  // Start screencast for each tab.
  // NOTE(review): with TAB_COUNT > 1 every channel attaches via connectCDP()
  // without selecting a distinct target, so all channels appear to cast the
  // same page — confirm intended.
  if (TAB_COUNT === 1) {
    await startScreencast('default', TARGET_URL);
  } else {
    for (let i = 0; i < TAB_COUNT; i++) {
      const channel = `tab-${i}`;
      await startScreencast(channel, TARGET_URL);
    }
  }

  console.log(`\n ✓ Streaming! Panels → ws://0.0.0.0:${WS_PORT}/stream/{channel}`);
  console.log(` ✓ Monitor → http://localhost:${MONITOR_PORT}\n`);

  process.on('SIGINT', () => {
    console.log('\n[Stop]');
    const shutdown = () => {
      chrome.kill();
      process.exit(0);
    };
    if (recordStream) {
      // Fix: stream.end() flushes asynchronously — exiting immediately after
      // calling it could drop the buffered tail of the recording. Wait for
      // the flush callback before killing Chrome and exiting.
      recordStream.end(() => {
        console.log(`[Record] Saved ${recordFrameCount} frames to ${RECORD_FILE}`);
        shutdown();
      });
    } else {
      shutdown();
    }
  });
}

main().catch(err => { console.error('Fatal:', err); process.exit(1); });