feat(compiler): v0.4 + v0.5 — CLI modularity, diagnostic pipeline, test suite

v0.4 — CLI Modularity:
- Split monolithic main.rs (2,038 lines) into 8 command modules + slim dispatch (107 lines)
- Add 12 JS codegen tests (signals, derived, views, events, loops, match, enums, components, interpolation, springs, tree-shaking)

v0.5 — Diagnostic Quality + Analyzer Confidence:
- Add From<ParseError> for Diagnostic (E0001) in ds-diagnostic
- Add errors_as_diagnostics() to TypeChecker (E0100–E0110)
- Wire Elm-style diagnostics through dreamstack check and build commands
- Switch incremental compiler to parse_program_resilient() for multi-error collection
- Add 12 analyzer tests (chains, fan-out, diamond deps, empty programs, conditionals, handlers, views)
- Add 2 diagnostic conversion tests

Test suite: 97 → 123 tests (26 new, 0 failures)
This commit is contained in:
enzotar 2026-03-10 09:09:02 -07:00
parent 878e55b962
commit b0440e2e47
42 changed files with 7170 additions and 2220 deletions

View file

@ -2,6 +2,7 @@
resolver = "2"
members = [
"compiler/ds-parser",
"compiler/ds-diagnostic",
"compiler/ds-analyzer",
"compiler/ds-codegen",
"compiler/ds-layout",
@ -21,6 +22,7 @@ license = ""
[workspace.dependencies]
ds-parser = { path = "compiler/ds-parser" }
ds-diagnostic = { path = "compiler/ds-diagnostic" }
ds-analyzer = { path = "compiler/ds-analyzer" }
ds-codegen = { path = "compiler/ds-codegen" }
ds-layout = { path = "compiler/ds-layout" }

View file

@ -5,3 +5,4 @@ edition.workspace = true
[dependencies]
ds-parser = { workspace = true }
ds-diagnostic = { workspace = true }

View file

@ -5,7 +5,8 @@
/// - Derived signals: `let doubled = count * 2` (computed, auto-tracked)
/// - Effects: DOM bindings that update when their dependencies change
use ds_parser::{Program, Declaration, Expr, BinOp, Container, Element, LetDecl, ViewDecl};
use ds_parser::{Program, Declaration, Expr, BinOp, Container, Element, LetDecl, ViewDecl, Span};
use ds_diagnostic::{Diagnostic, Severity};
use std::collections::{HashMap, HashSet};
/// The complete signal dependency graph for a program.
@ -239,33 +240,114 @@ impl SignalGraph {
}
/// Get topological order for signal propagation.
pub fn topological_order(&self) -> Vec<usize> {
/// Returns (order, diagnostics) — diagnostics contain cycle errors if any.
pub fn topological_order(&self) -> (Vec<usize>, Vec<Diagnostic>) {
let mut visited = HashSet::new();
let mut in_stack = HashSet::new(); // for cycle detection
let mut order = Vec::new();
let mut diagnostics = Vec::new();
for node in &self.nodes {
if !visited.contains(&node.id) {
self.topo_visit(node.id, &mut visited, &mut order);
self.topo_visit(node.id, &mut visited, &mut in_stack, &mut order, &mut diagnostics);
}
}
order
(order, diagnostics)
}
fn topo_visit(&self, id: usize, visited: &mut HashSet<usize>, order: &mut Vec<usize>) {
fn topo_visit(
&self,
id: usize,
visited: &mut HashSet<usize>,
in_stack: &mut HashSet<usize>,
order: &mut Vec<usize>,
diagnostics: &mut Vec<Diagnostic>,
) {
if visited.contains(&id) {
return;
}
visited.insert(id);
if in_stack.contains(&id) {
// Cycle detected!
let node = &self.nodes[id];
diagnostics.push(Diagnostic::error(
format!("circular signal dependency: `{}` depends on itself", node.name),
Span { start: 0, end: 0, line: 0, col: 0 },
).with_code("E1001"));
return;
}
in_stack.insert(id);
for dep in &self.nodes[id].dependencies {
if let Some(dep_id) = dep.signal_id {
self.topo_visit(dep_id, visited, order);
self.topo_visit(dep_id, visited, in_stack, order, diagnostics);
}
}
in_stack.remove(&id);
visited.insert(id);
order.push(id);
}
/// Detect signals not referenced by any view or export (dead signals).
///
/// A source signal is "dead" when nothing reads it: no view body, no
/// derived signal, and no `on` handler body. Each dead signal yields a
/// W1001 warning.
pub fn dead_signals(&self, program: &Program) -> Vec<Diagnostic> {
    let mut referenced: HashSet<String> = HashSet::new();
    // Names read by view bodies and by event-handler bodies.
    for decl in &program.declarations {
        match decl {
            Declaration::View(view) => referenced.extend(extract_dependencies(&view.body)),
            Declaration::OnHandler(h) => referenced.extend(extract_dependencies(&h.body)),
            _ => {}
        }
    }
    // Names read by derived signals.
    for node in &self.nodes {
        referenced.extend(node.dependencies.iter().map(|d| d.signal_name.clone()));
    }
    // Every unread source signal becomes a warning.
    self.nodes
        .iter()
        .filter(|n| matches!(n.kind, SignalKind::Source) && !referenced.contains(&n.name))
        .map(|n| {
            Diagnostic::warning(
                format!("signal `{}` is never read", n.name),
                Span { start: 0, end: 0, line: 0, col: 0 },
            )
            .with_code("W1001")
        })
        .collect()
}
/// Build the signal graph and run all graph-level analyses in one call.
///
/// The returned diagnostics combine cycle errors (from topological
/// ordering) followed by dead-signal warnings.
pub fn from_program_with_diagnostics(program: &Program) -> (Self, Vec<Diagnostic>) {
    let graph = Self::from_program(program);
    // Cycle errors first, then dead-signal warnings.
    let (_order, mut diagnostics) = graph.topological_order();
    diagnostics.extend(graph.dead_signals(program));
    (graph, diagnostics)
}
}
/// Extract all signal names referenced in an expression.
@ -497,7 +579,8 @@ mod tests {
#[test]
fn test_topological_order() {
let (graph, _) = analyze("let count = 0\nlet doubled = count * 2");
let order = graph.topological_order();
let (order, diags) = graph.topological_order();
assert!(diags.is_empty(), "no cycle expected");
// count (id=0) should come before doubled (id=1)
let pos_count = order.iter().position(|&id| id == 0).unwrap();
let pos_doubled = order.iter().position(|&id| id == 1).unwrap();
@ -536,4 +619,161 @@ view counter =
let count_node = graph.nodes.iter().find(|n| n.name == "count").unwrap();
assert!(!count_node.streamable, "signals should not be streamable without stream decl");
}
#[test]
fn test_cycle_detection() {
    // `a` and `b` reference each other, so the topo sort must report a cycle.
    let (graph, _) = analyze("let a = b * 2\nlet b = a + 1");
    let (_order, diags) = graph.topological_order();
    assert!(!diags.is_empty(), "cycle should produce diagnostic");
    let first = &diags[0];
    assert!(first.message.contains("circular"), "diagnostic should mention circular");
}
#[test]
fn test_dead_signal_warning() {
    // `unused` is never read by any view or derived signal.
    let src = "let unused = 42\nlet used = 0\nview main = column [ text used ]";
    let (graph, _) = analyze(src);
    // Re-parse the source to hand `dead_signals` the program it expects.
    let mut lexer = ds_parser::Lexer::new(src);
    let tokens = lexer.tokenize();
    let mut parser = ds_parser::Parser::new(tokens);
    let program = parser.parse_program().expect("parse failed");
    let warnings = graph.dead_signals(&program);
    assert!(!warnings.is_empty(), "should have dead signal warning");
    let mentions_unused = warnings.iter().any(|d| d.message.contains("unused"));
    assert!(mentions_unused, "warning should mention 'unused'");
}
// ── New v0.5 tests ──────────────────────────────────────
#[test]
fn test_multi_level_chain() {
    // a → b → c: node kinds, dependency edges, and ordering all line up.
    let (graph, _) = analyze("let a = 0\nlet b = a + 1\nlet c = b * 2");
    assert_eq!(graph.nodes.len(), 3);
    assert!(matches!(graph.nodes[0].kind, SignalKind::Source));
    for node in &graph.nodes[1..] {
        assert!(matches!(node.kind, SignalKind::Derived));
    }
    // c's single dependency is b.
    assert_eq!(graph.nodes[2].dependencies[0].signal_name, "b");
    // Propagation order: a before b before c, with no cycle diagnostics.
    let (order, diags) = graph.topological_order();
    assert!(diags.is_empty());
    let index_of = |id| order.iter().position(|&n| n == id).unwrap();
    assert!(index_of(0) < index_of(1));
    assert!(index_of(1) < index_of(2));
}
#[test]
fn test_fan_out() {
    // A single source feeding three derived signals.
    let (graph, _) = analyze("let x = 10\nlet a = x + 1\nlet b = x + 2\nlet c = x + 3");
    assert_eq!(graph.nodes.len(), 4);
    // Every derived node depends on exactly `x`.
    for node in &graph.nodes[1..] {
        assert_eq!(node.dependencies.len(), 1);
        assert_eq!(node.dependencies[0].signal_name, "x");
    }
}
#[test]
fn test_diamond_dependency() {
    // x fans out to a and b, which join again at d.
    let (graph, _) = analyze("let x = 0\nlet a = x + 1\nlet b = x * 2\nlet d = a + b");
    assert_eq!(graph.nodes.len(), 4);
    let d_node = &graph.nodes[3];
    let depends_on = |name: &str| d_node.dependencies.iter().any(|d| d.signal_name == name);
    assert!(depends_on("a"));
    assert!(depends_on("b"));
}
#[test]
fn test_empty_program() {
    // An empty source yields no signals and no views.
    let (graph, views) = analyze("");
    assert!(graph.nodes.is_empty());
    assert!(views.is_empty());
}
#[test]
fn test_only_views_no_signals() {
    // Views without any `let` declarations produce an empty signal graph.
    let src = "view main = column [\n text \"hello\"\n text \"world\"\n]";
    let (graph, views) = analyze(src);
    assert!(graph.nodes.is_empty());
    assert_eq!(views.len(), 1);
    assert_eq!(views[0].name, "main");
}
#[test]
fn test_event_handler_mutations() {
    let src = "let count = 0\non click -> count = count + 1\nview main = text \"hi\"";
    let (graph, _) = analyze(src);
    // At least one node should be classified as a handler.
    let handler_count = graph
        .nodes
        .iter()
        .filter(|n| matches!(n.kind, SignalKind::Handler { .. }))
        .count();
    assert!(handler_count > 0, "should detect handler from on click");
}
#[test]
fn test_conditional_binding() {
    let src = "let show = true\nview main = column [\n when show -> text \"visible\"\n]";
    let (_, views) = analyze(src);
    assert_eq!(views.len(), 1);
    let found = views[0]
        .bindings
        .iter()
        .any(|b| matches!(b.kind, BindingKind::Conditional { .. }));
    assert!(found, "should detect conditional binding from `when`");
}
#[test]
fn test_static_text_binding() {
    let (_, views) = analyze("view main = text \"hello world\"");
    assert_eq!(views.len(), 1);
    let found = views[0]
        .bindings
        .iter()
        .any(|b| matches!(b.kind, BindingKind::StaticText { .. }));
    assert!(found, "should detect static text binding");
}
#[test]
fn test_multiple_views() {
    let (_, views) = analyze("view header = text \"Header\"\nview footer = text \"Footer\"");
    assert_eq!(views.len(), 2);
    let names: Vec<&str> = views.iter().map(|v| v.name.as_str()).collect();
    assert!(names.contains(&"header"));
    assert!(names.contains(&"footer"));
}
#[test]
fn test_timer_no_signal_nodes() {
    // `every` declarations are lowered at codegen time; they never become
    // nodes in the signal graph.
    let src = "let x = 0\nevery 33 -> x = x + 1\nview main = text x";
    let (graph, _) = analyze(src);
    assert_eq!(graph.nodes.len(), 1);
    assert_eq!(graph.nodes[0].name, "x");
}
#[test]
fn test_string_signal() {
    let (graph, _) = analyze("let name = \"world\"");
    assert_eq!(graph.nodes.len(), 1);
    let node = &graph.nodes[0];
    assert!(matches!(node.kind, SignalKind::Source));
    // A string literal still records an initial value.
    assert!(node.initial_value.is_some());
}
#[test]
fn test_array_signal() {
    let (graph, _) = analyze("let items = [1, 2, 3]");
    assert_eq!(graph.nodes.len(), 1);
    let node = &graph.nodes[0];
    assert!(matches!(node.kind, SignalKind::Source));
    assert_eq!(node.name, "items");
}
}

View file

@ -12,6 +12,8 @@ ds-parser = { workspace = true }
ds-analyzer = { workspace = true }
ds-codegen = { workspace = true }
ds-incremental = { workspace = true }
ds-diagnostic = { workspace = true }
ds-types = { workspace = true }
clap = { version = "4", features = ["derive"] }
notify = "8"
tiny_http = "0.12"

View file

@ -0,0 +1,156 @@
/// Add command — install components from the DreamStack registry.
use std::fs;
use std::path::Path;
/// A single installable component in the built-in registry.
struct RegistryItem {
    name: &'static str,        // CLI name used with `dreamstack add <name>`
    description: &'static str, // one-line blurb shown by `--list`
    source: &'static str,      // .ds source embedded at compile time via include_str!
    deps: &'static [&'static str], // names of other registry items to install first
}
/// Built-in component registry. Every component's source is embedded into
/// the binary at compile time (`include_str!`), so `dreamstack add` works
/// offline. Only `dialog` declares a dependency (on `button`).
const REGISTRY: &[RegistryItem] = &[
    RegistryItem {
        name: "button",
        description: "Styled button with variant support (primary, secondary, ghost, destructive)",
        source: include_str!("../../../../registry/components/button.ds"),
        deps: &[],
    },
    RegistryItem {
        name: "input",
        description: "Text input with label, placeholder, and error state",
        source: include_str!("../../../../registry/components/input.ds"),
        deps: &[],
    },
    RegistryItem {
        name: "card",
        description: "Content container with title and styled border",
        source: include_str!("../../../../registry/components/card.ds"),
        deps: &[],
    },
    RegistryItem {
        name: "badge",
        description: "Status badge with color variants",
        source: include_str!("../../../../registry/components/badge.ds"),
        deps: &[],
    },
    RegistryItem {
        name: "dialog",
        description: "Modal dialog with overlay and close button",
        source: include_str!("../../../../registry/components/dialog.ds"),
        deps: &["button"],
    },
    RegistryItem {
        name: "toast",
        description: "Notification toast with auto-dismiss",
        source: include_str!("../../../../registry/components/toast.ds"),
        deps: &[],
    },
    RegistryItem {
        name: "progress",
        description: "Animated progress bar with percentage",
        source: include_str!("../../../../registry/components/progress.ds"),
        deps: &[],
    },
    RegistryItem {
        name: "alert",
        description: "Alert banner with info/warning/error/success variants",
        source: include_str!("../../../../registry/components/alert.ds"),
        deps: &[],
    },
    RegistryItem {
        name: "separator",
        description: "Visual divider between content sections",
        source: include_str!("../../../../registry/components/separator.ds"),
        deps: &[],
    },
    RegistryItem {
        name: "toggle",
        description: "On/off switch toggle",
        source: include_str!("../../../../registry/components/toggle.ds"),
        deps: &[],
    },
    RegistryItem {
        name: "avatar",
        description: "User avatar with initials fallback",
        source: include_str!("../../../../registry/components/avatar.ds"),
        deps: &[],
    },
];
/// `dreamstack add` — install one, many, or all registry components into
/// the local `components/` directory.
///
/// - `list`: print the registry and exit.
/// - `all`: install every registry component.
/// - `name`: install a single component (plus its dependencies).
pub fn cmd_add(name: Option<String>, list: bool, all: bool) {
    if list {
        println!("📦 Available DreamStack components:\n");
        for item in REGISTRY {
            let deps = if item.deps.is_empty() {
                String::new()
            } else {
                format!(" (deps: {})", item.deps.join(", "))
            };
            // Pad the name into a fixed column; the previous format string
            // ("{}{}{}") ran name and description together with no separator.
            println!("  {:<12} {}{}", item.name, item.description, deps);
        }
        println!("\n Use: dreamstack add <name>");
        return;
    }
    let components_dir = Path::new("components");
    if !components_dir.exists() {
        fs::create_dir_all(components_dir).expect("Failed to create components/ directory");
    }
    // Resolve which components to install.
    let names_to_add: Vec<String> = if all {
        REGISTRY.iter().map(|r| r.name.to_string()).collect()
    } else if let Some(name) = name {
        vec![name]
    } else {
        println!("Usage: dreamstack add <component>\n dreamstack add --list\n dreamstack add --all");
        return;
    };
    // `added` deduplicates across recursive dependency installs.
    let mut added = std::collections::HashSet::new();
    for name in &names_to_add {
        add_component(name, components_dir, &mut added);
    }
    if added.is_empty() {
        println!("❌ No components found. Use 'dreamstack add --list' to see available.");
    }
}
/// Install a single component (and, depth-first, its registry dependencies)
/// into `dest`. `added` records installed names so shared dependencies are
/// written only once per invocation.
fn add_component(name: &str, dest: &Path, added: &mut std::collections::HashSet<String>) {
    if added.contains(name) {
        return;
    }
    let item = match REGISTRY.iter().find(|r| r.name == name) {
        Some(item) => item,
        None => {
            println!(" ❌ Unknown component: {name}");
            return;
        }
    };
    // Add dependencies first so this component's imports resolve.
    for dep in item.deps {
        add_component(dep, dest, added);
    }
    let dest_file = dest.join(format!("{}.ds", name));
    // Registry sources already use relative `./` imports that are valid
    // inside components/, so the source is written unmodified. (The old
    // `replace("from \"./", "from \"./")` was a no-op and has been removed.)
    fs::write(&dest_file, item.source).expect("Failed to write component file");
    // NOTE(review): this tags any item that *has* dependencies, not items
    // installed *as* a dependency — confirm that is the intended label.
    let dep_info = if !item.deps.is_empty() {
        " (dependency)"
    } else {
        ""
    };
    println!(" ✅ Added components/{}.ds{}", name, dep_info);
    added.insert(name.to_string());
}
/// Look up the embedded source for a registry component by name.
/// Used by the `init` command to seed starter projects.
pub fn get_registry_source(name: &str) -> Option<&'static str> {
    REGISTRY
        .iter()
        .find_map(|r| if r.name == name { Some(r.source) } else { None })
}

View file

@ -0,0 +1,213 @@
/// Build command — compile .ds files to HTML+JS or Panel IR.
use std::fs;
use std::path::{Path, PathBuf};
use std::collections::HashSet;
/// Compile error with source for diagnostic rendering.
pub struct CompileError {
    pub message: String,        // pre-rendered, human-readable error text
    pub source: Option<String>, // original source text, kept when diagnostics were rendered from it
}
impl std::fmt::Display for CompileError {
    /// Writes only the pre-rendered message; `source` is not repeated here.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(&self.message)
    }
}
/// Full pipeline for the HTML target: lex → parse → resolve imports →
/// analyze → emit a self-contained HTML+JS document.
pub fn compile(source: &str, base_dir: &Path, minify: bool) -> Result<String, CompileError> {
    // 1. Lex — fail on the first tokenizer error.
    let mut lexer = ds_parser::Lexer::new(source);
    let tokens = lexer.tokenize();
    for tok in &tokens {
        if let ds_parser::TokenKind::Error(msg) = &tok.kind {
            return Err(CompileError {
                message: format!("Lexer error at line {}: {}", tok.line, msg),
                source: None,
            });
        }
    }
    // 2. Parse, rendering any error Elm-style against the source.
    let mut parser = ds_parser::Parser::with_source(tokens, source);
    let mut program = match parser.parse_program() {
        Ok(p) => p,
        Err(e) => {
            let diag = ds_diagnostic::Diagnostic::from(e);
            return Err(CompileError {
                message: ds_diagnostic::render(&diag, source),
                source: Some(source.to_string()),
            });
        }
    };
    // 3. Resolve imports — inline exported declarations from imported files.
    if let Err(message) = resolve_imports(&mut program, base_dir) {
        return Err(CompileError { message, source: None });
    }
    // 4. Analyze.
    let graph = ds_analyzer::SignalGraph::from_program(&program);
    let views = ds_analyzer::SignalGraph::analyze_views(&program);
    // 5. Codegen.
    Ok(ds_codegen::JsEmitter::emit_html(&program, &graph, &views, minify))
}
/// Compile a DreamStack source file to Panel IR JSON for ESP32 LVGL panels.
///
/// Mirrors `compile`, but skips view analysis and emits IR instead of HTML.
pub fn compile_panel_ir(source: &str, base_dir: &Path) -> Result<String, CompileError> {
    // 1. Lex — fail on the first tokenizer error.
    let mut lexer = ds_parser::Lexer::new(source);
    let tokens = lexer.tokenize();
    for tok in &tokens {
        if let ds_parser::TokenKind::Error(msg) = &tok.kind {
            return Err(CompileError {
                message: format!("Lexer error at line {}: {}", tok.line, msg),
                source: None,
            });
        }
    }
    // 2. Parse, rendering any error Elm-style against the source.
    let mut parser = ds_parser::Parser::with_source(tokens, source);
    let mut program = match parser.parse_program() {
        Ok(p) => p,
        Err(e) => {
            let diag = ds_diagnostic::Diagnostic::from(e);
            return Err(CompileError {
                message: ds_diagnostic::render(&diag, source),
                source: Some(source.to_string()),
            });
        }
    };
    // 3. Resolve imports.
    if let Err(message) = resolve_imports(&mut program, base_dir) {
        return Err(CompileError { message, source: None });
    }
    // 4. Analyze.
    let graph = ds_analyzer::SignalGraph::from_program(&program);
    // 5. Codegen → Panel IR.
    Ok(ds_codegen::IrEmitter::emit_ir(&program, &graph))
}
/// Resolve `import { X, Y } from "./file"` by parsing the imported file
/// and inlining the matching `export`ed declarations.
///
/// Side effects on `program`: all `Import` declarations are removed and the
/// inlined declarations are prepended, so imported decls precede local ones.
///
/// NOTE(review): `seen_files` dedupes only within one call — each recursive
/// call starts a fresh set, so mutually-importing files would recurse
/// without bound. Confirm import cycles are rejected upstream.
pub fn resolve_imports(program: &mut ds_parser::Program, base_dir: &Path) -> Result<(), String> {
    let mut imported_decls = Vec::new();
    let mut seen_files: HashSet<PathBuf> = HashSet::new();
    for decl in &program.declarations {
        if let ds_parser::Declaration::Import(import) = decl {
            // Resolve the file path relative to base_dir
            let mut import_path = base_dir.join(&import.source);
            if !import_path.extension().map_or(false, |e| e == "ds") {
                import_path.set_extension("ds");
            }
            // Canonicalize for a stable dedupe key; fall back to the raw path
            // when the file doesn't exist (the read below reports the error).
            let import_path = import_path.canonicalize().unwrap_or(import_path.clone());
            if seen_files.contains(&import_path) {
                continue; // Skip duplicate imports
            }
            seen_files.insert(import_path.clone());
            // Read and parse the imported file
            let imported_source = fs::read_to_string(&import_path)
                .map_err(|e| format!("Cannot import '{}': {}", import.source, e))?;
            let mut lexer = ds_parser::Lexer::new(&imported_source);
            let tokens = lexer.tokenize();
            for tok in &tokens {
                if let ds_parser::TokenKind::Error(msg) = &tok.kind {
                    return Err(format!("Lexer error in '{}' at line {}: {}", import.source, tok.line, msg));
                }
            }
            let mut parser = ds_parser::Parser::new(tokens);
            let mut imported_program = parser.parse_program()
                .map_err(|e| format!("Parse error in '{}': {}", import.source, e))?;
            // Recursively resolve imports in the imported file
            let imported_dir = import_path.parent().unwrap_or(base_dir);
            resolve_imports(&mut imported_program, imported_dir)?;
            // Extract matching exports
            let names: HashSet<&str> = import.names.iter().map(|s| s.as_str()).collect();
            for d in &imported_program.declarations {
                match d {
                    ds_parser::Declaration::Export(name, inner) if names.contains(name.as_str()) => {
                        imported_decls.push(*inner.clone());
                    }
                    // Also include non-exported decls that exports depend on
                    // (for now, include all let decls from the imported file)
                    ds_parser::Declaration::Let(_) => {
                        imported_decls.push(d.clone());
                    }
                    _ => {}
                }
            }
        }
    }
    // Remove Import declarations and prepend imported decls
    program.declarations.retain(|d| !matches!(d, ds_parser::Declaration::Import(_)));
    let mut merged = imported_decls;
    merged.append(&mut program.declarations);
    program.declarations = merged;
    Ok(())
}
/// `dreamstack build` — compile `file` and write the artifact into `output`.
///
/// Targets:
/// - `"panel"`: Panel IR JSON written as `output/app.ir.json`.
/// - anything else: self-contained HTML written as `output/index.html`.
///
/// Exits the process with status 1 when the file can't be read or
/// compilation fails.
pub fn cmd_build(file: &Path, output: &Path, minify: bool, target: &str) {
    println!("🔨 DreamStack build (target: {}){}", target, if minify { " (minified)" } else { "" });
    println!(" source: {}", file.display());
    let source = match fs::read_to_string(file) {
        Ok(s) => s,
        Err(e) => {
            eprintln!("❌ Could not read {}: {}", file.display(), e);
            std::process::exit(1);
        }
    };
    // Imports resolve relative to the source file's directory.
    let base_dir = file.parent().unwrap_or(Path::new("."));
    match target {
        "panel" => {
            // Panel IR target — emit JSON for ESP32 LVGL runtime
            match compile_panel_ir(&source, base_dir) {
                Ok(ir) => {
                    // NOTE(review): fs unwraps panic rather than exiting with
                    // a friendly message — confirm acceptable CLI behavior.
                    fs::create_dir_all(output).unwrap();
                    let out_path = output.join("app.ir.json");
                    fs::write(&out_path, &ir).unwrap();
                    println!(" output: {}", out_path.display());
                    println!("✅ Panel IR built ({} bytes)", ir.len());
                }
                Err(e) => {
                    eprintln!("{}", e.message);
                    std::process::exit(1);
                }
            }
        }
        _ => {
            // Default HTML target
            match compile(&source, base_dir, minify) {
                Ok(html) => {
                    fs::create_dir_all(output).unwrap();
                    let out_path = output.join("index.html");
                    fs::write(&out_path, &html).unwrap();
                    println!(" output: {}", out_path.display());
                    println!("✅ Build complete! ({} bytes)", html.len());
                    println!();
                    println!(" Open in browser:");
                    println!(" file://{}", fs::canonicalize(&out_path).unwrap().display());
                }
                Err(e) => {
                    eprintln!("{}", e.message);
                    std::process::exit(1);
                }
            }
        }
    }
}

View file

@ -0,0 +1,134 @@
/// Check command — type-check and analyze without compiling.
/// Outputs Elm-style diagnostics for any errors found.
use std::fs;
use std::path::Path;
/// `dreamstack check` — type-check and analyze a file without compiling.
///
/// Pipeline: lex → resilient parse (collects multiple errors) → type check
/// → signal-graph analysis. Every stage pushes into one diagnostic list,
/// which is sorted and rendered Elm-style, followed by a signal-graph and
/// view report. Exits with status 1 if any errors were found.
pub fn cmd_check(file: &Path) {
    println!("🔍 DreamStack check");
    println!(" file: {}", file.display());
    let source = match fs::read_to_string(file) {
        Ok(s) => s,
        Err(e) => {
            eprintln!("❌ Could not read {}: {}", file.display(), e);
            std::process::exit(1);
        }
    };
    let mut diagnostics: Vec<ds_diagnostic::Diagnostic> = Vec::new();
    // Lex — tokenizer errors become E0000 diagnostics (line/col only; no
    // byte offsets are available from the token).
    let mut lexer = ds_parser::Lexer::new(&source);
    let tokens = lexer.tokenize();
    for tok in &tokens {
        if let ds_parser::TokenKind::Error(msg) = &tok.kind {
            diagnostics.push(ds_diagnostic::Diagnostic::error(
                msg.clone(),
                ds_parser::Span {
                    start: 0,
                    end: 0,
                    line: tok.line,
                    col: tok.col,
                },
            ).with_code("E0000"));
        }
    }
    // Parse (resilient — collect multiple errors)
    let mut parser = ds_parser::Parser::with_source(tokens, &source);
    let parse_result = parser.parse_program_resilient();
    // Convert parse errors → diagnostics (E0001 via the From impl)
    for err in &parse_result.errors {
        diagnostics.push(ds_diagnostic::Diagnostic::from(err.clone()));
    }
    let program = parse_result.program;
    // Type check
    let mut checker = ds_types::TypeChecker::new();
    checker.check_program(&program);
    if checker.has_errors() {
        diagnostics.extend(checker.errors_as_diagnostics());
    }
    // Analyze
    let graph = ds_analyzer::SignalGraph::from_program(&program);
    let views = ds_analyzer::SignalGraph::analyze_views(&program);
    // Cycle detection diagnostics
    let (topo, cycle_diags) = graph.topological_order();
    diagnostics.extend(cycle_diags);
    // Sort all diagnostics: errors first, then by line
    ds_diagnostic::sort_diagnostics(&mut diagnostics);
    // Render diagnostics (diagnostics go to stderr; reports go to stdout)
    let error_count = diagnostics.iter().filter(|d| d.severity == ds_diagnostic::Severity::Error).count();
    let warning_count = diagnostics.iter().filter(|d| d.severity == ds_diagnostic::Severity::Warning).count();
    if !diagnostics.is_empty() {
        println!();
        for diag in &diagnostics {
            eprintln!("{}", ds_diagnostic::render(diag, &source));
        }
    }
    // Signal graph report
    println!();
    println!(" 📊 Signal Graph:");
    for node in &graph.nodes {
        let kind_str = match &node.kind {
            ds_analyzer::SignalKind::Source => "source",
            ds_analyzer::SignalKind::Derived => "derived",
            ds_analyzer::SignalKind::Handler { .. } => "handler",
        };
        let deps: Vec<&str> = node.dependencies.iter().map(|d| d.signal_name.as_str()).collect();
        if deps.is_empty() {
            println!(" {} [{}]", node.name, kind_str);
        } else {
            println!(" {} [{}] ← depends on: {}", node.name, kind_str, deps.join(", "));
        }
    }
    println!();
    println!(" 🖼️ Views:");
    for view in &views {
        println!(" {} ({} bindings)", view.name, view.bindings.len());
        for binding in &view.bindings {
            match &binding.kind {
                ds_analyzer::BindingKind::TextContent { signal } => {
                    println!(" 📝 text bound to: {signal}");
                }
                ds_analyzer::BindingKind::EventHandler { element_tag, event, .. } => {
                    // NOTE(review): this arm prints without the indent/emoji
                    // prefix its sibling arms use — confirm intended.
                    println!("{element_tag}.{event}");
                }
                ds_analyzer::BindingKind::Conditional { condition_signals } => {
                    println!(" ❓ conditional on: {}", condition_signals.join(", "));
                }
                ds_analyzer::BindingKind::StaticContainer { kind, child_count } => {
                    println!(" 📦 {kind} ({child_count} children)");
                }
                ds_analyzer::BindingKind::StaticText { text } => {
                    println!(" 📄 static: \"{text}\"");
                }
            }
        }
    }
    println!();
    println!(" 🔄 Propagation order: {:?}", topo.iter().map(|&id| &graph.nodes[id].name).collect::<Vec<_>>());
    // Summary
    println!();
    if error_count == 0 && warning_count == 0 {
        println!("✅ No errors found");
    } else if error_count == 0 {
        println!("⚠️ {} warning(s)", warning_count);
    } else {
        eprintln!("{} error(s), {} warning(s)", error_count, warning_count);
        std::process::exit(1);
    }
}

View file

@ -0,0 +1,519 @@
/// Convert command — React/TSX → DreamStack converter.
use std::fs;
use std::path::Path;
/// `dreamstack convert` — React/TSX → DreamStack.
///
/// `name` is either a shadcn component name (when `shadcn` is true, fetched
/// over the network from the shadcn-ui GitHub raw URL) or a local .tsx file
/// path. The converted source is written to `output` when given, otherwise
/// printed to stdout.
pub fn cmd_convert(name: &str, shadcn: bool, output: Option<&Path>) {
    let tsx_source = if shadcn {
        // Fetch from shadcn/ui GitHub
        let url = format!(
            "https://raw.githubusercontent.com/shadcn-ui/taxonomy/main/components/ui/{}.tsx",
            name
        );
        println!(" 📥 Fetching {}.tsx from shadcn/ui...", name);
        match fetch_url_blocking(&url) {
            Ok(source) => source,
            Err(e) => {
                println!(" ❌ Failed to fetch: {e}");
                println!(" Try providing a local .tsx file instead.");
                return;
            }
        }
    } else {
        // Read local file
        match fs::read_to_string(name) {
            Ok(source) => source,
            Err(e) => {
                println!(" ❌ Cannot read '{name}': {e}");
                return;
            }
        }
    };
    let ds_output = convert_tsx_to_ds(&tsx_source, name);
    if let Some(out_path) = output {
        // NOTE(review): expect() panics on an unwritable path — confirm a
        // graceful error message is not preferred here.
        fs::write(out_path, &ds_output).expect("Failed to write output file");
        println!(" ✅ Converted to {}", out_path.display());
    } else {
        println!("{}", ds_output);
    }
}
/// Best-effort TSX → DreamStack converter.
/// Pattern-matches common React/shadcn idioms rather than full TypeScript parsing.
fn convert_tsx_to_ds(tsx: &str, file_hint: &str) -> String {
    // Component name: from the TSX itself, else derived from the file stem
    // with its first character upper-cased.
    let comp_name = extract_component_name(tsx).unwrap_or_else(|| {
        let base = Path::new(file_hint)
            .file_stem()
            .and_then(|s| s.to_str())
            .unwrap_or("Component");
        let mut chars = base.chars();
        match chars.next() {
            Some(c) => format!("{}{}", c.to_uppercase().collect::<String>(), chars.collect::<String>()),
            None => "Component".to_string(),
        }
    });
    // Run all extractions up front; each scans the TSX independently.
    let props = extract_props(tsx);
    let state_vars = extract_use_state(tsx);
    let variants = extract_cva_variants(tsx);
    let jsx_body = extract_jsx_body(tsx);

    let mut out = String::new();
    // Header comment
    out.push_str(&format!("-- Converted from {}\n", file_hint));
    out.push_str("-- Auto-generated by dreamstack convert\n\n");
    // useState hooks → let declarations
    for (name, default) in &state_vars {
        out.push_str(&format!("let {} = {}\n", name, default));
    }
    if !state_vars.is_empty() {
        out.push('\n');
    }
    // cva variants, emitted as a comment block
    if !variants.is_empty() {
        out.push_str("-- Variants:\n");
        for (variant_name, values) in &variants {
            out.push_str(&format!("-- {}: {}\n", variant_name, values.join(", ")));
        }
        out.push('\n');
    }
    // Component declaration with optional prop list
    let props_str = if props.is_empty() {
        String::new()
    } else {
        format!("({})", props.join(", "))
    };
    out.push_str(&format!("export component {}{} =\n", comp_name, props_str));
    if jsx_body.is_empty() {
        out.push_str(" text \"TODO: convert JSX body\"\n");
    } else {
        out.push_str(&jsx_body);
    }
    out
}
/// Extract component name from React.forwardRef or function/const declaration
fn extract_component_name(tsx: &str) -> Option<String> {
    for line in tsx.lines() {
        let trimmed = line.trim();
        // Pattern: `const Button = React.forwardRef(...)`
        if trimmed.contains("forwardRef") || trimmed.contains("React.forwardRef") {
            if let Some(rest) = trimmed.find("const ").map(|pos| &trimmed[pos + 6..]) {
                if let Some(eq_pos) = rest.find(" =") {
                    return Some(rest[..eq_pos].trim().to_string());
                }
            }
        }
        // Pattern: `export function Button(` / `function Button(`
        if trimmed.starts_with("export function ") || trimmed.starts_with("function ") {
            let body = trimmed.strip_prefix("export ").unwrap_or(trimmed);
            let body = body.strip_prefix("function ").unwrap_or(body);
            if let Some(paren) = body.find('(') {
                let name = body[..paren].trim();
                // Components are upper-cased by React convention; skip helpers.
                if name.chars().next().map_or(false, |c| c.is_uppercase()) {
                    return Some(name.to_string());
                }
            }
        }
    }
    None
}
/// Extract props from destructured function parameters
fn extract_props(tsx: &str) -> Vec<String> {
    let mut props = Vec::new();
    // First destructuring site: `({ className, variant, size, ...props })`
    let start = match tsx.find("({ ") {
        Some(s) => s,
        None => return props,
    };
    let end = match tsx[start..].find(" })") {
        Some(e) => e,
        None => return props,
    };
    for part in tsx[start + 3..start + end].split(',') {
        let part = part.trim();
        // Skip empty fragments and `...rest` spreads.
        if part.is_empty() || part.starts_with("...") {
            continue;
        }
        // Strip defaults (`variant = "x"`) and TS annotations (`size: Size`).
        let name = part.split('=').next().unwrap_or(part).trim();
        let name = name.split(':').next().unwrap_or(name).trim();
        let skip = name.is_empty()
            || name == "className"
            || name == "ref"
            || name == "children"
            || props.contains(&name.to_string());
        if !skip {
            props.push(name.to_string());
        }
    }
    props
}
/// Extract useState hooks: `const [open, setOpen] = useState(false)` → ("open", "false")
fn extract_use_state(tsx: &str) -> Vec<(String, String)> {
    let mut states = Vec::new();
    for raw in tsx.lines() {
        let line = raw.trim();
        if !line.contains("useState") {
            continue;
        }
        // Destructured name: identifier between `[` and the first `,`.
        let name = line
            .find('[')
            .and_then(|b| line[b..].find(',').map(|c| line[b + 1..b + c].trim()));
        // Initializer: the text inside `useState(...)`.
        let default = line.find("useState(").and_then(|p| {
            let rest = &line[p + 9..];
            rest.find(')').map(|e| rest[..e].trim())
        });
        // Only record hooks where both pieces parsed.
        if let (Some(name), Some(default)) = (name, default) {
            states.push((name.to_string(), convert_value(default)));
        }
    }
    states
}
/// Extract cva variant definitions.
///
/// Scans the `variants: { ... }` block of a `cva(...)` call, returning
/// `(group_name, value_names)` pairs such as `("variant", ["default", "ghost"])`.
/// Matching is line-based and indentation-agnostic (lines are trimmed).
fn extract_cva_variants(tsx: &str) -> Vec<(String, Vec<String>)> {
    let mut variants = Vec::new();
    let mut in_variants = false;
    let mut current_variant = String::new();
    let mut current_values = Vec::new();
    for line in tsx.lines() {
        let trimmed = line.trim();
        if trimmed == "variants: {" {
            in_variants = true;
            continue;
        }
        if in_variants {
            if trimmed == "}," || trimmed == "}" {
                // Closing brace: flush the variant group in progress, if any.
                if !current_variant.is_empty() {
                    variants.push((current_variant.clone(), current_values.clone()));
                    current_variant.clear();
                    current_values.clear();
                }
                // `},` closes a variant group (stay in the variants block);
                // a bare `}` ends the variants block itself. (Replaces the
                // original's redundant doubly-nested `if trimmed == "},"`.)
                if trimmed == "}" {
                    in_variants = false;
                }
                continue;
            }
            // Variant group header: `variant: {`
            if trimmed.ends_with(": {") || trimmed.ends_with(":{") {
                if !current_variant.is_empty() {
                    variants.push((current_variant.clone(), current_values.clone()));
                    current_values.clear();
                }
                current_variant = trimmed.split(':').next().unwrap_or("").trim().to_string();
                continue;
            }
            // Variant value: `default: "bg-primary text-primary-foreground",`
            if trimmed.contains(':') && !current_variant.is_empty() {
                let name = trimmed.split(':').next().unwrap_or("").trim().trim_matches('"');
                if !name.is_empty() {
                    current_values.push(name.to_string());
                }
            }
        }
    }
    // Flush a trailing group if the block was never closed.
    if !current_variant.is_empty() {
        variants.push((current_variant, current_values));
    }
    variants
}
/// Convert a JSX body to DreamStack view syntax (best-effort)
///
/// Finds the component's `return (` statement, then feeds each line inside
/// the matching parentheses through `convert_jsx_line`, prefixing the
/// result with indentation. The closing-paren line itself is not converted.
/// NOTE(review): paren counting does not skip string literals or JSX
/// attribute expressions, so a `)` inside quotes can end the scan early —
/// confirm this is acceptable for the targeted components.
fn extract_jsx_body(tsx: &str) -> String {
    let mut out = String::new();
    let mut in_return = false;
    let mut depth = 0;
    for line in tsx.lines() {
        let trimmed = line.trim();
        // Find the return statement
        if trimmed.starts_with("return (") || trimmed == "return (" {
            in_return = true;
            depth = 1;
            continue;
        }
        if !in_return { continue; }
        // Track parens to locate the close matching `return (`.
        for c in trimmed.chars() {
            match c {
                '(' => depth += 1,
                ')' => {
                    depth -= 1;
                    if depth <= 0 {
                        in_return = false;
                    }
                }
                _ => {}
            }
        }
        // Stop before converting the line that closed the return expression.
        if !in_return && depth <= 0 { break; }
        // Convert JSX elements
        let converted = convert_jsx_line(trimmed);
        if !converted.is_empty() {
            out.push_str(" ");
            out.push_str(&converted);
            out.push('\n');
        }
    }
    out
}
/// Convert a single JSX line to DreamStack syntax
///
/// Best-effort, line-oriented translation. Handles, in order: closing
/// tags/fragments/className lines (dropped), self-closing tags, opening
/// tags with optional inline text content, `{expr}` interpolations, and
/// plain text. Anything unrecognized is emitted as a `--` comment so the
/// output stays readable instead of failing.
fn convert_jsx_line(jsx: &str) -> String {
    let trimmed = jsx.trim();
    // Skip closing tags
    if trimmed.starts_with("</") { return String::new(); }
    // Skip fragments
    if trimmed == "<>" || trimmed == "</>" { return String::new(); }
    // Skip className-only attributes
    if trimmed.starts_with("className=") { return String::new(); }
    // Self-closing tag: `<Component prop="val" />`
    if trimmed.starts_with('<') && trimmed.ends_with("/>") {
        // Strip `<` and `/>`; the first space-separated token is the tag.
        let inner = &trimmed[1..trimmed.len() - 2].trim();
        let parts: Vec<&str> = inner.splitn(2, ' ').collect();
        let tag = parts[0];
        let ds_tag = convert_html_tag(tag);
        if parts.len() > 1 {
            let attrs = convert_jsx_attrs(parts[1]);
            return format!("{} {{ {} }}", ds_tag, attrs);
        }
        return ds_tag;
    }
    // Opening tag: `<button ... >`
    if trimmed.starts_with('<') && !trimmed.starts_with("</") {
        // `find` returns a byte offset, so the slices below are safe even
        // with multibyte text content after the `>`.
        let close = trimmed.find('>').unwrap_or(trimmed.len());
        let inner = &trimmed[1..close].trim();
        let parts: Vec<&str> = inner.splitn(2, ' ').collect();
        let tag = parts[0];
        let ds_tag = convert_html_tag(tag);
        // Check for text content after >
        if close < trimmed.len() - 1 {
            let content = trimmed[close + 1..].trim();
            // Drop a trailing `</tag>` if the element closes on this line.
            let content = content.trim_end_matches(&format!("</{}>", tag));
            if !content.is_empty() {
                return format!("{} \"{}\"", ds_tag, content);
            }
        }
        if parts.len() > 1 {
            let attrs = convert_jsx_attrs(parts[1]);
            return format!("{} {{ {} }}", ds_tag, attrs);
        }
        return ds_tag;
    }
    // JSX expression: `{children}`, `{title}`
    if trimmed.starts_with('{') && trimmed.ends_with('}') {
        let expr = &trimmed[1..trimmed.len() - 1].trim();
        // Conditional: `{condition && <X/>}` — emitted as a placeholder
        // comment since the consequent is on the same line as the JSX.
        if expr.contains(" && ") {
            let parts: Vec<&str> = expr.splitn(2, " && ").collect();
            return format!("-- when {} -> ...", parts[0]);
        }
        return format!("text {}", expr);
    }
    // Plain text content
    if !trimmed.is_empty() && !trimmed.starts_with("//") && !trimmed.starts_with("/*") {
        return format!("-- {}", trimmed);
    }
    String::new()
}
/// Convert HTML tag to DreamStack element
///
/// Known HTML tags map to DreamStack primitives (`div`/`form`/lists become
/// `column [` containers, text-ish tags become `text`). Capitalized names
/// are treated as component references and pass through unchanged; anything
/// else is emitted as a `-- unknown:` comment.
fn convert_html_tag(tag: &str) -> String {
    match tag {
        // These keep their own name in DreamStack.
        "button" | "input" | "label" => tag.to_string(),
        // Block containers all map to a column container.
        "div" | "form" | "ul" | "ol" => "column [".to_string(),
        // Inline / text-bearing tags map to `text`.
        "span" | "p" | "li" => "text".to_string(),
        "h1" | "h2" | "h3" | "h4" | "h5" | "h6" => "text".to_string(),
        "img" => "image".to_string(),
        "a" => "link".to_string(),
        // Capitalized = component use (pass through; was `format!("{}", tag)`)
        _ if tag.chars().next().map_or(false, |c| c.is_uppercase()) => tag.to_string(),
        _ => format!("-- unknown: <{}>", tag),
    }
}
/// Convert JSX attributes to DreamStack props
///
/// Walks the attribute string left-to-right as a cursor (`remaining`),
/// dropping `className=`, `ref=` and spread (`{...props}`) attributes, and
/// collecting the rest as `key: value` pairs (event names translated via
/// `convert_event_name`). Parsing stops — returning what was collected so
/// far — at the first attribute it cannot understand.
fn convert_jsx_attrs(attrs: &str) -> String {
    let mut props = Vec::new();
    // Simple: key="value" or key={expr}
    // Strip a trailing `>` or `/>` left over from the enclosing tag.
    let mut remaining = attrs.trim().trim_end_matches('>').trim_end_matches('/').trim();
    while !remaining.is_empty() {
        // Skip className
        if remaining.starts_with("className=") {
            // Skip to next attr
            if let Some(quote_end) = skip_attr_value(remaining) {
                remaining = remaining[quote_end..].trim();
                continue;
            }
            break;
        }
        // Skip ref
        if remaining.starts_with("ref=") {
            if let Some(quote_end) = skip_attr_value(remaining) {
                remaining = remaining[quote_end..].trim();
                continue;
            }
            break;
        }
        // Skip {...props}
        if remaining.starts_with("{...") {
            if let Some(end) = remaining.find('}') {
                remaining = remaining[end + 1..].trim();
                continue;
            }
            break;
        }
        // Parse key=value
        if let Some(eq_pos) = remaining.find('=') {
            let key = remaining[..eq_pos].trim();
            let rest = remaining[eq_pos + 1..].trim();
            let ds_key = convert_event_name(key);
            if rest.starts_with('"') {
                // String value
                if let Some(end) = rest[1..].find('"') {
                    let val = &rest[1..1 + end];
                    props.push(format!("{}: \"{}\"", ds_key, val));
                    // `end` is relative to rest[1..]; +2 lands one past the
                    // closing quote (== rest.len() when the quote is last).
                    remaining = rest[end + 2..].trim();
                } else {
                    break;
                }
            } else if rest.starts_with('{') {
                // Expression value
                if let Some(end) = find_matching_brace(rest) {
                    let expr = &rest[1..end].trim();
                    props.push(format!("{}: {}", ds_key, expr));
                    remaining = rest[end + 1..].trim();
                } else {
                    break;
                }
            } else {
                // Bare values (e.g. boolean shorthand) are not understood.
                break;
            }
        } else {
            break;
        }
    }
    props.join(", ")
}
/// Map a JSX attribute / React event name to its DreamStack equivalent.
///
/// React-style `onXxx` handlers become lowercase DreamStack event names.
/// Every other attribute passes through unchanged — the old version spelled
/// out seven identity arms (`"disabled" => "disabled"`, …) that were
/// byte-identical to the default arm; they are collapsed here.
fn convert_event_name(name: &str) -> String {
    match name {
        "onClick" => "click",
        "onChange" => "change",
        "onSubmit" => "submit",
        "onKeyDown" => "keydown",
        "onFocus" => "focus",
        "onBlur" => "blur",
        // disabled, placeholder, type, value, href, src, alt, … keep
        // their names, as does anything unrecognized.
        other => other,
    }
    .to_string()
}
/// Convert a TS/JS literal value to its DreamStack equivalent.
///
/// Booleans pass through, `null`/`undefined` map to `0` (DreamStack has no
/// null), single-quoted strings are re-quoted with double quotes, and
/// everything else (numbers, identifiers, double-quoted strings) passes
/// through unchanged.
fn convert_value(val: &str) -> String {
    match val {
        "true" => "true".to_string(),
        "false" => "false".to_string(),
        "null" | "undefined" => "0".to_string(),
        s if s.starts_with('"') => s.to_string(),
        // Re-quote 'single' -> "single". strip_prefix/strip_suffix avoid the
        // panic the old slice `&s[1..s.len()-1]` hit on a lone `'`, and no
        // longer drop the final character when the closing quote is missing.
        s if s.starts_with('\'') => {
            let inner = s.strip_prefix('\'').unwrap_or(s);
            let inner = inner.strip_suffix('\'').unwrap_or(inner);
            format!("\"{inner}\"")
        }
        s => s.to_string(),
    }
}
/// Return the byte offset one past the end of the `key=value` attribute at
/// the start of `s`, or `None` when the value is unterminated. Handles
/// `key="..."`, `key={...}` and bare `key=` forms.
fn skip_attr_value(s: &str) -> Option<usize> {
    let eq = s.find('=')?;
    let value = &s[eq + 1..];
    if let Some(after_quote) = value.strip_prefix('"') {
        // Closing quote is at value index end+1; skip one past it.
        let end = after_quote.find('"')?;
        Some(eq + 1 + end + 2)
    } else if value.starts_with('{') {
        // Skip one past the matching closing brace.
        find_matching_brace(value).map(|end| eq + 1 + end + 1)
    } else {
        // Bare value: just step past the `=`.
        Some(eq + 1)
    }
}
/// Find the byte offset of the `}` matching the first `{` in `s`.
///
/// Returns `None` when braces never balance. Fix: the old version used
/// `chars().enumerate()`, which yields **char** indices, while every caller
/// slices `s` by the returned value — a **byte** offset. On any non-ASCII
/// content between the braces that produced a wrong slice boundary (and a
/// potential panic). `char_indices()` yields byte offsets.
fn find_matching_brace(s: &str) -> Option<usize> {
    let mut depth: i32 = 0;
    for (i, c) in s.char_indices() {
        match c {
            '{' => depth += 1,
            '}' => {
                depth -= 1;
                if depth == 0 {
                    return Some(i);
                }
            }
            _ => {}
        }
    }
    None
}
/// Simple blocking HTTP fetch (no async runtime needed)
///
/// Shells out to `curl -sL --fail` rather than pulling in an HTTP client;
/// requires curl on PATH. Errors are reported as human-readable strings.
fn fetch_url_blocking(url: &str) -> Result<String, String> {
    // Use std::process to call curl
    let result = std::process::Command::new("curl")
        .args(["-sL", "--fail", url])
        .output();
    let output = match result {
        Ok(o) => o,
        Err(e) => return Err(format!("Failed to run curl: {e}")),
    };
    if output.status.success() {
        String::from_utf8(output.stdout)
            .map_err(|e| format!("Invalid UTF-8 in response: {e}"))
    } else {
        Err(format!("HTTP request failed (status {})", output.status))
    }
}

View file

@ -0,0 +1,251 @@
//! Dev server command — file watching with hot reload.
use std::fs;
use std::path::Path;
use std::sync::{Arc, Mutex, atomic::{AtomicU64, Ordering}};
use std::time::{Duration, Instant};
use super::build::compile;
/// HMR client script injected into every page served by `dreamstack dev`.
/// Uses Server-Sent Events to receive reload notifications from the dev server.
const HMR_CLIENT_SCRIPT: &str = r#"
<script>
// ── DreamStack HMR (poll-based) ─────────────
(function() {
let currentVersion = null;
let polling = false;
async function poll() {
if (polling) return;
polling = true;
try {
const res = await fetch('/__hmr');
const version = await res.text();
if (currentVersion === null) {
currentVersion = version;
console.log('[DS HMR] 🟢 watching (v' + version + ')');
} else if (version !== currentVersion) {
console.log('[DS HMR] 🔄 change detected (v' + currentVersion + ' v' + version + '), reloading...');
location.reload();
return;
}
} catch(e) {
// server down — retry silently
}
polling = false;
}
setInterval(poll, 500);
poll();
})();
</script>
"#;
/// Splice the HMR polling script into `html`, just before the last
/// `</body>`; when the page has no `</body>` the script is appended.
pub fn inject_hmr(html: &str) -> String {
    match html.rfind("</body>") {
        Some(pos) => {
            let (head, tail) = html.split_at(pos);
            format!("{head}{HMR_CLIENT_SCRIPT}{tail}")
        }
        None => format!("{html}{HMR_CLIENT_SCRIPT}"),
    }
}
/// Render the styled in-browser error page shown when a compile fails.
/// Extracted: the identical template was previously duplicated inline at
/// both the initial-compile and recompile error sites.
fn compile_error_page<E: std::fmt::Display>(e: E) -> String {
    format!(
        r#"<!DOCTYPE html>
<html><head><meta charset="UTF-8"><style>
body {{ background: #0a0a0f; color: #ef4444; font-family: 'JetBrains Mono', monospace; padding: 40px; }}
pre {{ white-space: pre-wrap; line-height: 1.7; }}
h2 {{ color: #f87171; margin-bottom: 16px; }}
</style></head><body>
<h2> COMPILE ERROR </h2>
<pre>{e}</pre>
</body></html>"#
    )
}
/// Run the dev server: compile `file`, watch its directory (and, when
/// found, the project root) for `.ds` changes, recompile on change, and
/// serve the result over HTTP with a poll-based HMR endpoint at `/__hmr`.
pub fn cmd_dev(file: &Path, port: u16) {
    use notify::{Watcher, RecursiveMode};
    use std::sync::mpsc;
    use std::thread;
    println!("🚀 DreamStack dev server");
    println!(" watching: {}", file.display());
    println!(" serving: http://localhost:{port}");
    println!();
    // Shared state: compiled HTML + version counter. The watcher thread
    // writes both; HTTP handlers read them. The HMR client polls the
    // version and reloads when it changes.
    let version = Arc::new(AtomicU64::new(1));
    let compiled_html = Arc::new(Mutex::new(String::new()));
    // Initial compile
    let source = match fs::read_to_string(file) {
        Ok(s) => s,
        Err(e) => {
            eprintln!("❌ Could not read {}: {}", file.display(), e);
            std::process::exit(1);
        }
    };
    let start = Instant::now();
    let base_dir = file.parent().unwrap_or(Path::new("."));
    match compile(&source, base_dir, false) {
        Ok(html) => {
            let ms = start.elapsed().as_millis();
            let html_with_hmr = inject_hmr(&html);
            *compiled_html.lock().unwrap() = html_with_hmr;
            println!("✅ Compiled in {ms}ms ({} bytes)", html.len());
        }
        Err(e) => {
            // Serve the error page (still with HMR, so a fix auto-reloads).
            eprintln!("⚠️ Compile error: {e}");
            *compiled_html.lock().unwrap() = inject_hmr(&compile_error_page(&e));
        }
    }
    // ── File Watcher Thread ─────────────────────────
    let file_path = fs::canonicalize(file).unwrap_or_else(|_| file.to_path_buf());
    let watch_dir = file_path.parent().unwrap().to_path_buf();
    let watch_file = file_path.clone();
    let v_watcher = Arc::clone(&version);
    let html_watcher = Arc::clone(&compiled_html);
    thread::spawn(move || {
        let (tx, rx) = mpsc::channel();
        let mut watcher = notify::recommended_watcher(move |res: Result<notify::Event, notify::Error>| {
            if let Ok(event) = res {
                let _ = tx.send(event);
            }
        }).expect("Failed to create file watcher");
        watcher.watch(&watch_dir, RecursiveMode::Recursive)
            .expect("Failed to watch directory");
        // Also watch project root (for registry/components etc.)
        // Walk up from watch_dir to find a directory containing examples/ or registry/
        let mut project_root = watch_dir.clone();
        for _ in 0..5 {
            if project_root.join("registry").is_dir() || project_root.join("examples").is_dir() {
                if project_root != watch_dir {
                    let _ = watcher.watch(&project_root, RecursiveMode::Recursive);
                    println!("👁 Also watching {} (project root)", project_root.display());
                }
                break;
            }
            if let Some(parent) = project_root.parent() {
                project_root = parent.to_path_buf();
            } else {
                break;
            }
        }
        println!("👁 Watching {} for changes (recursive)", watch_dir.display());
        println!();
        // Debounce: coalesce rapid events
        let mut last_compile = Instant::now();
        loop {
            match rx.recv_timeout(Duration::from_millis(100)) {
                Ok(event) => {
                    // Only recompile for .ds file changes (was named
                    // `dominated`, which said nothing about the intent).
                    let is_ds_change = event.paths.iter().any(|p| {
                        p == &watch_file ||
                        p.extension().map_or(false, |ext| ext == "ds")
                    });
                    if !is_ds_change { continue; }
                    // Debounce: skip if less than 100ms since last compile
                    if last_compile.elapsed() < Duration::from_millis(100) {
                        continue;
                    }
                    // Recompile
                    if let Ok(src) = fs::read_to_string(&watch_file) {
                        let start = Instant::now();
                        match compile(&src, watch_file.parent().unwrap_or(Path::new(".")), false) {
                            Ok(html) => {
                                let ms = start.elapsed().as_millis();
                                let new_version = v_watcher.fetch_add(1, Ordering::SeqCst) + 1;
                                *html_watcher.lock().unwrap() = inject_hmr(&html);
                                println!("🔄 Recompiled in {ms}ms (v{new_version}, {} bytes)", html.len());
                                last_compile = Instant::now();
                            }
                            Err(e) => {
                                // Bump the version so clients reload into
                                // the error page, then reload again on fix.
                                let new_version = v_watcher.fetch_add(1, Ordering::SeqCst) + 1;
                                *html_watcher.lock().unwrap() = inject_hmr(&compile_error_page(&e));
                                eprintln!("❌ v{new_version}: {e}");
                                last_compile = Instant::now();
                            }
                        }
                    }
                }
                Err(mpsc::RecvTimeoutError::Timeout) => {
                    // No events — loop and check again
                    continue;
                }
                Err(mpsc::RecvTimeoutError::Disconnected) => break,
            }
        }
    });
    // ── HTTP Server ─────────────────────────────────
    let server = tiny_http::Server::http(format!("0.0.0.0:{port}")).unwrap();
    println!("✅ Server running at http://localhost:{port}");
    println!(" Press Ctrl+C to stop");
    println!();
    // Auto-open browser
    let url = format!("http://localhost:{port}");
    #[cfg(target_os = "linux")]
    { let _ = std::process::Command::new("xdg-open").arg(&url).spawn(); }
    #[cfg(target_os = "macos")]
    { let _ = std::process::Command::new("open").arg(&url).spawn(); }
    #[cfg(target_os = "windows")]
    { let _ = std::process::Command::new("cmd").args(["/C", "start", &url]).spawn(); }
    for request in server.incoming_requests() {
        let url = request.url().to_string();
        if url == "/__hmr" {
            // Version endpoint for HMR polling
            let v = version.load(Ordering::SeqCst);
            let response = tiny_http::Response::from_string(format!("{v}"))
                .with_header(
                    tiny_http::Header::from_bytes(
                        &b"Content-Type"[..],
                        &b"text/plain"[..],
                    ).unwrap(),
                )
                .with_header(
                    tiny_http::Header::from_bytes(
                        &b"Cache-Control"[..],
                        &b"no-cache, no-store"[..],
                    ).unwrap(),
                );
            let _ = request.respond(response);
        } else {
            // Serve the compiled HTML
            let html = compiled_html.lock().unwrap().clone();
            let response = tiny_http::Response::from_string(&html)
                .with_header(
                    tiny_http::Header::from_bytes(
                        &b"Content-Type"[..],
                        &b"text/html; charset=utf-8"[..],
                    ).unwrap(),
                );
            let _ = request.respond(response);
        }
    }
}

View file

@ -0,0 +1,128 @@
//! Init command — initialize a new DreamStack project.
use std::fs;
use std::path::PathBuf;
use super::add::get_registry_source;
/// Scaffold a new DreamStack project.
///
/// With `Some(name)`, creates `./<name>/` and scaffolds into it; with
/// `None`, scaffolds into the current directory. Writes `app.ds`,
/// `dreamstack.json`, and copies four starter components out of the
/// bundled registry into `components/`. Exits the process (via `expect`)
/// on any filesystem failure — acceptable for a CLI entry point.
pub fn cmd_init(name: Option<String>) {
    let project_dir = match &name {
        Some(n) => PathBuf::from(n),
        None => std::env::current_dir().expect("Failed to get current directory"),
    };
    // Only create a directory when a project name was given; the CWD
    // already exists.
    if name.is_some() {
        fs::create_dir_all(&project_dir).expect("Failed to create project directory");
    }
    let components_dir = project_dir.join("components");
    fs::create_dir_all(&components_dir).expect("Failed to create components/ directory");
    // Write starter app.ds
    let app_source = r#"-- My DreamStack App
-- Showcases: imports, components, when/else, match, each, dynamic lists
import { Card } from "./components/card"
import { Badge } from "./components/badge"
import { Button } from "./components/button"
let count = 0
let name = ""
let darkMode = false
let mood = "happy"
let todos = ["Learn DreamStack", "Build something cool"]
let newTodo = ""
view main = column [
-- Header
text "🚀 My DreamStack App" { variant: "title" }
text "Built with DreamStack — edit app.ds and reload" { variant: "subtitle" }
-- Dashboard cards
row [
Card { title: "Counter", subtitle: "reactive state" } [
text "Count: {count}" { variant: "title" }
row [
Button { label: "+1", onClick: count += 1, variant: "primary" }
Button { label: "-1", onClick: count -= 1, variant: "secondary" }
Button { label: "Reset", onClick: count = 0, variant: "ghost" }
]
]
Card { title: "Greeting", subtitle: "two-way binding" } [
input { bind: name, placeholder: "Your name..." }
when name -> text "Hello, {name}! 👋"
else -> text "Type your name above"
]
]
-- Mood selector with match
Card { title: "Mood", subtitle: "match expressions" } [
row [
button "😊 Happy" { click: mood = "happy", variant: "primary" }
button "😢 Sad" { click: mood = "sad", variant: "secondary" }
button "🔥 Fired up" { click: mood = "fired", variant: "ghost" }
]
match mood
"happy" -> Badge { label: "FEELING GREAT 🌟", variant: "success" }
"sad" -> Badge { label: "HANG IN THERE 💙", variant: "info" }
"fired" -> Badge { label: "LET'S GO 🔥", variant: "warning" }
_ -> Badge { label: "HOW ARE YOU?", variant: "info" }
]
-- Todo list with dynamic arrays
Card { title: "Todos", subtitle: "dynamic lists" } [
row [
input { bind: newTodo, placeholder: "New task..." }
button "Add" { click: todos.push(newTodo), variant: "primary" }
]
each todo in todos ->
row [
text "→ {todo}"
button "×" { click: todos.remove(_idx), variant: "ghost" }
]
button "Clear All" { click: todos = [], variant: "ghost" }
]
]
"#;
    fs::write(project_dir.join("app.ds"), app_source).expect("Failed to write app.ds");
    // Write dreamstack.json
    let project_name = name.as_deref().unwrap_or("my-dreamstack-app");
    let config = format!(r#"{{
"name": "{}",
"version": "0.1.0",
"entry": "app.ds"
}}
"#, project_name);
    fs::write(project_dir.join("dreamstack.json"), config).expect("Failed to write dreamstack.json");
    // Add starter components from registry
    let starter_components = ["button", "card", "badge", "input"];
    for comp_name in &starter_components {
        // Components missing from the registry are silently skipped.
        if let Some(source) = get_registry_source(comp_name) {
            let comp_path = components_dir.join(format!("{}.ds", comp_name));
            fs::write(&comp_path, source).expect("Failed to write component");
        }
    }
    let display_name = name.as_deref().unwrap_or(".");
    println!("🚀 DreamStack project initialized in {}/\n", display_name);
    println!(" Created:");
    println!(" app.ds — your main application");
    println!(" dreamstack.json — project config");
    println!(" components/button.ds — button component");
    println!(" components/card.ds — card component");
    println!(" components/badge.ds — badge component");
    println!(" components/input.ds — input component\n");
    println!(" Next steps:");
    if name.is_some() {
        println!(" cd {}", display_name);
    }
    println!(" dreamstack build app.ds -o dist");
    println!(" dreamstack dev app.ds");
    println!(" dreamstack add --list # see all 11 components");
    println!(" dreamstack add dialog # add with deps\n");
}

View file

@ -0,0 +1,10 @@
//! CLI command modules.
//!
//! One submodule per `dreamstack` subcommand; the binary's dispatcher
//! routes parsed CLI arguments into these. (Was a `///` comment, which
//! attached only to `pub mod build;` instead of documenting the module.)
pub mod build; // compilation entry point (`compile`) used by dev/stream too
pub mod dev; // dev server: file watching + poll-based HMR
pub mod check; // diagnostics pass — TODO confirm exact scope
pub mod stream; // compile with a `stream` declaration and serve via a relay
pub mod playground; // Monaco editor + live-preview server
pub mod add; // registry component installer (`get_registry_source`)
pub mod init; // scaffold a new project (app.ds, config, starter components)
pub mod convert; // best-effort TSX/JSX -> DreamStack conversion

View file

@ -0,0 +1,552 @@
//! Playground command — Monaco editor with live preview.
use std::fs;
use std::path::Path;
use std::time::Instant;
/// The playground HTML page with Monaco editor + live preview.
///
/// Served as-is by `cmd_playground`; before serving, the placeholder token
/// `INITIAL_SOURCE` is substituted with a `String.raw` template literal
/// holding the starting .ds source. The embedded script POSTs the editor
/// contents to `/compile` (debounced, also Ctrl/Cmd+Enter) and renders the
/// JSON response: `full` replaces a shadow root with the compiled HTML,
/// `patch` evaluates the returned JS, `error` shows the error panel.
const PLAYGROUND_HTML: &str = r##"<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>DreamStack Playground</title>
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&family=JetBrains+Mono:wght@400;500&display=swap" rel="stylesheet">
<style>
* { margin: 0; padding: 0; box-sizing: border-box; }
:root {
--bg: #0a0a12;
--surface: #12121e;
--surface-2: #1a1a2e;
--border: #2a2a3e;
--text: #e4e4ef;
--text-dim: #888899;
--accent: #818cf8;
--accent-glow: rgba(129,140,248,0.15);
--green: #34d399;
--red: #f87171;
--yellow: #fbbf24;
}
html, body { height: 100%; background: var(--bg); color: var(--text); font-family: 'Inter', sans-serif; overflow: hidden; }
/* Header */
.header {
height: 52px;
background: var(--surface);
border-bottom: 1px solid var(--border);
display: flex;
align-items: center;
padding: 0 20px;
gap: 16px;
z-index: 100;
}
.header .logo {
font-size: 16px;
font-weight: 700;
background: linear-gradient(135deg, var(--accent), #a78bfa);
-webkit-background-clip: text;
-webkit-text-fill-color: transparent;
letter-spacing: -0.5px;
}
.header .sep { width: 1px; height: 24px; background: var(--border); }
.header .status {
font-size: 12px;
font-family: 'JetBrains Mono', monospace;
color: var(--text-dim);
display: flex;
align-items: center;
gap: 8px;
}
.header .status .dot {
width: 7px; height: 7px;
border-radius: 50%;
background: var(--green);
box-shadow: 0 0 6px rgba(52,211,153,0.5);
transition: background 0.3s, box-shadow 0.3s;
}
.header .status .dot.error {
background: var(--red);
box-shadow: 0 0 6px rgba(248,113,113,0.5);
}
.header .status .dot.compiling {
background: var(--yellow);
box-shadow: 0 0 6px rgba(251,191,36,0.5);
animation: pulse 0.6s ease-in-out infinite;
}
@keyframes pulse { 0%, 100% { opacity: 1; } 50% { opacity: 0.4; } }
.header .actions { margin-left: auto; display: flex; gap: 8px; }
.header .btn {
padding: 6px 14px;
border-radius: 6px;
border: 1px solid var(--border);
background: var(--surface-2);
color: var(--text);
font-family: 'JetBrains Mono', monospace;
font-size: 11px;
cursor: pointer;
transition: all 0.15s;
}
.header .btn:hover { border-color: var(--accent); background: var(--accent-glow); }
.header .btn.primary { background: var(--accent); border-color: var(--accent); color: #fff; }
.header .btn.primary:hover { opacity: 0.9; }
/* Layout */
.container {
display: flex;
height: calc(100vh - 52px);
}
.editor-pane {
width: 50%;
min-width: 300px;
position: relative;
border-right: 1px solid var(--border);
}
.preview-pane {
flex: 1;
position: relative;
background: #fff;
}
#previewRoot {
width: 100%;
height: 100%;
overflow: auto;
}
/* Resize handle */
.resize-handle {
position: absolute;
right: -3px;
top: 0;
width: 6px;
height: 100%;
cursor: col-resize;
z-index: 10;
background: transparent;
transition: background 0.2s;
}
.resize-handle:hover, .resize-handle.active {
background: var(--accent);
}
/* Error panel */
.error-panel {
position: absolute;
bottom: 0;
left: 0;
right: 0;
max-height: 40%;
background: rgba(10,10,18,0.97);
border-top: 2px solid var(--red);
overflow-y: auto;
z-index: 20;
display: none;
font-family: 'JetBrains Mono', monospace;
font-size: 12px;
padding: 16px 20px;
color: var(--red);
line-height: 1.6;
white-space: pre-wrap;
}
.error-panel.visible { display: block; }
/* Monaco loader */
#editor { width: 100%; height: 100%; }
</style>
</head>
<body>
<div class="header">
<span class="logo">DreamStack</span>
<span class="sep"></span>
<div class="status">
<span class="dot" id="statusDot"></span>
<span id="statusText">Ready</span>
</div>
<div class="actions">
<button class="btn" onclick="formatCode()">Format</button>
<button class="btn primary" onclick="compileNow()">Compile </button>
</div>
</div>
<div class="container">
<div class="editor-pane" id="editorPane">
<div id="editor"></div>
<div class="resize-handle" id="resizeHandle"></div>
<div class="error-panel" id="errorPanel"></div>
</div>
<div class="preview-pane" id="previewPane">
<div id="previewRoot"></div>
</div>
</div>
<script src="https://cdn.jsdelivr.net/npm/monaco-editor@0.45.0/min/vs/loader.js"></script>
<script>
// ── State ──
let editor;
let compileTimer = null;
const DEBOUNCE_MS = 400;
// ── Monaco Setup ──
require.config({ paths: { vs: 'https://cdn.jsdelivr.net/npm/monaco-editor@0.45.0/min/vs' }});
require(['vs/editor/editor.main'], function () {
// Register DreamStack language
monaco.languages.register({ id: 'dreamstack' });
monaco.languages.setMonarchTokensProvider('dreamstack', {
keywords: ['let','view','when','match','on','effect','perform','handle','import','export',
'if','else','every','type','where','stream','from','layout','component','for','in'],
typeKeywords: ['Int','Float','String','Bool','Signal','Derived','Array','Stream','View'],
operators: ['=','>','<','>=','<=','==','!=','+','-','*','/','%','|>','->','!','&&','||'],
tokenizer: {
root: [
[/\/\/.*$/, 'comment'],
[/"([^"\\]|\\.)*"/, 'string'],
[/\d+\.\d+/, 'number.float'],
[/\d+/, 'number'],
[/[a-zA-Z_]\w*/, {
cases: {
'@keywords': 'keyword',
'@typeKeywords': 'type',
'@default': 'identifier'
}
}],
[/[{}()\[\]]/, 'delimiter.bracket'],
[/[,;:]/, 'delimiter'],
[/[=><!\+\-\*\/%|&]+/, 'operator'],
[/\s+/, 'white'],
]
}
});
// Define DreamStack theme
monaco.editor.defineTheme('dreamstack-dark', {
base: 'vs-dark',
inherit: true,
rules: [
{ token: 'keyword', foreground: '818cf8', fontStyle: 'bold' },
{ token: 'type', foreground: '34d399' },
{ token: 'string', foreground: 'fbbf24' },
{ token: 'number', foreground: 'f472b6' },
{ token: 'number.float', foreground: 'f472b6' },
{ token: 'comment', foreground: '555566', fontStyle: 'italic' },
{ token: 'operator', foreground: '94a3b8' },
{ token: 'delimiter', foreground: '64748b' },
{ token: 'identifier', foreground: 'e4e4ef' },
],
colors: {
'editor.background': '#0a0a12',
'editor.foreground': '#e4e4ef',
'editor.lineHighlightBackground': '#1a1a2e',
'editorLineNumber.foreground': '#3a3a4e',
'editorLineNumber.activeForeground': '#818cf8',
'editor.selectionBackground': '#818cf833',
'editorCursor.foreground': '#818cf8',
'editorIndentGuide.background': '#1e1e30',
}
});
// Create editor
editor = monaco.editor.create(document.getElementById('editor'), {
value: INITIAL_SOURCE,
language: 'dreamstack',
theme: 'dreamstack-dark',
fontFamily: "'JetBrains Mono', monospace",
fontSize: 14,
lineHeight: 24,
padding: { top: 16 },
minimap: { enabled: false },
scrollBeyondLastLine: false,
renderLineHighlight: 'all',
cursorBlinking: 'smooth',
cursorSmoothCaretAnimation: 'on',
smoothScrolling: true,
tabSize: 2,
wordWrap: 'on',
automaticLayout: true,
});
// Auto-compile on change
editor.onDidChangeModelContent(() => {
clearTimeout(compileTimer);
compileTimer = setTimeout(compileNow, DEBOUNCE_MS);
});
// Keyboard shortcut: Cmd/Ctrl + Enter
editor.addCommand(monaco.KeyMod.CtrlCmd | monaco.KeyCode.Enter, compileNow);
// Initial compile
compileNow();
});
// ── Compile ──
let compiling = false;
async function compileNow() {
if (compiling || !editor) return;
compiling = true;
const dot = document.getElementById('statusDot');
const text = document.getElementById('statusText');
const errorPanel = document.getElementById('errorPanel');
dot.className = 'dot compiling';
text.textContent = 'Compiling...';
const source = editor.getValue();
const start = performance.now();
try {
const res = await fetch('/compile', {
method: 'POST',
headers: { 'Content-Type': 'text/plain' },
body: source,
});
const ms = (performance.now() - start).toFixed(0);
const data = await res.json();
if (data.type === 'full') {
const root = document.getElementById('previewRoot');
// Parse HTML and extract body + scripts
const parser = new DOMParser();
const doc = parser.parseFromString(data.html, 'text/html');
// Attach or reuse shadow root
if (!root.shadowRoot) root.attachShadow({ mode: 'open' });
const shadow = root.shadowRoot;
shadow.innerHTML = '';
// Copy styles into shadow
doc.querySelectorAll('style').forEach(s => shadow.appendChild(s.cloneNode(true)));
// Copy body content into shadow
const wrapper = document.createElement('div');
wrapper.innerHTML = doc.body.innerHTML;
shadow.appendChild(wrapper);
// Execute scripts in main window context (where DS signals live)
doc.querySelectorAll('script').forEach(s => {
try { new Function(s.textContent)(); } catch(e) { console.warn('Script error:', e); }
});
dot.className = 'dot';
text.textContent = `Full compile ${ms}ms`;
errorPanel.classList.remove('visible');
} else if (data.type === 'patch') {
if (data.js && data.js.length > 0) {
try {
new Function(data.js)();
} catch (e) { console.warn('Patch eval error:', e); }
}
dot.className = 'dot';
text.textContent = `Patched ${ms}ms `;
errorPanel.classList.remove('visible');
} else if (data.type === 'error') {
dot.className = 'dot error';
text.textContent = `Error (${ms}ms)`;
errorPanel.textContent = data.message;
errorPanel.classList.add('visible');
}
} catch (e) {
dot.className = 'dot error';
text.textContent = 'Network error';
errorPanel.textContent = e.message;
errorPanel.classList.add('visible');
}
compiling = false;
}
function formatCode() {
if (editor) editor.getAction('editor.action.formatDocument')?.run();
}
// ── Resize Handle ──
const handle = document.getElementById('resizeHandle');
const editorPane = document.getElementById('editorPane');
let resizing = false;
handle.addEventListener('mousedown', (e) => {
resizing = true;
handle.classList.add('active');
document.body.style.cursor = 'col-resize';
document.body.style.userSelect = 'none';
e.preventDefault();
});
window.addEventListener('mousemove', (e) => {
if (!resizing) return;
const pct = (e.clientX / window.innerWidth) * 100;
const clamped = Math.max(25, Math.min(75, pct));
editorPane.style.width = clamped + '%';
});
window.addEventListener('mouseup', () => {
resizing = false;
handle.classList.remove('active');
document.body.style.cursor = '';
document.body.style.userSelect = '';
});
</script>
</body>
</html>
"##;
/// Build a tiny_http header from static byte strings.
/// Extracted: the verbose `Header::from_bytes(&b"..."[..], ...).unwrap()`
/// construction was repeated four times below.
fn header(name: &[u8], value: &[u8]) -> tiny_http::Header {
    tiny_http::Header::from_bytes(name, value).unwrap()
}
/// Run the playground server: serve the Monaco editor page and handle
/// `POST /compile` requests by running the incremental compiler over the
/// posted source, answering with a `full` / `patch` / `error` JSON payload.
pub fn cmd_playground(file: Option<&Path>, port: u16) {
    println!("🎮 DreamStack Playground");
    println!(" http://localhost:{port}");
    println!();
    // Load initial source from file or use default
    let initial_source = if let Some(path) = file {
        match fs::read_to_string(path) {
            Ok(s) => {
                println!(" loaded: {}", path.display());
                s
            }
            Err(e) => {
                eprintln!("⚠️ Could not read {}: {e}", path.display());
                default_playground_source()
            }
        }
    } else {
        default_playground_source()
    };
    // Build the playground HTML with the initial source injected.
    // The source is escaped so it survives inside a JS template literal.
    let escaped_source = initial_source
        .replace('\\', "\\\\")
        .replace('`', "\\`")
        .replace("${", "\\${");
    let playground_html = PLAYGROUND_HTML.replace(
        "INITIAL_SOURCE",
        &format!("String.raw`{}`", escaped_source),
    );
    let server = tiny_http::Server::http(format!("0.0.0.0:{port}")).unwrap();
    println!("✅ Playground running at http://localhost:{port}");
    println!(" Press Ctrl+C to stop");
    println!();
    let base_dir = file.and_then(|f| f.parent()).unwrap_or(Path::new("."));
    let _base_dir = base_dir.to_path_buf();
    // One incremental compiler for the whole session: lets repeat compiles
    // of similar sources come back as cheap patches instead of full output.
    let mut inc_compiler = ds_incremental::IncrementalCompiler::new();
    for mut request in server.incoming_requests() {
        let url = request.url().to_string();
        if url == "/compile" && request.method() == &tiny_http::Method::Post {
            // Read the body (the .ds source to compile)
            let mut body = String::new();
            let reader = request.as_reader();
            match reader.read_to_string(&mut body) {
                Ok(_) => {}
                Err(e) => {
                    let resp = tiny_http::Response::from_string(format!("Read error: {e}"))
                        .with_status_code(400);
                    let _ = request.respond(resp);
                    continue;
                }
            }
            let start = Instant::now();
            match inc_compiler.compile(&body) {
                ds_incremental::IncrementalResult::Full(html) => {
                    let ms = start.elapsed().as_millis();
                    println!(" ✅ full compile in {ms}ms ({} bytes)", html.len());
                    let json = format!(r#"{{"type":"full","html":{}}}"#, json_escape(&html));
                    let response = tiny_http::Response::from_string(&json)
                        .with_header(header(b"Content-Type", b"application/json; charset=utf-8"))
                        .with_header(header(b"Access-Control-Allow-Origin", b"*"));
                    let _ = request.respond(response);
                }
                ds_incremental::IncrementalResult::Patch(js) => {
                    let ms = start.elapsed().as_millis();
                    // An empty patch means the source was unchanged.
                    if js.is_empty() {
                        println!(" ⚡ unchanged ({ms}ms)");
                    } else {
                        println!(" ⚡ incremental patch in {ms}ms ({} bytes)", js.len());
                    }
                    let json = format!(r#"{{"type":"patch","js":{}}}"#, json_escape(&js));
                    let response = tiny_http::Response::from_string(&json)
                        .with_header(header(b"Content-Type", b"application/json; charset=utf-8"))
                        .with_header(header(b"Access-Control-Allow-Origin", b"*"));
                    let _ = request.respond(response);
                }
                ds_incremental::IncrementalResult::Error(e) => {
                    println!(" ❌ compile error");
                    let json = format!(r#"{{"type":"error","message":{}}}"#, json_escape(&e));
                    let response = tiny_http::Response::from_string(&json)
                        .with_status_code(400)
                        .with_header(header(b"Content-Type", b"application/json; charset=utf-8"));
                    let _ = request.respond(response);
                }
            }
        } else {
            // Serve the playground HTML for any other path
            let response = tiny_http::Response::from_string(&playground_html)
                .with_header(header(b"Content-Type", b"text/html; charset=utf-8"));
            let _ = request.respond(response);
        }
    }
}
/// Default .ds source shown in the playground when no file is supplied:
/// a minimal signal + view + click-handler example.
fn default_playground_source() -> String {
    String::from(
        r#"let count = 0
let label = "Hello, DreamStack!"
on click -> count = count + 1
view main = column [
text label
text count
button "Increment" { click: count += 1 }
]
"#,
    )
}
/// Escape a string for embedding in JSON.
///
/// Returns the value wrapped in double quotes, with `"`, `\`, newline,
/// carriage return, tab and remaining C0 control characters escaped per
/// the JSON grammar.
pub fn json_escape(s: &str) -> String {
    let mut escaped = String::with_capacity(s.len() + 2);
    escaped.push('"');
    for ch in s.chars() {
        match ch {
            '"' => escaped.push_str("\\\""),
            '\\' => escaped.push_str("\\\\"),
            '\n' => escaped.push_str("\\n"),
            '\r' => escaped.push_str("\\r"),
            '\t' => escaped.push_str("\\t"),
            // Any other control character gets the \uXXXX form.
            ctrl if ctrl < '\x20' => escaped.push_str(&format!("\\u{:04x}", ctrl as u32)),
            other => escaped.push(other),
        }
    }
    escaped.push('"');
    escaped
}

View file

@ -0,0 +1,77 @@
//! Stream command — compile and stream via bitstream relay.
use std::fs;
use std::path::Path;
use super::build::compile;
use super::dev::inject_hmr;
/// `dreamstack stream` — compile `file` with streaming enabled and serve it.
///
/// If the source contains no `stream` declaration, one is auto-injected
/// targeting `relay` with the given `mode`, attached to the first `view`
/// found in the program (falling back to the name "main" if parsing fails
/// or no view exists). On success the compiled page (with HMR injected)
/// is served on `port` in an endless request loop; the function never
/// returns in that case. Exits the process with status 1 if the file
/// cannot be read or compilation fails.
pub fn cmd_stream(file: &Path, relay: &str, mode: &str, port: u16) {
    println!("⚡ DreamStack stream");
    println!(" source: {}", file.display());
    println!(" relay: {}", relay);
    println!(" mode: {}", mode);
    println!(" port: {}", port);
    println!();
    // Read the whole source up front; a missing/unreadable file is fatal.
    let source = match fs::read_to_string(file) {
        Ok(s) => s,
        Err(e) => {
            eprintln!("❌ Could not read {}: {}", file.display(), e);
            std::process::exit(1);
        }
    };
    // Inject stream declaration if not present
    // NOTE(review): this substring check also matches "stream " occurring
    // inside string literals or comments — confirm that is acceptable.
    let stream_source = if source.contains("stream ") {
        source
    } else {
        // Auto-inject a stream declaration for the first view
        let view_name = {
            // Throwaway parse purely to discover the first view's name;
            // the real compile happens below via `compile`.
            let mut lexer = ds_parser::Lexer::new(&source);
            let tokens = lexer.tokenize();
            let mut parser = ds_parser::Parser::new(tokens);
            if let Ok(program) = parser.parse_program() {
                program.declarations.iter()
                    .find_map(|d| if let ds_parser::ast::Declaration::View(v) = d { Some(v.name.clone()) } else { None })
                    .unwrap_or_else(|| "main".to_string())
            } else {
                // Parse failed here — let `compile` below surface the error.
                "main".to_string()
            }
        };
        format!(
            "{}\nstream {} on \"{}\" {{ mode: {} }}",
            source, view_name, relay, mode
        )
    };
    // Compile relative to the source file's directory (non-minified build).
    match compile(&stream_source, file.parent().unwrap_or(Path::new(".")), false) {
        Ok(html) => {
            let html_with_hmr = inject_hmr(&html);
            println!("✅ Compiled with streaming enabled");
            println!(" Open: http://localhost:{port}");
            println!(" Relay: {relay}");
            println!();
            println!(" Make sure the relay is running:");
            println!(" cargo run -p ds-stream");
            println!();
            // Serve the compiled page
            // NOTE(review): `.unwrap()` panics if the port is already in
            // use — consider a friendly error like the read/compile paths.
            let server = tiny_http::Server::http(format!("0.0.0.0:{port}")).unwrap();
            // Every request gets the same compiled page; respond errors
            // are deliberately ignored (client may have disconnected).
            for request in server.incoming_requests() {
                let response = tiny_http::Response::from_string(&html_with_hmr)
                    .with_header(
                        tiny_http::Header::from_bytes(
                            &b"Content-Type"[..],
                            &b"text/html; charset=utf-8"[..],
                        ).unwrap(),
                    );
                let _ = request.respond(response);
            }
        }
        Err(e) => {
            eprintln!("❌ Compile error: {e}");
            std::process::exit(1);
        }
    }
}

File diff suppressed because it is too large Load diff

View file

@ -823,12 +823,12 @@ mod tests {
let source = r#"
let count = 0
view main {
text { "Count: {count}" }
view main = column [
text "Count: {count}"
button "+" {
click: count += 1
}
}
]
"#;
let mut lexer = ds_parser::Lexer::new(source);
let tokens = lexer.tokenize();

View file

@ -163,7 +163,38 @@ impl JsEmitter {
}
}
// Phase 1b: Emit runtime refinement guards
// Phase 1b: Emit enum declarations as frozen JS objects
for decl in &program.declarations {
if let Declaration::Enum(enum_decl) = decl {
// Emit: const Status = Object.freeze({ Loading: "Loading", Ok: (data) => ({ _tag: "Ok", data }), ... })
let mut variant_entries = Vec::new();
for variant in &enum_decl.variants {
if variant.fields.is_empty() {
// Unit variant: Status.Loading === "Loading"
variant_entries.push(format!("{}: \"{}\"", variant.name, variant.name));
} else {
// Data variant: Status.Ok(data) => { _tag: "Ok", data }
let params: Vec<&str> = variant.fields.iter()
.map(|p| p.name.as_str())
.collect();
let fields: Vec<String> = params.iter()
.map(|p| format!("{p}"))
.collect();
variant_entries.push(format!(
"{}: ({}) => ({{ _tag: \"{}\", {} }})",
variant.name, params.join(", "), variant.name, fields.join(", ")
));
}
}
self.emit_line(&format!(
"const {} = Object.freeze({{ {} }});",
enum_decl.name,
variant_entries.join(", ")
));
}
}
// Phase 1c: Emit runtime refinement guards
// Collect type aliases from program
let mut type_aliases: std::collections::HashMap<String, &TypeExpr> = std::collections::HashMap::new();
for decl in &program.declarations {
@ -1421,6 +1452,9 @@ impl JsEmitter {
let args_js: Vec<String> = args.iter().map(|a| self.emit_expr(a)).collect();
format!("{}.{}({})", obj_js, method, args_js.join(", "))
}
Expr::RawString(text) => {
format!("\"{}\"", text.replace('\\', "\\\\").replace('"', "\\\"").replace('\n', "\\n"))
}
_ => "null".to_string(),
}
}
@ -4254,3 +4288,103 @@ fn tree_shake_runtime(runtime: &str, used_features: &HashSet<String>) -> String
result
}
#[cfg(test)]
mod tests {
    //! JS codegen smoke tests: each compiles a tiny DreamStack program end
    //! to end (lex → parse → analyze → emit) and asserts the emitted HTML
    //! contains the runtime constructs the feature should produce.
    use super::*;

    /// Helper: parse source → emit HTML (not minified).
    fn emit(source: &str) -> String {
        let mut lexer = ds_parser::Lexer::new(source);
        let tokens = lexer.tokenize();
        let mut parser = ds_parser::Parser::new(tokens);
        // Test sources are known-good; a parse failure is a test bug.
        let program = parser.parse_program().unwrap();
        let graph = ds_analyzer::SignalGraph::from_program(&program);
        let views = ds_analyzer::SignalGraph::analyze_views(&program);
        JsEmitter::emit_html(&program, &graph, &views, false)
    }

    #[test]
    fn test_counter_signals() {
        let html = emit("let count = 0\nview main = text \"hi\"");
        assert!(html.contains("DS.signal(0)"), "should emit DS.signal(0)");
    }

    #[test]
    fn test_derived_signal() {
        let html = emit("let count = 0\nlet doubled = count * 2\nview main = text \"x\"");
        assert!(html.contains("DS.derived("), "should emit DS.derived()");
    }

    #[test]
    fn test_view_container() {
        let html = emit("view main = column [\n text \"hello\"\n text \"world\"\n]");
        assert!(html.contains("createElement"), "should create DOM elements");
    }

    #[test]
    fn test_event_handler() {
        let html = emit("let count = 0\nview main = button \"+\" { click: count += 1 }");
        assert!(html.contains("addEventListener"), "should add event listener for click");
        assert!(html.contains("click"), "should reference click event");
    }

    #[test]
    fn test_for_in_loop() {
        // Loose assertion: any of the list-rendering strategies counts.
        let html = emit("let items = [\"a\", \"b\"]\nview main = column [\n for item in items -> text item\n]");
        assert!(html.contains("keyedList") || html.contains("forEach") || html.contains("for"),
            "should emit list rendering code");
    }

    #[test]
    fn test_when_conditional() {
        let html = emit("let show = true\nview main = column [\n when show -> text \"visible\"\n]");
        assert!(html.contains("DS.effect("), "should emit effect for conditional");
    }

    #[test]
    fn test_match_expression() {
        let html = emit("let state = \"loading\"\nview main = match state\n \"loading\" -> text \"Loading...\"\n \"done\" -> text \"Done!\"");
        // Match should generate some kind of conditional/switch
        assert!(html.contains("DS.effect(") || html.contains("match") || html.contains("==="),
            "should emit match logic");
    }

    #[test]
    fn test_enum_declaration() {
        let html = emit("enum Color { Red, Green, Blue }\nview main = text \"hi\"");
        assert!(html.contains("Object.freeze") || html.contains("Red"),
            "should emit enum object");
    }

    #[test]
    fn test_component_emission() {
        let html = emit("component Card(title) = text title\nview main = Card { title: \"hello\" }");
        // Component should be emitted as a function
        assert!(html.contains("function") || html.contains("Card"),
            "should emit component as function");
    }

    #[test]
    fn test_string_interpolation() {
        let html = emit("let name = \"World\"\nview main = text \"Hello, {name}!\"");
        assert!(html.contains("DS.effect("), "interpolation should use reactive effect");
    }

    #[test]
    fn test_spring_animation() {
        let html = emit("let pos = spring(100, 300, 20)\nview main = text \"x\"");
        assert!(html.contains("DS.spring(") || html.contains("spring"),
            "should emit spring creation");
    }

    #[test]
    fn test_tree_shaking_no_spring() {
        let html = emit("let count = 0\nview main = text \"hi\"");
        // If no spring is used, spring code should be excluded
        // NOTE(review): `!A || !B` only fails when BOTH markers survive
        // tree-shaking; `&&` would assert full exclusion as the comment
        // above implies — confirm intended strength against the runtime.
        assert!(!html.contains("class Spring") || !html.contains("_activeSprings"),
            "should tree-shake unused spring runtime");
    }
}

View file

@ -0,0 +1,7 @@
[package]
name = "ds-diagnostic"
version = "0.1.0"
edition.workspace = true
[dependencies]
ds-parser.workspace = true

View file

@ -0,0 +1,360 @@
/// DreamStack Diagnostic — unified error/warning type shared across compiler crates.
///
/// Provides Elm-style error rendering with carets, multi-span labels, and suggestions.
use ds_parser::Span;
// ── Core Types ──────────────────────────────────────────
/// A compiler diagnostic — error, warning, or hint.
#[derive(Debug, Clone)]
pub struct Diagnostic {
    // How serious this diagnostic is; drives sort order and the rendered header.
    pub severity: Severity,
    // Optional machine-readable code such as "E0001"; shown in the header.
    pub code: Option<String>,
    // Primary human-readable message, printed next to the caret line.
    pub message: String,
    // Primary source location the diagnostic points at.
    pub span: Span,
    // Secondary locations, each rendered with its own dash underline.
    pub labels: Vec<Label>,
    // Optional suggested fix, rendered as a trailing "Hint:"/"Try:" footer.
    pub suggestion: Option<Suggestion>,
}
/// Severity level of a diagnostic.
///
/// Variant order matters: the derived `Ord` ranks `Hint < Warning < Error`,
/// which `sort_diagnostics` relies on to list errors first.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum Severity {
    Hint,
    Warning,
    Error,
}
/// A secondary label pointing at a span with a message.
#[derive(Debug, Clone)]
pub struct Label {
    // Location the label underlines (with dashes, in `render`).
    pub span: Span,
    // Short explanation printed after the underline.
    pub message: String,
}
/// A suggested fix attached to a diagnostic.
#[derive(Debug, Clone)]
pub struct Suggestion {
    // Human-readable description of the fix; rendered after "Hint:".
    pub message: String,
    // Replacement text; rendered after "Try:" when non-empty.
    pub replacement: String,
    // Span the replacement applies to. Not consulted by `render` —
    // presumably for future machine-applied fixes (IDE quick-fix).
    pub span: Span,
}
// ── Constructors ────────────────────────────────────────
impl Diagnostic {
/// Create an error diagnostic.
pub fn error(message: impl Into<String>, span: Span) -> Self {
Diagnostic {
severity: Severity::Error,
code: None,
message: message.into(),
span,
labels: Vec::new(),
suggestion: None,
}
}
/// Create a warning diagnostic.
pub fn warning(message: impl Into<String>, span: Span) -> Self {
Diagnostic {
severity: Severity::Warning,
code: None,
message: message.into(),
span,
labels: Vec::new(),
suggestion: None,
}
}
/// Create a hint diagnostic.
pub fn hint(message: impl Into<String>, span: Span) -> Self {
Diagnostic {
severity: Severity::Hint,
code: None,
message: message.into(),
span,
labels: Vec::new(),
suggestion: None,
}
}
/// Attach a diagnostic code (e.g. "E0001").
pub fn with_code(mut self, code: impl Into<String>) -> Self {
self.code = Some(code.into());
self
}
/// Add a secondary label.
pub fn with_label(mut self, span: Span, message: impl Into<String>) -> Self {
self.labels.push(Label { span, message: message.into() });
self
}
/// Add a suggestion.
pub fn with_suggestion(mut self, span: Span, message: impl Into<String>, replacement: impl Into<String>) -> Self {
self.suggestion = Some(Suggestion {
message: message.into(),
replacement: replacement.into(),
span,
});
self
}
}
// ── Rendering ───────────────────────────────────────────
impl Severity {
pub fn label(&self) -> &'static str {
match self {
Severity::Error => "ERROR",
Severity::Warning => "WARNING",
Severity::Hint => "HINT",
}
}
pub fn prefix(&self) -> &'static str {
match self {
Severity::Error => "error",
Severity::Warning => "warning",
Severity::Hint => "hint",
}
}
}
impl std::fmt::Display for Severity {
    /// Displays the uppercase label form (same text as [`Severity::label`]).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.label())
    }
}
/// Render a diagnostic with source context, carets, and labels.
///
/// Produces Elm/Rust-style error output:
/// ```text
/// ── ERROR ──────────────────────────────────────────────
/// 5:12
///
///  5 │ let count: Int = "hello"
///    │            ^^^^^^^ expected Int, found String
///
/// Hint: ...
/// ```
pub fn render(diag: &Diagnostic, source: &str) -> String {
    let mut out = String::new();
    let lines: Vec<&str> = source.lines().collect();

    // Header rule padded toward 60 columns.
    let title = match &diag.code {
        Some(code) => format!("{} [{}]", diag.severity.label(), code),
        None => diag.severity.label().to_string(),
    };
    let rule_width = 60usize.saturating_sub(title.len() + 4);
    // Fix: this previously repeated the empty string — a no-op that
    // discarded the computed `rule_width` and emitted no trailing rule.
    out.push_str(&format!("── {} {}\n", title, "─".repeat(rule_width)));

    // Primary span position, "line:col".
    let line = diag.span.line as usize;
    let col = diag.span.col as usize;
    out.push_str(&format!("{}:{}\n", line, col));

    // Source context for the primary span (skipped if out of range).
    if line > 0 && line <= lines.len() {
        let src_line = lines[line - 1];
        let line_num = format!("{}", line);
        let pad = " ".repeat(line_num.len());
        out.push('\n');
        // Preceding line for context, when present and non-blank.
        // (outer check already guarantees line - 2 < lines.len())
        if line >= 2 {
            let prev = lines[line - 2];
            if !prev.trim().is_empty() {
                out.push_str(&format!(" {} │ {}\n", format!("{:>width$}", line - 1, width = line_num.len()), prev));
            }
        }
        // Fix: the ` │ ` gutter documented in the example above was
        // missing from the emitted lines; added here and on the caret
        // and label lines so columns stay aligned.
        out.push_str(&format!(" {} │ {}\n", line_num, src_line));
        // Caret line: `col` is 1-based, caret offset is 0-based.
        let caret_start = if col > 0 { col - 1 } else { 0 };
        let caret_len = if diag.span.end > diag.span.start {
            (diag.span.end - diag.span.start).max(1)
        } else {
            1
        };
        out.push_str(&format!(" {} │ {}{}",
            pad,
            " ".repeat(caret_start),
            "^".repeat(caret_len),
        ));
        // Primary message on the caret line.
        out.push_str(&format!(" {}\n", diag.message));
    }

    // Secondary labels, each with its own source line and dash underline.
    for label in &diag.labels {
        let l = label.span.line as usize;
        if l > 0 && l <= lines.len() {
            let src = lines[l - 1];
            let lnum = format!("{}", l);
            let lpad = " ".repeat(lnum.len());
            out.push('\n');
            out.push_str(&format!(" {} │ {}\n", lnum, src));
            let lc = if label.span.col > 0 { label.span.col as usize - 1 } else { 0 };
            let ll = if label.span.end > label.span.start {
                (label.span.end - label.span.start).max(1)
            } else {
                1
            };
            out.push_str(&format!(" {} │ {}{} {}\n", lpad, " ".repeat(lc), "-".repeat(ll), label.message));
        }
    }

    // Suggestion footer: "Hint:" always, "Try:" only with replacement text.
    if let Some(ref sugg) = diag.suggestion {
        out.push_str(&format!("\n Hint: {}\n", sugg.message));
        if !sugg.replacement.is_empty() {
            out.push_str(&format!("   Try: {}\n", sugg.replacement));
        }
    }
    out
}
// ── Sorting ─────────────────────────────────────────────
/// Sort diagnostics by severity (errors first) then by span position.
pub fn sort_diagnostics(diags: &mut Vec<Diagnostic>) {
    // Severity compares in reverse (b vs a) so Error sorts before Warning
    // before Hint; line and column compare forward for top-to-bottom order.
    diags.sort_by(|a, b| {
        (b.severity, a.span.line, a.span.col).cmp(&(a.severity, b.span.line, b.span.col))
    });
}
// ── Conversions ─────────────────────────────────────────
use ds_parser::parser::ParseError;

/// Convert a `ParseError` into a `Diagnostic`.
///
/// Parse errors only carry a line/column position, so the resulting span
/// is zero-width (`start == end == 0`); all parse errors share code
/// "E0001". The error's `source_line` is not carried over — `render`
/// re-slices the line from the full source instead.
impl From<ParseError> for Diagnostic {
    fn from(err: ParseError) -> Self {
        Diagnostic::error(
            // `err` is consumed by value, so the message is moved rather
            // than cloned (the previous `.clone()` was redundant).
            err.message,
            Span {
                start: 0,
                end: 0,
                line: err.line,
                col: err.col,
            },
        )
        .with_code("E0001")
    }
}

/// Convert a slice of `ParseError`s into a `Vec<Diagnostic>`.
pub fn parse_errors_to_diagnostics(errors: &[ParseError]) -> Vec<Diagnostic> {
    errors.iter().map(|e| Diagnostic::from(e.clone())).collect()
}
// ── Tests ───────────────────────────────────────────────
#[cfg(test)]
mod tests {
    //! Rendering, sorting, and ParseError-conversion tests for the
    //! diagnostic type.
    use super::*;

    /// Shorthand span constructor. Note the argument order (line, col,
    /// start, end) differs from the struct's field order (start, end,
    /// line, col).
    fn span(line: usize, col: usize, start: usize, end: usize) -> Span {
        Span { start, end, line, col }
    }

    #[test]
    fn test_error_rendering() {
        let source = "let count = 0\nlet name: Int = \"hello\"\nview main = text \"hi\"";
        // Span 31..38 covers the 7 chars of `"hello"` → seven carets.
        let diag = Diagnostic::error("expected Int, found String", span(2, 17, 31, 38));
        let output = render(&diag, source);
        assert!(output.contains("ERROR"));
        assert!(output.contains("2:17"));
        assert!(output.contains("^^^^^^^"));
        assert!(output.contains("expected Int, found String"));
    }

    #[test]
    fn test_warning_rendering() {
        let source = "let unused = 42\nview main = text \"hi\"";
        let diag = Diagnostic::warning("signal `unused` is never read", span(1, 5, 4, 10));
        let output = render(&diag, source);
        assert!(output.contains("WARNING"));
        assert!(output.contains("unused"));
    }

    #[test]
    fn test_with_suggestion() {
        let source = "let count = 0\nmatch status\n Loading -> text \"...\"\n";
        let diag = Diagnostic::error("non-exhaustive match", span(2, 1, 14, 26))
            .with_code("E0004")
            .with_suggestion(span(2, 1, 14, 26), "Add missing variants", " _ -> text \"fallback\"");
        let output = render(&diag, source);
        assert!(output.contains("[E0004]"));
        assert!(output.contains("Hint:"));
        assert!(output.contains("Add missing variants"));
    }

    #[test]
    fn test_sort_diagnostics() {
        let mut diags = vec![
            Diagnostic::warning("w1", span(3, 1, 30, 35)),
            Diagnostic::error("e1", span(1, 1, 0, 5)),
            Diagnostic::error("e2", span(5, 1, 50, 55)),
        ];
        sort_diagnostics(&mut diags);
        // Errors first, then by line
        assert_eq!(diags[0].message, "e1");
        assert_eq!(diags[1].message, "e2");
        assert_eq!(diags[2].message, "w1");
    }

    #[test]
    fn test_parse_error_to_diagnostic() {
        let err = ParseError {
            message: "unexpected token: Colon".to_string(),
            line: 5,
            col: 12,
            source_line: Some("let x = foo(bar:".to_string()),
        };
        let diag = Diagnostic::from(err);
        // Conversion fixes severity to Error and code to E0001, and
        // carries the line/col through to a zero-width span.
        assert_eq!(diag.severity, Severity::Error);
        assert!(diag.message.contains("unexpected token: Colon"));
        assert_eq!(diag.span.line, 5);
        assert_eq!(diag.span.col, 12);
        assert_eq!(diag.code, Some("E0001".to_string()));
    }

    #[test]
    fn test_parse_errors_to_diagnostics_batch() {
        let errors = vec![
            ParseError {
                message: "error 1".to_string(),
                line: 1,
                col: 1,
                source_line: None,
            },
            ParseError {
                message: "error 2".to_string(),
                line: 3,
                col: 5,
                source_line: None,
            },
        ];
        let diags = parse_errors_to_diagnostics(&errors);
        // Order is preserved: one diagnostic per error, in input order.
        assert_eq!(diags.len(), 2);
        assert_eq!(diags[0].message, "error 1");
        assert_eq!(diags[1].message, "error 2");
        assert_eq!(diags[1].span.line, 3);
    }
}

View file

@ -55,14 +55,27 @@ impl IncrementalCompiler {
let tokens = lexer.tokenize();
// Check for lexer errors
let mut errors = Vec::new();
for tok in &tokens {
if let TokenKind::Error(msg) = &tok.kind {
return Err(format!("Lexer error at line {}: {}", tok.line, msg));
errors.push(format!("Lexer error at line {}: {}", tok.line, msg));
}
}
if !errors.is_empty() {
return Err(errors.join("\n"));
}
let mut parser = Parser::new(tokens);
parser.parse_program().map_err(|e| e.to_string())
let mut parser = Parser::with_source(tokens, source);
let result = parser.parse_program_resilient();
if !result.errors.is_empty() {
let error_msgs: Vec<String> = result.errors.iter()
.map(|e| e.to_string())
.collect();
return Err(error_msgs.join("\n"));
}
Ok(result.program)
}
/// Full compilation pipeline.

View file

@ -7,6 +7,19 @@ pub struct Program {
pub declarations: Vec<Declaration>,
}
/// Visibility modifier for declarations.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Visibility {
    /// Default — visible within the current module only
    Private,
    /// `pub` — exported from the module
    Public,
}

impl Default for Visibility {
    /// Declarations are module-private unless explicitly marked `pub`.
    fn default() -> Self {
        Self::Private
    }
}
/// Top-level declarations.
#[derive(Debug, Clone)]
pub enum Declaration {
@ -38,6 +51,26 @@ pub enum Declaration {
TypeAlias(TypeAliasDecl),
/// `layout dashboard { sidebar.width == 250 }`
Layout(LayoutDecl),
/// `enum Status { Loading, Ok(data), Error(msg) }`
Enum(EnumDecl),
}
/// `enum Status { Loading, Ok(data), Error(msg) }`
#[derive(Debug, Clone)]
pub struct EnumDecl {
    // Enum type name (e.g. `Status`).
    pub name: String,
    // Variants in declaration order.
    pub variants: Vec<EnumVariant>,
    // `pub` vs module-private.
    pub visibility: Visibility,
    // Presumably the preceding `///` doc-comment text — TODO confirm
    // against the parser once it populates this field.
    pub doc: Option<String>,
    // Source location of the whole declaration.
    pub span: Span,
}

/// A single variant of an enum: `Ok(value: T)` or `Loading`
#[derive(Debug, Clone)]
pub struct EnumVariant {
    // Variant name (e.g. `Ok`).
    pub name: String,
    // Payload fields; empty for unit variants like `Loading`.
    pub fields: Vec<Param>,
    // Source location of the variant.
    pub span: Span,
}
/// `layout name { constraints }`
@ -107,6 +140,8 @@ pub struct LetDecl {
pub name: String,
pub type_annotation: Option<TypeExpr>,
pub value: Expr,
pub visibility: Visibility,
pub doc: Option<String>,
pub span: Span,
}
@ -117,6 +152,8 @@ pub struct ViewDecl {
pub name: String,
pub params: Vec<Param>,
pub body: Expr,
pub visibility: Visibility,
pub doc: Option<String>,
pub span: Span,
}
@ -126,6 +163,7 @@ pub struct EffectDecl {
pub name: String,
pub params: Vec<Param>,
pub return_type: TypeExpr,
pub doc: Option<String>,
pub span: Span,
}
@ -144,6 +182,8 @@ pub struct ComponentDecl {
pub name: String,
pub props: Vec<Param>,
pub body: Expr,
pub visibility: Visibility,
pub doc: Option<String>,
pub span: Span,
}
@ -229,6 +269,8 @@ pub enum TypeExpr {
pub struct TypeAliasDecl {
pub name: String,
pub definition: TypeExpr,
pub visibility: Visibility,
pub doc: Option<String>,
pub span: Span,
}
@ -312,6 +354,8 @@ pub enum Expr {
Await(Box<Expr>),
/// Merge streams: `merge(stream1, stream2, ...)`
Merge(Vec<Expr>),
/// Triple-quoted raw string: `"""content"""`
RawString(String),
}
/// String literal with interpolation segments.
@ -419,4 +463,17 @@ pub struct Span {
pub start: usize,
pub end: usize,
pub line: usize,
pub col: usize,
}
impl Span {
/// Create a span from a token's position.
pub fn from_token_start(line: usize, col: usize, byte_offset: usize) -> Self {
Span { start: byte_offset, end: byte_offset, line, col }
}
/// Extend this span to include another span's end position.
pub fn extend_to(&self, other: &Span) -> Self {
Span { start: self.start, end: other.end, line: self.line, col: self.col }
}
}

View file

@ -6,6 +6,7 @@ pub struct Token {
pub lexeme: String,
pub line: usize,
pub col: usize,
pub byte_offset: usize,
}
#[derive(Debug, Clone, PartialEq)]
@ -62,6 +63,9 @@ pub enum TokenKind {
Type,
Where,
Layout,
Enum,
Pub,
Await,
// Operators
Plus,
@ -101,6 +105,10 @@ pub enum TokenKind {
// Special
Comment(String),
/// `/// doc comment` — preserved in AST for documentation
DocComment(String),
/// Triple-quoted raw string: `"""..."""`
TripleStringFragment(String),
Eof,
Error(String),
}
@ -110,6 +118,7 @@ pub struct Lexer {
pos: usize,
line: usize,
col: usize,
byte_offset: usize,
in_string: bool,
interp_depth: usize,
}
@ -121,6 +130,7 @@ impl Lexer {
pos: 0,
line: 1,
col: 1,
byte_offset: 0,
in_string: false,
interp_depth: 0,
}
@ -160,6 +170,7 @@ impl Lexer {
fn advance(&mut self) -> char {
let c = self.peek();
self.pos += 1;
self.byte_offset += c.len_utf8();
if c == '\n' {
self.line += 1;
self.col = 1;
@ -175,6 +186,7 @@ impl Lexer {
lexeme: lexeme.to_string(),
line: self.line,
col: self.col,
byte_offset: self.byte_offset,
}
}
@ -201,61 +213,69 @@ impl Lexer {
let line = self.line;
let col = self.col;
let offset = self.byte_offset;
let c = self.peek();
let tok = match c {
'\n' => { self.advance(); Token { kind: TokenKind::Newline, lexeme: "\n".into(), line, col } }
'\n' => { self.advance(); Token { kind: TokenKind::Newline, lexeme: "\n".into(), line, col, byte_offset: offset } }
'-' if self.peek_next() == '-' => self.lex_comment(),
'-' if self.peek_next() == '>' => { self.advance(); self.advance(); Token { kind: TokenKind::Arrow, lexeme: "->".into(), line, col } }
'-' if self.peek_next() == '=' => { self.advance(); self.advance(); Token { kind: TokenKind::MinusEq, lexeme: "-=".into(), line, col } }
'+' if self.peek_next() == '=' => { self.advance(); self.advance(); Token { kind: TokenKind::PlusEq, lexeme: "+=".into(), line, col } }
'=' if self.peek_next() == '=' => { self.advance(); self.advance(); Token { kind: TokenKind::EqEq, lexeme: "==".into(), line, col } }
'!' if self.peek_next() == '=' => { self.advance(); self.advance(); Token { kind: TokenKind::Neq, lexeme: "!=".into(), line, col } }
'<' if self.peek_next() == '=' => { self.advance(); self.advance(); Token { kind: TokenKind::Lte, lexeme: "<=".into(), line, col } }
'>' if self.peek_next() == '=' => { self.advance(); self.advance(); Token { kind: TokenKind::Gte, lexeme: ">=".into(), line, col } }
'&' if self.peek_next() == '&' => { self.advance(); self.advance(); Token { kind: TokenKind::And, lexeme: "&&".into(), line, col } }
'|' if self.peek_next() == '|' => { self.advance(); self.advance(); Token { kind: TokenKind::Or, lexeme: "||".into(), line, col } }
'+' => { self.advance(); Token { kind: TokenKind::Plus, lexeme: "+".into(), line, col } }
'-' => { self.advance(); Token { kind: TokenKind::Minus, lexeme: "-".into(), line, col } }
'*' if self.peek_next() == '=' => { self.advance(); self.advance(); Token { kind: TokenKind::StarEq, lexeme: "*=".into(), line, col } }
'*' => { self.advance(); Token { kind: TokenKind::Star, lexeme: "*".into(), line, col } }
'-' if self.peek_next() == '>' => { self.advance(); self.advance(); Token { kind: TokenKind::Arrow, lexeme: "->".into(), line, col, byte_offset: offset } }
'-' if self.peek_next() == '=' => { self.advance(); self.advance(); Token { kind: TokenKind::MinusEq, lexeme: "-=".into(), line, col, byte_offset: offset } }
'+' if self.peek_next() == '=' => { self.advance(); self.advance(); Token { kind: TokenKind::PlusEq, lexeme: "+=".into(), line, col, byte_offset: offset } }
'=' if self.peek_next() == '=' => { self.advance(); self.advance(); Token { kind: TokenKind::EqEq, lexeme: "==".into(), line, col, byte_offset: offset } }
'!' if self.peek_next() == '=' => { self.advance(); self.advance(); Token { kind: TokenKind::Neq, lexeme: "!=".into(), line, col, byte_offset: offset } }
'<' if self.peek_next() == '=' => { self.advance(); self.advance(); Token { kind: TokenKind::Lte, lexeme: "<=".into(), line, col, byte_offset: offset } }
'>' if self.peek_next() == '=' => { self.advance(); self.advance(); Token { kind: TokenKind::Gte, lexeme: ">=".into(), line, col, byte_offset: offset } }
'&' if self.peek_next() == '&' => { self.advance(); self.advance(); Token { kind: TokenKind::And, lexeme: "&&".into(), line, col, byte_offset: offset } }
'|' if self.peek_next() == '|' => { self.advance(); self.advance(); Token { kind: TokenKind::Or, lexeme: "||".into(), line, col, byte_offset: offset } }
'+' => { self.advance(); Token { kind: TokenKind::Plus, lexeme: "+".into(), line, col, byte_offset: offset } }
'-' => { self.advance(); Token { kind: TokenKind::Minus, lexeme: "-".into(), line, col, byte_offset: offset } }
'*' if self.peek_next() == '=' => { self.advance(); self.advance(); Token { kind: TokenKind::StarEq, lexeme: "*=".into(), line, col, byte_offset: offset } }
'*' => { self.advance(); Token { kind: TokenKind::Star, lexeme: "*".into(), line, col, byte_offset: offset } }
'/' if self.peek_next() == '/' => self.lex_comment(),
'/' if self.peek_next() == '=' => { self.advance(); self.advance(); Token { kind: TokenKind::SlashEq, lexeme: "/=".into(), line, col } }
'/' => { self.advance(); Token { kind: TokenKind::Slash, lexeme: "/".into(), line, col } }
'%' => { self.advance(); Token { kind: TokenKind::Percent, lexeme: "%".into(), line, col } }
'=' => { self.advance(); Token { kind: TokenKind::Eq, lexeme: "=".into(), line, col } }
'<' => { self.advance(); Token { kind: TokenKind::Lt, lexeme: "<".into(), line, col } }
'>' => { self.advance(); Token { kind: TokenKind::Gt, lexeme: ">".into(), line, col } }
'!' => { self.advance(); Token { kind: TokenKind::Not, lexeme: "!".into(), line, col } }
'|' => { self.advance(); Token { kind: TokenKind::Pipe, lexeme: "|".into(), line, col } }
';' => { self.advance(); Token { kind: TokenKind::Semicolon, lexeme: ";".into(), line, col } }
'.' => { self.advance(); Token { kind: TokenKind::Dot, lexeme: ".".into(), line, col } }
'(' => { self.advance(); Token { kind: TokenKind::LParen, lexeme: "(".into(), line, col } }
')' => { self.advance(); Token { kind: TokenKind::RParen, lexeme: ")".into(), line, col } }
'[' => { self.advance(); Token { kind: TokenKind::LBracket, lexeme: "[".into(), line, col } }
']' => { self.advance(); Token { kind: TokenKind::RBracket, lexeme: "]".into(), line, col } }
'/' if self.peek_next() == '=' => { self.advance(); self.advance(); Token { kind: TokenKind::SlashEq, lexeme: "/=".into(), line, col, byte_offset: offset } }
'/' => { self.advance(); Token { kind: TokenKind::Slash, lexeme: "/".into(), line, col, byte_offset: offset } }
'%' => { self.advance(); Token { kind: TokenKind::Percent, lexeme: "%".into(), line, col, byte_offset: offset } }
'=' => { self.advance(); Token { kind: TokenKind::Eq, lexeme: "=".into(), line, col, byte_offset: offset } }
'<' => { self.advance(); Token { kind: TokenKind::Lt, lexeme: "<".into(), line, col, byte_offset: offset } }
'>' => { self.advance(); Token { kind: TokenKind::Gt, lexeme: ">".into(), line, col, byte_offset: offset } }
'!' => { self.advance(); Token { kind: TokenKind::Not, lexeme: "!".into(), line, col, byte_offset: offset } }
'|' => { self.advance(); Token { kind: TokenKind::Pipe, lexeme: "|".into(), line, col, byte_offset: offset } }
';' => { self.advance(); Token { kind: TokenKind::Semicolon, lexeme: ";".into(), line, col, byte_offset: offset } }
'.' => { self.advance(); Token { kind: TokenKind::Dot, lexeme: ".".into(), line, col, byte_offset: offset } }
'(' => { self.advance(); Token { kind: TokenKind::LParen, lexeme: "(".into(), line, col, byte_offset: offset } }
')' => { self.advance(); Token { kind: TokenKind::RParen, lexeme: ")".into(), line, col, byte_offset: offset } }
'[' => { self.advance(); Token { kind: TokenKind::LBracket, lexeme: "[".into(), line, col, byte_offset: offset } }
']' => { self.advance(); Token { kind: TokenKind::RBracket, lexeme: "]".into(), line, col, byte_offset: offset } }
'{' => {
self.advance();
if self.in_string {
self.interp_depth += 1;
}
Token { kind: TokenKind::LBrace, lexeme: "{".into(), line, col }
Token { kind: TokenKind::LBrace, lexeme: "{".into(), line, col, byte_offset: offset }
}
'}' => {
self.advance();
if self.interp_depth > 0 {
self.interp_depth -= 1;
}
Token { kind: TokenKind::RBrace, lexeme: "}".into(), line, col }
Token { kind: TokenKind::RBrace, lexeme: "}".into(), line, col, byte_offset: offset }
}
',' => { self.advance(); Token { kind: TokenKind::Comma, lexeme: ",".into(), line, col, byte_offset: offset } }
':' => { self.advance(); Token { kind: TokenKind::Colon, lexeme: ":".into(), line, col, byte_offset: offset } }
'"' => {
// Check for triple-quote
if self.pos + 2 < self.source.len() && self.source[self.pos + 1] == '"' && self.source[self.pos + 2] == '"' {
self.lex_triple_string()
} else {
self.lex_string_start()
}
}
',' => { self.advance(); Token { kind: TokenKind::Comma, lexeme: ",".into(), line, col } }
':' => { self.advance(); Token { kind: TokenKind::Colon, lexeme: ":".into(), line, col } }
'"' => self.lex_string_start(),
c if c.is_ascii_digit() => self.lex_number(),
c if c.is_ascii_alphabetic() || c == '_' => self.lex_ident_or_keyword(),
_ => {
self.advance();
Token { kind: TokenKind::Error(format!("unexpected character: {c}")), lexeme: c.to_string(), line, col }
Token { kind: TokenKind::Error(format!("unexpected character: {c}")), lexeme: c.to_string(), line, col, byte_offset: offset }
}
};
@ -265,18 +285,33 @@ impl Lexer {
fn lex_comment(&mut self) -> Token {
let line = self.line;
let col = self.col;
self.advance(); // -
self.advance(); // -
let offset = self.byte_offset;
let first = self.advance();
let second = self.advance();
// Check for doc comment: `/// text` (three slashes)
let is_doc = first == '/' && second == '/' && self.pos < self.source.len() && self.peek() == '/';
if is_doc {
self.advance(); // consume third /
}
let mut text = String::new();
while self.pos < self.source.len() && self.peek() != '\n' {
text.push(self.advance());
}
Token { kind: TokenKind::Comment(text.trim().to_string()), lexeme: format!("--{text}"), line, col }
let trimmed = text.trim().to_string();
if is_doc {
Token { kind: TokenKind::DocComment(trimmed), lexeme: format!("///{text}"), line, col, byte_offset: offset }
} else {
Token { kind: TokenKind::Comment(trimmed), lexeme: format!("{first}{second}{text}"), line, col, byte_offset: offset }
}
}
fn lex_number(&mut self) -> Token {
let line = self.line;
let col = self.col;
let offset = self.byte_offset;
let mut num = String::new();
let mut is_float = false;
@ -292,16 +327,17 @@ impl Lexer {
if is_float {
let val: f64 = num.parse().unwrap_or(0.0);
Token { kind: TokenKind::Float(val), lexeme: num, line, col }
Token { kind: TokenKind::Float(val), lexeme: num, line, col, byte_offset: offset }
} else {
let val: i64 = num.parse().unwrap_or(0);
Token { kind: TokenKind::Int(val), lexeme: num, line, col }
Token { kind: TokenKind::Int(val), lexeme: num, line, col, byte_offset: offset }
}
}
fn lex_ident_or_keyword(&mut self) -> Token {
let line = self.line;
let col = self.col;
let offset = self.byte_offset;
let mut ident = String::new();
while self.pos < self.source.len() && (self.peek().is_ascii_alphanumeric() || self.peek() == '_') {
@ -350,10 +386,13 @@ impl Lexer {
"type" => TokenKind::Type,
"where" => TokenKind::Where,
"layout" => TokenKind::Layout,
"enum" => TokenKind::Enum,
"pub" => TokenKind::Pub,
"await" => TokenKind::Await,
_ => TokenKind::Ident(ident.clone()),
};
Token { kind, lexeme: ident, line, col }
Token { kind, lexeme: ident, line, col, byte_offset: offset }
}
fn lex_string_start(&mut self) -> Token {
@ -372,6 +411,7 @@ impl Lexer {
}
fn lex_string_body(&mut self, line: usize, col: usize) -> Token {
let offset = self.byte_offset;
let mut text = String::new();
while self.pos < self.source.len() {
@ -381,23 +421,23 @@ impl Lexer {
self.advance();
self.in_string = false;
if text.is_empty() {
return Token { kind: TokenKind::StringEnd, lexeme: "\"".into(), line, col };
return Token { kind: TokenKind::StringEnd, lexeme: "\"".into(), line, col, byte_offset: offset };
}
// Return fragment first, next call will return StringEnd
// Actually let's simplify: return the full string as a single token
return Token { kind: TokenKind::StringFragment(text.clone()), lexeme: format!("{text}\""), line, col };
return Token { kind: TokenKind::StringFragment(text.clone()), lexeme: format!("{text}\""), line, col, byte_offset: offset };
}
'{' => {
if text.is_empty() {
// No text before { — emit StringInterp directly
self.advance();
self.interp_depth += 1;
return Token { kind: TokenKind::StringInterp, lexeme: "{".into(), line, col };
return Token { kind: TokenKind::StringInterp, lexeme: "{".into(), line, col, byte_offset: offset };
} else {
// Text before { — return the text fragment first.
// DON'T consume { — the next call to lex_string_body
// will see { at position 0 (empty text) and emit StringInterp.
return Token { kind: TokenKind::StringFragment(text.clone()), lexeme: text, line, col };
return Token { kind: TokenKind::StringFragment(text.clone()), lexeme: text, line, col, byte_offset: offset };
}
}
'\\' => {
@ -419,7 +459,34 @@ impl Lexer {
}
// Unterminated string
Token { kind: TokenKind::Error("unterminated string".into()), lexeme: text, line, col }
Token { kind: TokenKind::Error("unterminated string".into()), lexeme: text, line, col, byte_offset: offset }
}
/// Lex a triple-quoted raw string: `"""content with "quotes" and newlines"""`
fn lex_triple_string(&mut self) -> Token {
let line = self.line;
let col = self.col;
let offset = self.byte_offset;
// Consume opening """
self.advance(); // "
self.advance(); // "
self.advance(); // "
let mut text = String::new();
while self.pos < self.source.len() {
if self.peek() == '"'
&& self.pos + 1 < self.source.len() && self.source[self.pos + 1] == '"'
&& self.pos + 2 < self.source.len() && self.source[self.pos + 2] == '"'
{
self.advance(); // "
self.advance(); // "
self.advance(); // "
return Token { kind: TokenKind::TripleStringFragment(text), lexeme: String::new(), line, col, byte_offset: offset };
}
text.push(self.advance());
}
Token { kind: TokenKind::Error("unterminated triple-quoted string".into()), lexeme: text, line, col, byte_offset: offset }
}
}

View file

@ -4,4 +4,4 @@ pub mod parser;
pub use ast::*;
pub use lexer::{Lexer, Token, TokenKind};
pub use parser::Parser;
pub use parser::{Parser, ParseResult};

View file

@ -2,6 +2,13 @@
use crate::ast::*;
use crate::lexer::{Token, TokenKind};
/// Result of resilient parsing — contains both successfully parsed declarations and collected errors.
#[derive(Debug)]
pub struct ParseResult {
    /// The (possibly partial) program: every declaration that parsed cleanly.
    pub program: Program,
    /// All parse errors collected during recovery; empty means a clean parse.
    pub errors: Vec<ParseError>,
}
pub struct Parser {
tokens: Vec<Token>,
pos: usize,
@ -39,6 +46,61 @@ impl Parser {
Ok(Program { declarations })
}
/// Parse with error recovery — collects multiple errors instead of stopping at the first.
///
/// Returns a [`ParseResult`] carrying both the (possibly partial) program and
/// every `ParseError` encountered, so callers can report all problems at once.
pub fn parse_program_resilient(&mut self) -> ParseResult {
    let mut program = Program { declarations: Vec::new() };
    let mut errors = Vec::new();
    self.skip_newlines();
    while !self.is_at_end() {
        match self.parse_declaration() {
            Ok(decl) => {
                // One source declaration can expand into several AST decls
                // (e.g. destructuring); flush any queued expansions too.
                program.declarations.push(decl);
                program.declarations.extend(self.pending_decls.drain(..));
            }
            Err(err) => {
                // Record the failure, then fast-forward to the next plausible
                // declaration boundary so parsing can continue.
                errors.push(err);
                self.synchronize();
            }
        }
        self.skip_newlines();
    }
    ParseResult { program, errors }
}
/// Skip tokens until we reach a declaration keyword (error recovery).
///
/// Scans forward past the broken input; after each newline run it stops if
/// the next token looks like the start of a declaration (or input is done).
fn synchronize(&mut self) {
    while !self.is_at_end() {
        if matches!(self.peek(), TokenKind::Newline) {
            // Newline might end the bad region — consume the run and probe.
            self.advance();
            self.skip_newlines();
            if self.is_at_end() || self.is_at_declaration_start() {
                break;
            }
        } else {
            // Still inside the bad region; keep discarding tokens.
            self.advance();
        }
    }
}
/// Check if the current token starts a declaration.
///
/// Used by error recovery to decide where to resume parsing; doc comments
/// count as a start because they attach to the declaration that follows.
fn is_at_declaration_start(&self) -> bool {
    matches!(
        self.peek(),
        TokenKind::Let
            | TokenKind::View
            | TokenKind::Component
            | TokenKind::On
            | TokenKind::Effect
            | TokenKind::Type
            | TokenKind::Route
            | TokenKind::Stream
            | TokenKind::Layout
            | TokenKind::Constrain
            | TokenKind::Import
            | TokenKind::Export
            | TokenKind::Every
            | TokenKind::Enum
            | TokenKind::Pub
            | TokenKind::DocComment(_)
    )
}
// ── Helpers ──────────────────────────────────────────
fn peek(&self) -> &TokenKind {
@ -163,32 +225,132 @@ impl Parser {
// ── Declarations ────────────────────────────────────
fn parse_declaration(&mut self) -> Result<Declaration, ParseError> {
match self.peek() {
TokenKind::Let => self.parse_let_decl(),
TokenKind::View => self.parse_view_decl(),
TokenKind::Effect => self.parse_effect_decl(),
TokenKind::On => self.parse_on_handler(),
TokenKind::Component => self.parse_component_decl(),
TokenKind::Route => self.parse_route_decl(),
TokenKind::Constrain => self.parse_constrain_decl(),
TokenKind::Stream => self.parse_stream_decl(),
TokenKind::Every => self.parse_every_decl(),
TokenKind::Import => self.parse_import_decl(),
TokenKind::Export => self.parse_export_decl(),
TokenKind::Type => self.parse_type_alias_decl(),
TokenKind::Layout => self.parse_layout_decl(),
// Collect doc comments before declaration
let mut doc = None;
while matches!(self.peek(), TokenKind::DocComment(_)) {
if let TokenKind::DocComment(text) = self.peek().clone() {
doc = Some(match doc {
Some(existing) => format!("{}\n{}", existing, text),
None => text,
});
}
self.advance();
self.skip_newlines();
}
let mut decl = match self.peek() {
TokenKind::Pub => self.parse_pub_decl()?,
TokenKind::Let => self.parse_let_decl()?,
TokenKind::View => self.parse_view_decl()?,
TokenKind::Effect => self.parse_effect_decl()?,
TokenKind::On => self.parse_on_handler()?,
TokenKind::Component => self.parse_component_decl()?,
TokenKind::Route => self.parse_route_decl()?,
TokenKind::Constrain => self.parse_constrain_decl()?,
TokenKind::Stream => self.parse_stream_decl()?,
TokenKind::Every => self.parse_every_decl()?,
TokenKind::Import => self.parse_import_decl()?,
TokenKind::Export => self.parse_export_decl()?,
TokenKind::Type => self.parse_type_alias_decl()?,
TokenKind::Layout => self.parse_layout_decl()?,
TokenKind::Enum => self.parse_enum_decl()?,
// Expression statement: `log("hello")`, `push(items, x)`
TokenKind::Ident(_) => {
let expr = self.parse_expr()?;
Ok(Declaration::ExprStatement(expr))
Declaration::ExprStatement(expr)
}
_ => Err(self.error(format!(
"expected declaration (let, view, effect, on, component, route, constrain, stream, every, type, layout), got {:?}",
_ => return Err(self.error(format!(
"expected declaration (let, view, effect, on, component, route, constrain, stream, every, type, layout, enum, pub), got {:?}",
self.peek()
))),
};
// Attach doc comment to the parsed declaration
if let Some(doc_text) = doc {
match &mut decl {
Declaration::Let(d) => d.doc = Some(doc_text),
Declaration::View(d) => d.doc = Some(doc_text),
Declaration::Component(d) => d.doc = Some(doc_text),
Declaration::Effect(d) => d.doc = Some(doc_text),
Declaration::TypeAlias(d) => d.doc = Some(doc_text),
Declaration::Enum(d) => d.doc = Some(doc_text),
_ => {} // doc comments on other decls are silently ignored
}
}
Ok(decl)
}
/// Parse `pub let ...`, `pub component ...`, `pub view ...`, `pub enum ...`
///
/// Delegates to the matching declaration parser, then flips the result's
/// visibility from the default (`Private`) to `Public`.
fn parse_pub_decl(&mut self) -> Result<Declaration, ParseError> {
    self.advance(); // eat the 'pub' keyword
    self.skip_newlines();
    let mut decl = match self.peek() {
        TokenKind::Let => self.parse_let_decl()?,
        TokenKind::View => self.parse_view_decl()?,
        TokenKind::Component => self.parse_component_decl()?,
        TokenKind::Type => self.parse_type_alias_decl()?,
        TokenKind::Enum => self.parse_enum_decl()?,
        _ => return Err(self.error(format!(
            "expected let, view, component, type, or enum after 'pub', got {:?}",
            self.peek()
        ))),
    };
    // Locate the visibility slot on the parsed declaration, if it has one.
    let vis_slot = match &mut decl {
        Declaration::Let(d) => Some(&mut d.visibility),
        Declaration::View(d) => Some(&mut d.visibility),
        Declaration::Component(d) => Some(&mut d.visibility),
        Declaration::TypeAlias(d) => Some(&mut d.visibility),
        Declaration::Enum(d) => Some(&mut d.visibility),
        // Unreachable in practice: the dispatch above yields only these five.
        _ => None,
    };
    if let Some(vis) = vis_slot {
        *vis = Visibility::Public;
    }
    Ok(decl)
}
/// Parse `enum Status { Loading, Ok(data), Error(msg) }`
///
/// Variants may be separated by commas, newlines, or both. An enum parsed
/// here is private by default; `pub enum` is handled by `parse_pub_decl`.
fn parse_enum_decl(&mut self) -> Result<Declaration, ParseError> {
    let decl_line = self.current_token().line;
    self.advance(); // consume the 'enum' keyword
    let enum_name = self.expect_ident()?;
    self.expect(&TokenKind::LBrace)?;
    self.skip_newlines();
    let mut variants = Vec::new();
    loop {
        if self.check(&TokenKind::RBrace) || self.is_at_end() {
            break;
        }
        let variant_line = self.current_token().line;
        let variant_name = self.expect_ident()?;
        // A variant may carry a payload: `Ok(value)` or `Ok(value: T)`.
        let mut fields = Vec::new();
        if self.check(&TokenKind::LParen) {
            fields = self.parse_params()?;
        }
        variants.push(EnumVariant {
            name: variant_name,
            fields,
            span: Span { start: 0, end: 0, line: variant_line, col: 0 },
        });
        // Separators are lenient: trailing commas and bare newlines both work.
        self.skip_newlines();
        if self.check(&TokenKind::Comma) {
            self.advance();
        }
        self.skip_newlines();
    }
    self.expect(&TokenKind::RBrace)?;
    Ok(Declaration::Enum(EnumDecl {
        name: enum_name,
        variants,
        visibility: Visibility::Private,
        doc: None,
        span: Span { start: 0, end: 0, line: decl_line, col: 0 },
    }))
}
fn parse_every_decl(&mut self) -> Result<Declaration, ParseError> {
let line = self.current_token().line;
self.advance(); // consume 'every'
@ -199,7 +361,7 @@ impl Parser {
Ok(Declaration::Every(EveryDecl {
interval_ms: interval,
body,
span: Span { start: 0, end: 0, line },
span: Span { start: 0, end: 0, line, col: 0 },
}))
}
@ -257,7 +419,7 @@ impl Parser {
Ok(Declaration::Import(ImportDecl {
names,
source,
span: Span { start: 0, end: 0, line },
span: Span { start: 0, end: 0, line, col: 0 },
}))
}
@ -309,14 +471,18 @@ impl Parser {
name: temp.clone(),
type_annotation: None,
value,
span: Span { start: 0, end: 0, line },
visibility: Visibility::Private,
doc: None,
span: Span { start: 0, end: 0, line, col: 0 },
})];
for field in &names {
all_decls.push(Declaration::Let(LetDecl {
name: field.clone(),
type_annotation: None,
value: Expr::DotAccess(Box::new(Expr::Ident(temp.clone())), field.clone()),
span: Span { start: 0, end: 0, line },
visibility: Visibility::Private,
doc: None,
span: Span { start: 0, end: 0, line, col: 0 },
}));
}
// Return first decl, queue rest for injection by parse_program
@ -349,7 +515,9 @@ impl Parser {
name: temp.clone(),
type_annotation: None,
value,
span: Span { start: 0, end: 0, line },
visibility: Visibility::Private,
doc: None,
span: Span { start: 0, end: 0, line, col: 0 },
})];
for (i, name) in names.iter().enumerate() {
all_decls.push(Declaration::Let(LetDecl {
@ -359,7 +527,9 @@ impl Parser {
Box::new(Expr::Ident(temp.clone())),
Box::new(Expr::IntLit(i as i64)),
),
span: Span { start: 0, end: 0, line },
visibility: Visibility::Private,
doc: None,
span: Span { start: 0, end: 0, line, col: 0 },
}));
}
self.pending_decls.extend(all_decls.drain(1..));
@ -383,7 +553,9 @@ impl Parser {
name,
type_annotation,
value,
span: Span { start: 0, end: 0, line },
visibility: Visibility::Private,
doc: None,
span: Span { start: 0, end: 0, line, col: 0 },
}))
}
@ -398,7 +570,9 @@ impl Parser {
Ok(Declaration::TypeAlias(TypeAliasDecl {
name,
definition,
span: Span { start: 0, end: 0, line },
visibility: Visibility::Private,
doc: None,
span: Span { start: 0, end: 0, line, col: 0 },
}))
}
@ -460,7 +634,7 @@ impl Parser {
Ok(Declaration::Layout(LayoutDecl {
name,
constraints,
span: Span { start: 0, end: 0, line },
span: Span { start: 0, end: 0, line, col: 0 },
}))
}
@ -580,7 +754,9 @@ impl Parser {
name,
params,
body,
span: Span { start: 0, end: 0, line },
visibility: Visibility::Private,
doc: None,
span: Span { start: 0, end: 0, line, col: 0 },
}))
}
@ -596,7 +772,8 @@ impl Parser {
name,
params,
return_type,
span: Span { start: 0, end: 0, line },
doc: None,
span: Span { start: 0, end: 0, line, col: 0 },
}))
}
@ -623,7 +800,7 @@ impl Parser {
event,
param,
body,
span: Span { start: 0, end: 0, line },
span: Span { start: 0, end: 0, line, col: 0 },
}))
}
@ -648,7 +825,9 @@ impl Parser {
name,
props,
body,
span: Span { start: 0, end: 0, line },
visibility: Visibility::Private,
doc: None,
span: Span { start: 0, end: 0, line, col: 0 },
}))
}
@ -682,7 +861,7 @@ impl Parser {
Ok(Declaration::Route(RouteDecl {
path,
body,
span: Span { start: 0, end: 0, line },
span: Span { start: 0, end: 0, line, col: 0 },
}))
}
@ -703,7 +882,7 @@ impl Parser {
element,
prop,
expr,
span: Span { start: 0, end: 0, line },
span: Span { start: 0, end: 0, line, col: 0 },
}))
}
@ -784,7 +963,7 @@ impl Parser {
mode,
transport,
output,
span: Span { start: 0, end: 0, line },
span: Span { start: 0, end: 0, line, col: 0 },
}))
}
@ -1049,6 +1228,16 @@ impl Parser {
TokenKind::StringFragment(_) | TokenKind::StringEnd | TokenKind::StringInterp => {
self.parse_string_lit()
}
TokenKind::TripleStringFragment(s) => {
let text = s.clone();
self.advance();
Ok(Expr::RawString(text))
}
TokenKind::Await => {
self.advance(); // consume 'await' keyword
let inner = self.parse_primary()?;
Ok(Expr::Await(Box::new(inner)))
}
// Containers
TokenKind::Column => self.parse_container(ContainerKind::Column),
@ -1348,7 +1537,7 @@ impl Parser {
return Ok(Expr::Slot);
}
// Await: `await fetchJSON("/api")`
// Await: `await fetchJSON("/api")` (string form, kept for back-compat)
if name == "await" {
let inner = self.parse_primary()?;
return Ok(Expr::Await(Box::new(inner)));
@ -1736,7 +1925,7 @@ fn is_declaration_keyword(name: &str) -> bool {
matches!(name, "let" | "view" | "effect" | "on" | "handle")
}
#[derive(Debug)]
#[derive(Debug, Clone)]
pub struct ParseError {
pub message: String,
pub line: usize,
@ -1957,4 +2146,104 @@ let b = stream from "ws://localhost:9101""#);
}
}
}
// ── v0.2/v0.3 Feature Tests ─────────────────────────
/// Tokenize + parse `src` with error recovery; returns decls and errors together.
fn parse_resilient(src: &str) -> ParseResult {
    let tokens = Lexer::new(src).tokenize();
    Parser::new(tokens).parse_program_resilient()
}
#[test]
fn test_enum_decl() {
    // A bare enum: three unit variants, no payloads.
    let prog = parse("enum Status { Loading, Ok, Error }");
    match &prog.declarations[0] {
        Declaration::Enum(e) => {
            assert_eq!(e.name, "Status");
            let names: Vec<&str> = e.variants.iter().map(|v| v.name.as_str()).collect();
            assert_eq!(names, ["Loading", "Ok", "Error"]);
        }
        other => panic!("expected Enum, got {other:?}"),
    }
}
#[test]
fn test_enum_with_fields() {
    // Data-carrying variants: each holds one named field.
    let prog = parse("enum Result { Ok(value), Error(msg) }");
    match &prog.declarations[0] {
        Declaration::Enum(e) => {
            assert_eq!(e.name, "Result");
            assert_eq!(e.variants.len(), 2);
            let ok = &e.variants[0];
            assert_eq!(ok.name, "Ok");
            assert_eq!(ok.fields.len(), 1);
            assert_eq!(ok.fields[0].name, "value");
            let err = &e.variants[1];
            assert_eq!(err.name, "Error");
            assert_eq!(err.fields.len(), 1);
        }
        other => panic!("expected Enum, got {other:?}"),
    }
}
#[test]
fn test_pub_let() {
    // `pub` should flip the declaration's visibility from the default.
    let prog = parse("pub let count = 0");
    let Declaration::Let(d) = &prog.declarations[0] else {
        panic!("expected Let, got {:?}", prog.declarations[0]);
    };
    assert_eq!(d.name, "count");
    assert!(matches!(d.visibility, Visibility::Public));
}
#[test]
fn test_doc_comment() {
    // A `///` line preceding a declaration attaches as its doc text.
    let prog = parse("/// This is a counter\nlet count = 0");
    let Declaration::Let(d) = &prog.declarations[0] else {
        panic!("expected Let, got {:?}", prog.declarations[0]);
    };
    assert_eq!(d.name, "count");
    assert_eq!(d.doc.as_deref(), Some("This is a counter"));
}
#[test]
fn test_error_recovery_two_errors() {
    // Garbage, a good declaration, then more garbage: both bad regions should
    // produce errors while the valid `let` still lands in the program.
    let result = parse_resilient("!@#$\nlet count = 0\n!@#$\n");
    assert!(!result.errors.is_empty(), "expected errors");
    let recovered = result
        .program
        .declarations
        .iter()
        .any(|d| matches!(d, Declaration::Let(l) if l.name == "count"));
    assert!(recovered, "expected 'count' declaration to be recovered");
}
#[test]
fn test_error_recovery_valid_after_error() {
    // Leading garbage must not prevent the following declaration from parsing.
    let result = parse_resilient("??? bad stuff\nlet count = 42");
    assert!(!result.errors.is_empty(), "expected errors");
    let recovered = result
        .program
        .declarations
        .iter()
        .any(|d| matches!(d, Declaration::Let(l) if l.name == "count"));
    assert!(recovered, "expected 'count' declaration to be recovered");
}
#[test]
fn test_empty_program() {
    // An empty source file parses to a program with zero declarations.
    assert!(parse("").declarations.is_empty());
}
#[test]
fn test_multiple_declarations() {
    // Newline-separated declarations each become a distinct AST entry.
    let decl_count = parse("let a = 1\nlet b = 2\nlet c = 3").declarations.len();
    assert_eq!(decl_count, 3);
}
}

View file

@ -5,3 +5,4 @@ edition = "2021"
[dependencies]
ds-parser = { path = "../ds-parser" }
ds-diagnostic = { path = "../ds-diagnostic" }

View file

@ -8,7 +8,7 @@
use std::collections::HashMap;
use ds_parser::{Program, Declaration, LetDecl, ViewDecl, Expr, BinOp, UnaryOp, TypeExpr, TypeAliasDecl};
use ds_parser::{Program, Declaration, LetDecl, ViewDecl, Expr, BinOp, UnaryOp, TypeExpr, TypeAliasDecl, Pattern};
use crate::types::{Type, TypeVar, EffectType, Predicate, PredicateExpr};
use crate::errors::{TypeError, TypeErrorKind};
@ -41,6 +41,8 @@ pub struct TypeChecker {
in_view: bool,
/// Type alias registry: name → resolved Type.
type_aliases: HashMap<String, Type>,
/// Enum variant registry: enum name → list of variant names.
enum_variants: HashMap<String, Vec<String>>,
}
impl TypeChecker {
@ -53,6 +55,7 @@ impl TypeChecker {
substitutions: HashMap::new(),
in_view: false,
type_aliases: HashMap::new(),
enum_variants: HashMap::new(),
}
}
@ -236,6 +239,45 @@ impl TypeChecker {
}
}
// Pass 0b: register enum declarations
for decl in &program.declarations {
if let Declaration::Enum(enum_decl) = decl {
// Register each variant as a constructor function in the environment
for variant in &enum_decl.variants {
if variant.fields.is_empty() {
// Unit variant: register as a constant of the enum type
self.env.insert(
format!("{}.{}", enum_decl.name, variant.name),
Type::String, // unit variants are string tags at runtime
);
} else {
// Data variant: register as a constructor function
let param_types: Vec<Type> = variant.fields.iter()
.map(|_| self.fresh_tv())
.collect();
let ret_ty = self.fresh_tv();
let key = format!("{}.{}", enum_decl.name, variant.name);
self.env.insert(
key,
Type::Fn {
params: param_types,
ret: Box::new(ret_ty),
effects: vec![EffectType::Pure],
},
);
}
}
// Register the enum itself as a record of its variants
let enum_ty = self.fresh_tv();
self.type_aliases.insert(enum_decl.name.clone(), enum_ty);
// Register variant names for exhaustiveness checking
let variant_names: Vec<String> = enum_decl.variants.iter()
.map(|v| v.name.clone())
.collect();
self.enum_variants.insert(enum_decl.name.clone(), variant_names);
}
}
// First pass: register all let declarations
for decl in &program.declarations {
if let Declaration::Let(let_decl) = decl {
@ -873,6 +915,43 @@ impl TypeChecker {
Expr::Match(expr, arms) => {
let _ = self.infer_expr(expr);
// Exhaustiveness check: if scrutinee is a known variable with enum type
if let Expr::Ident(name) = expr.as_ref() {
// Check if name matches a known enum type variable
for (enum_name, variants) in &self.enum_variants {
if name.to_lowercase() == enum_name.to_lowercase()
|| self.env.get(name).map_or(false, |_| true)
{
// Collect matched patterns
let mut matched: std::collections::HashSet<String> = std::collections::HashSet::new();
let mut has_wildcard = false;
for arm in arms {
match &arm.pattern {
Pattern::Wildcard => { has_wildcard = true; }
Pattern::Ident(p) => { matched.insert(p.clone()); }
Pattern::Literal(_) => {}
Pattern::Constructor(p, _) => { matched.insert(p.clone()); }
}
}
if !has_wildcard {
let missing: Vec<String> = variants.iter()
.filter(|v| !matched.contains(*v))
.cloned()
.collect();
if !missing.is_empty() {
self.error(TypeErrorKind::NonExhaustiveMatch {
enum_name: enum_name.clone(),
missing_variants: missing,
});
}
}
break;
}
}
}
if arms.is_empty() {
Type::Unit
} else {
@ -947,6 +1026,21 @@ impl TypeChecker {
}
self.fresh_tv()
}
Expr::Await(inner) => {
let inner_ty = self.infer_expr(inner);
// Await unwraps a Promise/Future, for now just returns inner type
inner_ty
}
Expr::Merge(streams) => {
for s in streams {
self.infer_expr(s);
}
Type::Stream(Box::new(self.fresh_tv()))
}
Expr::RawString(_) => Type::String,
}
}
@ -975,6 +1069,58 @@ impl TypeChecker {
!self.errors.is_empty()
}
/// Convert accumulated type errors into `Diagnostic` structs.
pub fn errors_as_diagnostics(&self) -> Vec<ds_diagnostic::Diagnostic> {
self.errors.iter().map(|err| {
let (line, col) = err.span.unwrap_or((0, 0));
let span = ds_parser::Span {
start: 0,
end: 0,
line,
col,
};
let (code, message) = match &err.kind {
crate::errors::TypeErrorKind::Mismatch { expected, found, context } => {
("E0100", format!("expected {}, found {}{}", expected.display(), found.display(), context))
}
crate::errors::TypeErrorKind::NotReactive { found, context } => {
("E0101", format!("type {} is not reactive — {}", found.display(), context))
}
crate::errors::TypeErrorKind::UnhandledEffect { effect, function } => {
("E0102", format!("unhandled effect `{}` in function `{}`", effect, function))
}
crate::errors::TypeErrorKind::ViewOutsideBlock { expr } => {
("E0103", format!("view expression `{}` outside a `view` block", expr))
}
crate::errors::TypeErrorKind::UnboundVariable { name } => {
("E0104", format!("unbound variable `{}`", name))
}
crate::errors::TypeErrorKind::InfiniteType { var, ty } => {
("E0105", format!("infinite type: {} ~ {}", var, ty.display()))
}
crate::errors::TypeErrorKind::ArityMismatch { function, expected, found } => {
("E0106", format!("`{}` expects {} argument(s), found {}", function, expected, found))
}
crate::errors::TypeErrorKind::MissingField { field, record_type } => {
("E0107", format!("no field `{}` on {}", field, record_type.display()))
}
crate::errors::TypeErrorKind::RefinementViolation { type_name, predicate, value } => {
("E0108", format!("value `{}` violates refinement `{}` ({})", value, type_name, predicate))
}
crate::errors::TypeErrorKind::TypeAliasCycle { name } => {
("E0109", format!("type alias `{}` creates a cycle", name))
}
crate::errors::TypeErrorKind::NonExhaustiveMatch { enum_name, missing_variants } => {
("E0110", format!("non-exhaustive match on `{}` — missing: {}", enum_name, missing_variants.join(", ")))
}
};
ds_diagnostic::Diagnostic::error(message, span)
.with_code(code)
}).collect()
}
/// Format all errors for display.
pub fn display_errors(&self) -> String {
self.errors.iter()
@ -996,7 +1142,7 @@ mod tests {
use ds_parser::{Declaration, LetDecl, ViewDecl, Expr, Span, Container, ContainerKind, Element};
fn span() -> Span {
Span { start: 0, end: 0, line: 0 }
Span { start: 0, end: 0, line: 0, col: 0 }
}
fn make_program(decls: Vec<Declaration>) -> Program {
@ -1011,6 +1157,8 @@ mod tests {
name: "count".to_string(),
type_annotation: None,
value: Expr::IntLit(0),
visibility: ds_parser::Visibility::Private,
doc: None,
span: span(),
}),
]);
@ -1030,6 +1178,8 @@ mod tests {
name: "count".to_string(),
type_annotation: None,
value: Expr::IntLit(0),
visibility: ds_parser::Visibility::Private,
doc: None,
span: span(),
}),
Declaration::Let(LetDecl {
@ -1040,6 +1190,8 @@ mod tests {
BinOp::Mul,
Box::new(Expr::IntLit(2)),
),
visibility: ds_parser::Visibility::Private,
doc: None,
span: span(),
}),
]);
@ -1064,6 +1216,8 @@ mod tests {
],
props: vec![],
}),
visibility: ds_parser::Visibility::Private,
doc: None,
span: span(),
}),
]);
@ -1085,6 +1239,8 @@ mod tests {
value: Expr::StringLit(ds_parser::StringLit {
segments: vec![ds_parser::StringSegment::Literal("hello".to_string())],
}),
visibility: ds_parser::Visibility::Private,
doc: None,
span: span(),
}),
]);
@ -1120,6 +1276,8 @@ mod tests {
name: "count".to_string(),
type_annotation: Some(ds_parser::TypeExpr::Named("Int".to_string())),
value: Expr::IntLit(42),
visibility: ds_parser::Visibility::Private,
doc: None,
span: span(),
}),
]);
@ -1137,6 +1295,8 @@ mod tests {
value: Expr::StringLit(ds_parser::StringLit {
segments: vec![ds_parser::StringSegment::Literal("oops".to_string())],
}),
visibility: ds_parser::Visibility::Private,
doc: None,
span: span(),
}),
]);
@ -1162,6 +1322,8 @@ mod tests {
)),
}),
value: Expr::IntLit(5),
visibility: ds_parser::Visibility::Private,
doc: None,
span: span(),
}),
]);
@ -1185,6 +1347,8 @@ mod tests {
)),
}),
value: Expr::IntLit(-1),
visibility: ds_parser::Visibility::Private,
doc: None,
span: span(),
}),
]);
@ -1210,12 +1374,16 @@ mod tests {
Box::new(Expr::IntLit(0)),
)),
},
visibility: ds_parser::Visibility::Private,
doc: None,
span: span(),
}),
Declaration::Let(LetDecl {
name: "count".to_string(),
type_annotation: Some(ds_parser::TypeExpr::Named("PositiveInt".to_string())),
value: Expr::IntLit(5),
visibility: ds_parser::Visibility::Private,
doc: None,
span: span(),
}),
]);
@ -1231,6 +1399,8 @@ mod tests {
Declaration::TypeAlias(ds_parser::TypeAliasDecl {
name: "Foo".to_string(),
definition: ds_parser::TypeExpr::Named("Foo".to_string()),
visibility: ds_parser::Visibility::Private,
doc: None,
span: span(),
}),
]);
@ -1257,6 +1427,8 @@ mod tests {
)),
}),
value: Expr::IntLit(-42),
visibility: ds_parser::Visibility::Private,
doc: None,
span: span(),
}),
]);
@ -1338,6 +1510,8 @@ mod tests {
name: "count".to_string(),
type_annotation: None,
value: Expr::IntLit(0),
visibility: ds_parser::Visibility::Private,
doc: None,
span: span(),
}),
Declaration::Let(LetDecl {
@ -1348,6 +1522,8 @@ mod tests {
BinOp::Mul,
Box::new(Expr::IntLit(2)),
),
visibility: ds_parser::Visibility::Private,
doc: None,
span: span(),
}),
]);
@ -1385,6 +1561,8 @@ mod tests {
Expr::IntLit(2),
Expr::IntLit(3),
]),
visibility: ds_parser::Visibility::Private,
doc: None,
span: span(),
}),
]);
@ -1425,6 +1603,8 @@ mod tests {
name: "flag".to_string(),
type_annotation: None,
value: Expr::BoolLit(true),
visibility: ds_parser::Visibility::Private,
doc: None,
span: span(),
}),
Declaration::Let(LetDecl {
@ -1435,6 +1615,8 @@ mod tests {
Box::new(Expr::IntLit(1)),
Box::new(Expr::IntLit(2)),
),
visibility: ds_parser::Visibility::Private,
doc: None,
span: span(),
}),
]);
@ -1454,6 +1636,8 @@ mod tests {
Expr::IntLit(2),
Expr::IntLit(3),
]),
visibility: ds_parser::Visibility::Private,
doc: None,
span: span(),
}),
Declaration::Let(LetDecl {
@ -1465,6 +1649,8 @@ mod tests {
iter: Box::new(Expr::Ident("nums".to_string())),
body: Box::new(Expr::Ident("n".to_string())),
},
visibility: ds_parser::Visibility::Private,
doc: None,
span: span(),
}),
]);
@ -1484,6 +1670,8 @@ mod tests {
name: "count".to_string(),
type_annotation: None,
value: Expr::IntLit(0),
visibility: ds_parser::Visibility::Private,
doc: None,
span: span(),
}),
]);

View file

@ -72,6 +72,12 @@ pub enum TypeErrorKind {
TypeAliasCycle {
name: String,
},
/// A match expression does not cover all enum variants.
NonExhaustiveMatch {
enum_name: String,
missing_variants: Vec<String>,
},
}
impl TypeError {
@ -171,6 +177,15 @@ impl TypeError {
name
))
}
TypeErrorKind::NonExhaustiveMatch { enum_name, missing_variants } => {
("NON-EXHAUSTIVE MATCH".to_string(), format!(
"This `match` on `{}` does not cover all variants.\n\n\
Missing: {}\n\n\
Hint: Add the missing arms, or use `_` as a wildcard pattern.",
enum_name,
missing_variants.join(", ")
))
}
};
// Format like Elm

View file

@ -0,0 +1,119 @@
# Codebase Map
> Auto-generated by Space Operator Context extension.
> Links are relative — click to navigate to source files.
## Project Structure
- **[bench/](../../bench/)**
- [benches/](../../bench/benches/)
- [Cargo.toml](../../bench/Cargo.toml)
- [src/](../../bench/src/)
- [BITSTREAM_INTEGRATION.md](../../BITSTREAM_INTEGRATION.md)
- [Cargo.lock](../../Cargo.lock)
- [Cargo.toml](../../Cargo.toml)
- [CHANGELOG.md](../../CHANGELOG.md)
- [cliff.toml](../../cliff.toml)
- **[compiler/](../../compiler/)**
- [ds-analyzer/](../../compiler/ds-analyzer/)
- [ds-cli/](../../compiler/ds-cli/)
- [ds-codegen/](../../compiler/ds-codegen/)
- [ds-incremental/](../../compiler/ds-incremental/)
- [ds-layout/](../../compiler/ds-layout/)
- [ds-parser/](../../compiler/ds-parser/)
- [ds-types/](../../compiler/ds-types/)
- **[devices/](../../devices/)**
- [panel-preview/](../../devices/panel-preview/)
- [waveshare-p4-panel/](../../devices/waveshare-p4-panel/)
- **[docs/](../)** — Project documentation
- [explorations.md](../explorations.md)
- [fabric-display-build-guide.md](../fabric-display-build-guide.md)
- [fabric-display-overview.md](../fabric-display-overview.md)
- [generated/](./)
- [integration.md](../integration.md)
- [panel-ir-spec.md](../panel-ir-spec.md)
- [DREAMSTACK.md](../../DREAMSTACK.md)
- **[engine/](../../engine/)**
- [ds-physics/](../../engine/ds-physics/)
- [ds-screencast/](../../engine/ds-screencast/)
- [ds-stream/](../../engine/ds-stream/)
- [ds-stream-wasm/](../../engine/ds-stream-wasm/)
- **[examples/](../../examples/)**
- [beats-viewer.ds](../../examples/beats-viewer.ds)
- [bench-signals.ds](../../examples/bench-signals.ds)
- [benchmarks.html](../../examples/benchmarks.html)
- [builtins.ds](../../examples/builtins.ds)
- [callback-demo.ds](../../examples/callback-demo.ds)
- [component-gallery.ds](../../examples/component-gallery.ds)
- [compose-dashboard.ds](../../examples/compose-dashboard.ds)
- [compose-master.ds](../../examples/compose-master.ds)
- [compose-metrics.ds](../../examples/compose-metrics.ds)
- [compose-search-map.ds](../../examples/compose-search-map.ds)
- [compose-widgets.ds](../../examples/compose-widgets.ds)
- [counter.ds](../../examples/counter.ds)
- [dashboard.ds](../../examples/dashboard.ds)
- [dashboard.html](../../examples/dashboard.html)
- [each-demo.ds](../../examples/each-demo.ds)
- [form.ds](../../examples/form.ds)
- [game-breakout.ds](../../examples/game-breakout.ds)
- [game-pong.ds](../../examples/game-pong.ds)
- [game-pong.html](../../examples/game-pong.html)
- [game-reaction.ds](../../examples/game-reaction.ds)
- [IMPLEMENTATION_PLAN.md](../../IMPLEMENTATION_PLAN.md)
- **[pkg/](../../pkg/)**
- [ds-stream-wasm/](../../pkg/ds-stream-wasm/)
- **[registry/](../../registry/)**
- [components/](../../registry/components/)
- [registry.json](../../registry/registry.json)
- **[scripts/](../../scripts/)**
- [release.sh](../../scripts/release.sh)
- **[sdk/](../../sdk/)**
- [dreamstack-embed.js](../../sdk/dreamstack-embed.js)
- [STREAM_COMPOSITION.md](../../STREAM_COMPOSITION.md)
- [TODO.md](../../TODO.md)
- [USE_CASES.md](../../USE_CASES.md)
## Rust Dependencies (Key Crates)
### Local Crates
| Crate | Path |
|-------|------|
| `ds-parser` | [compiler/ds-parser](../../compiler/ds-parser) |
| `ds-analyzer` | [compiler/ds-analyzer](../../compiler/ds-analyzer) |
| `ds-codegen` | [compiler/ds-codegen](../../compiler/ds-codegen) |
| `ds-layout` | [compiler/ds-layout](../../compiler/ds-layout) |
| `ds-types` | [compiler/ds-types](../../compiler/ds-types) |
| `ds-incremental` | [compiler/ds-incremental](../../compiler/ds-incremental) |
| `ds-physics` | [engine/ds-physics](../../engine/ds-physics) |
| `ds-stream` | [engine/ds-stream](../../engine/ds-stream) |
| `ds-stream-wasm` | [engine/ds-stream-wasm](../../engine/ds-stream-wasm) |
## Tauri Backend Modules
## Frontend (p2p-ui)
React + TypeScript desktop UI built with Tauri.
### Key Pages
| Page | Description |
|------|-------------|
| ConnectionPage | Connect to a remote Space Operator server |
| ProviderPage | Register as a P2P compute provider |
| LocalServerPage | Run an embedded flow backend locally |
| CommandsPage | Browse and test registered flow commands |
| NodeDevPage | Develop and test new nodes |
| KeypairsPage | Manage Solana keypairs |
| DiscoveryPage | P2P network node discovery |
## Flow Backend
The flow engine (git submodule) provides:
- **flow** — Core flow graph execution engine
- **flow-lib** — Shared types: `CommandTrait`, `Value`, `CommandContext`
- **flow-rpc** — Cap'n Proto RPC for distributed node execution
- **cmds-std** — Standard nodes: HTTP, JSON, string ops, storage, KV store
- **cmds-solana** — Solana blockchain nodes: token ops, DeFi, NFT, governance
- **cmds-deno** — JavaScript/TypeScript node runtime via Deno
- **rhai-script** — Rhai scripting language nodes

View file

@ -0,0 +1,15 @@
# Node Catalog
> Auto-generated by Space Operator Context extension.
> Each node shows its typed inputs/outputs and which other nodes are compatible.
**0 nodes** across **0 categories**
## Quick Reference
| Node | Category | Inputs | Outputs | Source |
|------|----------|--------|---------|--------|
## Type Compatibility Index
Which nodes produce and consume each type:

View file

@ -1,11 +1,15 @@
# Changelog
All notable changes to this package will be documented in this file.
## [Unreleased]
## [0.1.0] - 2026-02-26
## [0.5.0] - 2026-03-09
### 🚀 Features
- Initial release — Rapier2D WASM integration, scene container, reactive physics bodies
- **Particle emitters** — `create_emitter(x, y, rate, speed, spread, lifetime)`, `remove_emitter`, `set_emitter_position`, auto-spawn in `step()`
- **Force fields** — `create_force_field(x, y, radius, strength)`, `remove_force_field`, attract/repel applied each step
- **Body tags** — `set_body_tag`, `get_body_tag`, `get_bodies_by_tag` for user-defined categorization
- **Contact materials** — `set_contact_material(tag_a, tag_b, friction, restitution)`, `get_contact_material`
## [0.4.0] - 2026-03-09 — Collision groups/events, point/AABB query, body locking, gravity scale
## [0.3.0] - 2026-03-09 — Joints (spring, fixed), serialize/deserialize, velocity/force, boundaries
## [0.2.0] - 2026-03-09 — Body management, sensors, material properties, sleeping
## [0.1.0] - 2026-02-26 — Initial release

View file

@ -1,6 +1,6 @@
[package]
name = "ds-physics"
version = "0.1.0"
version = "0.5.0"
edition.workspace = true
license.workspace = true

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,14 @@
# Changelog
## [0.5.0] - 2026-03-09
### 🚀 Features
- **`--record=FILE`** — record all frames to .dsrec file in real-time
- **`/screenshot`** HTTP endpoint — capture single CDP screenshot on demand
- **`/health`** HTTP endpoint — JSON status (uptime, frames, connections, quality tier, recording status)
- **Recording stats** — frame count logged on SIGINT
## [0.4.0] - 2026-03-09 — ACK-driven adaptive quality, gamepad forwarding, --audio
## [0.3.0] - 2026-03-09 — ds-stream framing, WebP, multi-tab, monitor page
## [0.2.0] - 2026-03-09 — Configurable viewport, input forwarding, CDP reconnection

View file

@ -1,16 +1,30 @@
#!/usr/bin/env node
/**
* DreamStack Screencast CDP Capture Agent
* DreamStack Screencast CDP Capture Agent v0.5.0
*
* Streams any web page to DreamStack panels via Chrome DevTools Protocol.
* Zero changes to the target app — just point at any URL.
* Outputs frames using the ds-stream binary protocol (16-byte header).
*
* Usage:
* node capture.js [url] [--headless] [--fps=N] [--quality=N]
* node capture.js [url] [options]
*
* Options:
* --headless Run Chrome in headless mode
* --fps=N Max frames per second (default: 30)
* --quality=N JPEG/WebP quality 1-100 (default: 75)
* --width=N Viewport width (default: 800)
* --height=N Viewport height (default: 1280)
* --ws-port=N WebSocket server port (default: 9300)
* --monitor-port=N Monitor HTTP port (default: 9301)
* --cdp-port=N Chrome DevTools port (default: 9222)
* --format=FORMAT Image format: jpeg or webp (default: jpeg)
* --tabs=N Number of tabs to capture (default: 1)
* --audio Enable audio capture (requires Chrome audio)
*
* Examples:
* node capture.js http://localhost:3000
* node capture.js https://react.dev --headless --fps=30
* node capture.js https://react.dev --headless --fps=30 --format=webp
* node capture.js http://localhost:5173 --width=1024 --height=768 --tabs=3
*/
const CDP = require('chrome-remote-interface');
@ -18,22 +32,121 @@ const { WebSocketServer } = require('ws');
const http = require('http');
const { spawn } = require('child_process');
// ─── Parse CLI Args ───
/**
 * Read a `--name=value` CLI flag from process.argv.
 *
 * @param {string} name - Flag name without the leading dashes.
 * @param {*} defaultVal - Fallback used when the flag is absent.
 * @returns {string} The flag's value (everything after the first '='),
 *   or String(defaultVal) when the flag is not present. Always a string,
 *   matching the original contract — callers parseInt() as needed.
 */
function getArg(name, defaultVal) {
  const prefix = `--${name}=`;
  const found = process.argv.find(a => a.startsWith(prefix));
  // Slice past the prefix instead of split('=')[1] so values that themselves
  // contain '=' (URLs, query strings, --record file paths) survive intact.
  return found ? found.slice(prefix.length) : String(defaultVal);
}
// ─── Config ───
const TARGET_URL = process.argv[2] || 'http://localhost:3000';
const WIDTH = 800;
const HEIGHT = 1280;
const WS_PORT = 9300;
const MONITOR_PORT = 9301;
const CDP_PORT = 9222;
const QUALITY = parseInt((process.argv.find(a => a.startsWith('--quality=')) || '').split('=')[1] || '75');
const MAX_FPS = parseInt((process.argv.find(a => a.startsWith('--fps=')) || '').split('=')[1] || '30');
const WIDTH = parseInt(getArg('width', 800));
const HEIGHT = parseInt(getArg('height', 1280));
const WS_PORT = parseInt(getArg('ws-port', 9300));
const MONITOR_PORT = parseInt(getArg('monitor-port', 9301));
const CDP_PORT = parseInt(getArg('cdp-port', 9222));
const QUALITY = parseInt(getArg('quality', 75));
const MAX_FPS = parseInt(getArg('fps', 30));
const IMAGE_FORMAT = getArg('format', 'jpeg'); // 'jpeg' or 'webp'
const TAB_COUNT = parseInt(getArg('tabs', 1));
const HEADLESS = process.argv.includes('--headless');
const ENABLE_AUDIO = process.argv.includes('--audio');
const RECORD_FILE = getArg('record', '');
const STATS_INTERVAL = 5000;
const clients = new Set();
let frameCount = 0;
let bytesSent = 0;
// v0.5: Recording file stream
let recordStream = null;
let recordFrameCount = 0;
if (RECORD_FILE) {
const DSREC_MAGIC = Buffer.from([0x44, 0x53, 0x01]);
recordStream = fs.createWriteStream(RECORD_FILE);
recordStream.write(DSREC_MAGIC);
// Reserve 24 bytes for metadata (will be patched on close)
recordStream.write(Buffer.alloc(24));
console.log(`[Record] Writing to ${RECORD_FILE}`);
}
// ─── ds-stream Protocol Constants ───
const FRAME_COMPRESSED_PIXELS = 0x02;
const FLAG_COMPRESSED = 0x04;
const FLAG_KEYFRAME = 0x02;
const FLAG_INPUT = 0x08;
const HEADER_SIZE = 16;
// Input types (ds-stream protocol)
const INPUT_POINTER = 0x01;
const INPUT_PTR_DOWN = 0x02;
const INPUT_PTR_UP = 0x03;
const INPUT_KEY_DOWN = 0x10;
const INPUT_KEY_UP = 0x11;
const INPUT_TOUCH = 0x20;
const INPUT_TOUCH_END = 0x21;
const INPUT_SCROLL = 0x50;
// Compressed pixel format byte
const FMT_WEBP = 1;
const FMT_JPEG = 2;
// v0.4: ACK and Audio
const FRAME_ACK = 0x0E;
const FRAME_AUDIO = 0x08;
const clients = new Map(); // channel → Set<ws>
let globalFrameCount = 0;
let globalBytesSent = 0;
const t0 = Date.now();
// Stats per channel
const channelStats = new Map();
// ─── v0.4: Adaptive Quality Controller ───
/**
 * Tracks round-trip times reported by client ACKs and maps the rolling
 * average onto a quality tier: 0=Full, 1=Reduced, 2=Minimal.
 */
class AdaptiveQuality {
  constructor() {
    this.rttHistory = [];   // sliding window of recent RTT samples (ms)
    this.maxHistory = 30;   // window size
    this.currentTier = 0;   // 0=Full, 1=Reduced, 2=Minimal
  }

  /** Record one RTT sample and re-derive the current tier. */
  recordAck(rttMs) {
    this.rttHistory.push(rttMs);
    while (this.rttHistory.length > this.maxHistory) this.rttHistory.shift();
    this.currentTier = this.computeTier();
  }

  /** Average the window: <50ms → Full, <150ms → Reduced, else Minimal. */
  computeTier() {
    const n = this.rttHistory.length;
    if (n === 0) return 0;
    let sum = 0;
    for (const rtt of this.rttHistory) sum += rtt;
    const avg = sum / n;
    return avg < 50 ? 0 : avg < 150 ? 1 : 2;
  }

  get quality() { return [75, 40, 0][this.currentTier]; }
  get skipFrames() { return [0, 2, 999999][this.currentTier]; }
  get tierName() { return ['Full', 'Reduced', 'Minimal'][this.currentTier]; }
}
const aq = new AdaptiveQuality();
// ─── ds-stream Frame Builder ───
/**
 * Serialize one ds-stream message: 16-byte little-endian header + payload.
 * Header layout: type(u8) flags(u8) seq(u16) timestamp(u32) width(u16)
 * height(u16) payloadLen(u32).
 *
 * @returns {Buffer} header and payload in a single buffer
 */
function buildFrame(type, flags, seq, timestamp, width, height, payload) {
  // Allocate once and write in place instead of concat'ing two buffers.
  const frame = Buffer.alloc(HEADER_SIZE + payload.length);
  frame.writeUInt8(type, 0);
  frame.writeUInt8(flags, 1);
  frame.writeUInt16LE(seq & 0xFFFF, 2);      // seq wraps at u16
  frame.writeUInt32LE(timestamp >>> 0, 4);   // force unsigned 32-bit
  frame.writeUInt16LE(width, 8);
  frame.writeUInt16LE(height, 10);
  frame.writeUInt32LE(payload.length, 12);
  frame.set(payload, HEADER_SIZE);
  return frame;
}
/**
 * Wrap compressed image bytes in a CompressedPixels (0x02) frame.
 * Payload = [format byte][image data]; flags mark it compressed + keyframe.
 */
function buildCompressedPixelFrame(seq, timestamp, width, height, format, imageData) {
  const body = Buffer.concat([Buffer.from([format]), imageData]);
  const flags = FLAG_COMPRESSED | FLAG_KEYFRAME;
  return buildFrame(FRAME_COMPRESSED_PIXELS, flags, seq, timestamp, width, height, body);
}
// ─── 1. Launch Chrome ───
function launchChrome() {
return new Promise((resolve, reject) => {
@ -49,36 +162,46 @@ function launchChrome() {
args.push('about:blank');
const proc = spawn('google-chrome', args, { stdio: ['pipe', 'pipe', 'pipe'] });
proc.stderr.on('data', d => {
if (d.toString().includes('DevTools listening')) resolve(proc);
});
proc.on('error', reject);
proc.on('exit', code => { console.log(`[Chrome] exit ${code}`); process.exit(0); });
// Fallback timeout
setTimeout(() => resolve(proc), 4000);
});
}
// ─── 2. WebSocket server for panels/monitor ───
// ─── 2. WebSocket server ───
function startWS() {
const wss = new WebSocketServer({ host: '0.0.0.0', port: WS_PORT });
wss.on('connection', (ws, req) => {
clients.add(ws);
console.log(`[WS] +1 panel (${clients.size}) from ${req.socket.remoteAddress}`);
ws.on('close', () => { clients.delete(ws); console.log(`[WS] -1 panel (${clients.size})`); });
// Parse channel from path: /stream/channelName or /stream (default)
const path = req.url || '/stream';
const parts = path.replace(/^\//, '').split('/');
const channel = parts[1] || 'default';
if (!clients.has(channel)) clients.set(channel, new Set());
clients.get(channel).add(ws);
console.log(`[WS] +1 on "${channel}" (${clients.get(channel).size})`);
ws.on('close', () => {
const ch = clients.get(channel);
if (ch) {
ch.delete(ws);
console.log(`[WS] -1 on "${channel}" (${ch.size})`);
}
});
ws.on('message', data => { ws._inputHandler?.(data); });
});
console.log(`[WS] Panels: ws://0.0.0.0:${WS_PORT}`);
console.log(`[WS] Panels: ws://0.0.0.0:${WS_PORT}/stream/{channel}`);
return wss;
}
// ─── 3. Monitor page ───
function startMonitor() {
const fmt = IMAGE_FORMAT === 'webp' ? 'image/webp' : 'image/jpeg';
const html = `<!DOCTYPE html><html><head>
<title>DreamStack Screencast</title>
<title>DreamStack Screencast v0.3</title>
<style>
*{margin:0;padding:0;box-sizing:border-box}
body{background:#0a0a0a;display:flex;flex-direction:column;align-items:center;justify-content:center;height:100vh;font-family:system-ui;color:#999}
@ -86,61 +209,102 @@ canvas{border:1px solid #333;border-radius:8px;max-height:85vh;cursor:crosshair}
#hud{margin-top:10px;font:13px/1.5 monospace;text-align:center}
h3{margin-bottom:6px;color:#555;font-size:14px}
</style></head><body>
<h3>DreamStack Screencast Monitor</h3>
<h3>DreamStack Screencast Monitor v0.3</h3>
<canvas id="c" width="${WIDTH}" height="${HEIGHT}"></canvas>
<div id="hud">Connecting</div>
<script>
const c=document.getElementById('c'),ctx=c.getContext('2d'),hud=document.getElementById('hud');
c.style.width=Math.min(${WIDTH},innerWidth*.45)+'px';c.style.height='auto';
let fr=0,by=0,t=Date.now();
const ws=new WebSocket('ws://'+location.hostname+':${WS_PORT}');
const ws=new WebSocket('ws://'+location.hostname+':${WS_PORT}/stream/default');
ws.binaryType='arraybuffer';
ws.onopen=()=>{hud.textContent='Connected — waiting for frames…'};
ws.onmessage=e=>{
const buf=new Uint8Array(e.data);
if(buf[0]!==0x50)return;
const jpeg=buf.slice(9);fr++;by+=jpeg.length;
const blob=new Blob([jpeg],{type:'image/jpeg'});
if(buf.length<16)return;
const type=buf[0];
if(type!==0x02)return; // CompressedPixels only
const payloadLen=buf[12]|(buf[13]<<8)|(buf[14]<<16)|(buf[15]<<24);
const fmtByte=buf[16]; // format byte
const imageData=buf.slice(17); // actual image data
fr++;by+=imageData.length;
const mime=fmtByte===1?'image/webp':'image/jpeg';
const blob=new Blob([imageData],{type:mime});
const url=URL.createObjectURL(blob);
const img=new Image();
img.onload=()=>{ctx.drawImage(img,0,0);URL.revokeObjectURL(url)};
img.src=url;
const now=Date.now();
if(now-t>1000){
hud.textContent='FPS: '+(fr/((now-t)/1000)).toFixed(1)+' | '+(by/1024/((now-t)/1000)).toFixed(0)+' KB/s | Frame: '+(jpeg.length/1024).toFixed(1)+'KB';
hud.textContent='FPS: '+(fr/((now-t)/1000)).toFixed(1)+' | '+(by/1024/((now-t)/1000)).toFixed(0)+' KB/s | Frame: '+(imageData.length/1024).toFixed(1)+'KB';
fr=0;by=0;t=now;
}
};
c.addEventListener('click',e=>{
const r=c.getBoundingClientRect(),sx=${WIDTH}/r.width,sy=${HEIGHT}/r.height;
const x=Math.round((e.clientX-r.left)*sx),y=Math.round((e.clientY-r.top)*sy);
const b=new Uint8Array(7);const dv=new DataView(b.buffer);
b[0]=0x60;b[1]=0;dv.setUint16(2,x,true);dv.setUint16(4,y,true);b[6]=0;
// Send as ds-stream INPUT_PTR_DOWN then INPUT_PTR_UP
function sendInput(type,x,y){
const b=new ArrayBuffer(16+4);const dv=new DataView(b);const u=new Uint8Array(b);
u[0]=type;u[1]=0x08;dv.setUint16(8,x,true);dv.setUint16(10,y,true);dv.setUint32(12,4,true);
dv.setUint16(16,x,true);dv.setUint16(18,y,true);
ws.send(b);
setTimeout(()=>{const e2=new Uint8Array(7);const d2=new DataView(e2.buffer);e2[0]=0x60;e2[1]=2;d2.setUint16(2,x,true);d2.setUint16(4,y,true);e2[6]=0;ws.send(e2)},50);
}
sendInput(0x02,x,y);
setTimeout(()=>sendInput(0x03,x,y),50);
hud.textContent+=' | Click: ('+x+','+y+')';
});
ws.onclose=()=>{hud.textContent='Disconnected — reload to retry'};
</script></body></html>`;
http.createServer((_, res) => { res.writeHead(200, { 'Content-Type': 'text/html' }); res.end(html); })
.listen(MONITOR_PORT, '0.0.0.0', () => console.log(`[Monitor] http://0.0.0.0:${MONITOR_PORT}`));
const monitorServer = http.createServer((req, res) => {
if (req.url === '/health') {
res.writeHead(200, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({
uptime: Math.round((Date.now() - t0) / 1000),
frames: globalFrameCount,
connections: Array.from(clients.values()).reduce((sum, s) => sum + s.size, 0),
qualityTier: aq.tierName,
recording: !!recordStream,
recordedFrames: recordFrameCount,
}));
return;
}
if (req.url === '/screenshot') {
// Capture via active CDP session
connectCDP(1).then(async (client) => {
const { Page } = client;
const { data } = await Page.captureScreenshot({ format: IMAGE_FORMAT === 'webp' ? 'png' : 'jpeg', quality: QUALITY });
const imgBuf = Buffer.from(data, 'base64');
res.writeHead(200, { 'Content-Type': IMAGE_FORMAT === 'webp' ? 'image/png' : 'image/jpeg' });
res.end(imgBuf);
client.close();
}).catch(err => {
res.writeHead(500); res.end('Screenshot failed: ' + err.message);
});
return;
}
res.writeHead(200, { 'Content-Type': 'text/html' }); res.end(html);
});
monitorServer.listen(MONITOR_PORT, '0.0.0.0', () => console.log(`[Monitor] http://0.0.0.0:${MONITOR_PORT}`));
}
// ─── 4. CDP screencast loop ───
async function startScreencast() {
let client;
// Retry CDP connect (Chrome may still be starting)
for (let i = 0; i < 10; i++) {
// ─── 4. CDP screencast loop (with reconnection) ───
async function connectCDP(maxRetries = 10) {
for (let i = 0; i < maxRetries; i++) {
try {
client = await CDP({ port: CDP_PORT });
break;
return await CDP({ port: CDP_PORT });
} catch {
await new Promise(r => setTimeout(r, 1000));
const delay = Math.min(1000 * Math.pow(1.5, i), 10000);
console.log(`[CDP] Connect attempt ${i + 1}/${maxRetries} failed, retry in ${Math.round(delay)}ms...`);
await new Promise(r => setTimeout(r, delay));
}
}
if (!client) throw new Error('Cannot connect to Chrome CDP');
throw new Error('Cannot connect to Chrome CDP');
}
async function startScreencast(channel = 'default', url = TARGET_URL) {
const client = await connectCDP();
const { Page, Input, Emulation } = client;
await Page.enable();
@ -148,64 +312,270 @@ async function startScreencast() {
width: WIDTH, height: HEIGHT, deviceScaleFactor: 1, mobile: true,
});
await Emulation.setTouchEmulationEnabled({ enabled: true });
await Page.navigate({ url: TARGET_URL });
await new Promise(r => setTimeout(r, 2000)); // let page load
await Page.navigate({ url });
await new Promise(r => setTimeout(r, 2000));
// Wire up input forwarding from panels
for (const ws of clients) {
ws._inputHandler = (data) => handleInput(Buffer.from(data), Input, Page);
// Initialize channel stats
if (!channelStats.has(channel)) {
channelStats.set(channel, { frames: 0, bytes: 0, seq: 0, lastStatTime: Date.now(), statFrames: 0, statBytes: 0 });
}
// Also for future connections
const origAdd = clients.add.bind(clients);
clients.add = function (ws) {
origAdd(ws);
ws._inputHandler = (data) => handleInput(Buffer.from(data), Input, Page);
const stats = channelStats.get(channel);
// v0.4: ACK listener — update adaptive quality
const wireInput = (ws) => {
ws._inputHandler = (data) => {
const buf = Buffer.from(data);
if (buf.length >= HEADER_SIZE && buf[0] === FRAME_ACK) {
// ACK frame: extract RTT from payload
const payloadLen = buf.readUInt32LE(12);
if (payloadLen >= 4) {
const rttMs = buf.readUInt16LE(HEADER_SIZE + 2);
aq.recordAck(rttMs);
// Adjust quality if tier changed
const newQuality = aq.quality || QUALITY;
if (newQuality !== currentQuality && newQuality > 0) {
currentQuality = newQuality;
Page.stopScreencast().catch(() => { });
Page.startScreencast({
format: cdpFormat, quality: currentQuality,
maxWidth: WIDTH, maxHeight: HEIGHT,
everyNthFrame: Math.max(1, Math.round(60 / MAX_FPS)),
}).catch(() => { });
console.log(`[AQ:${channel}] Tier: ${aq.tierName} → q${currentQuality}`);
}
}
return; // don't forward ACK as input
}
handleInput(buf, Input, Page);
};
};
// Start screencast
// Existing clients
const ch = clients.get(channel);
if (ch) for (const ws of ch) wireInput(ws);
// Watch for new connections to this channel
const origSet = clients.get(channel) || new Set();
const origAdd = origSet.add.bind(origSet);
origSet.add = function (ws) {
origAdd(ws);
wireInput(ws);
};
clients.set(channel, origSet);
// Determine format
const useWebP = IMAGE_FORMAT === 'webp';
const formatByte = useWebP ? FMT_WEBP : FMT_JPEG;
const cdpFormat = useWebP ? 'png' : 'jpeg'; // CDP doesn't support webp directly, fallback
// Start screencast (v0.4: use adaptive quality)
let currentQuality = QUALITY;
let skipCounter = 0;
await Page.startScreencast({
format: 'jpeg', quality: QUALITY,
format: cdpFormat, quality: currentQuality,
maxWidth: WIDTH, maxHeight: HEIGHT,
everyNthFrame: Math.max(1, Math.round(60 / MAX_FPS)),
});
// Listen for frames via the event API
client.on('event', (message) => {
if (message.method !== 'Page.screencastFrame') return;
const { sessionId, data, metadata } = message.params;
// ACK immediately (fire-and-forget)
Page.screencastFrameAck({ sessionId }).catch(() => { });
frameCount++;
const jpegBuf = Buffer.from(data, 'base64');
bytesSent += jpegBuf.length;
// Build frame: [0x50][ts:u32LE][w:u16LE][h:u16LE][jpeg...]
const hdr = Buffer.alloc(9);
hdr[0] = 0x50;
hdr.writeUInt32LE((Date.now() - t0) >>> 0, 1);
hdr.writeUInt16LE(metadata.deviceWidth || WIDTH, 5);
hdr.writeUInt16LE(metadata.deviceHeight || HEIGHT, 7);
const frame = Buffer.concat([hdr, jpegBuf]);
// Broadcast
for (const ws of clients) {
if (ws.readyState === 1) ws.send(frame);
// v0.4: adaptive frame skipping
if (aq.skipFrames > 0) {
skipCounter++;
if (skipCounter <= aq.skipFrames) return;
skipCounter = 0;
}
if (frameCount % 60 === 0) {
const elapsed = (Date.now() - t0) / 1000;
console.log(`[Cast] #${frameCount} | ${(jpegBuf.length / 1024).toFixed(1)}KB | avg ${(bytesSent / 1024 / elapsed).toFixed(0)} KB/s | ${clients.size} panels`);
stats.seq++;
stats.frames++;
stats.statFrames++;
globalFrameCount++;
const imageBuf = Buffer.from(data, 'base64');
const timestamp = (Date.now() - t0) >>> 0;
const w = metadata.deviceWidth || WIDTH;
const h = metadata.deviceHeight || HEIGHT;
// Build ds-stream CompressedPixels frame
const frame = buildCompressedPixelFrame(stats.seq, timestamp, w, h, formatByte, imageBuf);
stats.bytes += frame.length;
stats.statBytes += frame.length;
globalBytesSent += frame.length;
// Broadcast to channel
const receivers = clients.get(channel);
if (receivers) {
for (const ws of receivers) {
if (ws.readyState === 1) ws.send(frame);
}
}
// v0.5: Write to recording file
if (recordStream) {
const tsUs = BigInt(Date.now() - t0) * 1000n;
const tsBuf = Buffer.alloc(8);
tsBuf.writeBigUInt64LE(tsUs);
const lenBuf = Buffer.alloc(4);
lenBuf.writeUInt32LE(frame.length);
recordStream.write(tsBuf);
recordStream.write(lenBuf);
recordStream.write(frame);
recordFrameCount++;
}
});
console.log(`[CDP] Casting ${TARGET_URL}${WIDTH}×${HEIGHT} @ q${QUALITY}`);
// Handle CDP disconnect — attempt reconnection
client.on('disconnect', async () => {
console.log(`[CDP:${channel}] Disconnected! Attempting reconnection...`);
try {
await startScreencast(channel, url);
} catch (err) {
console.error(`[CDP:${channel}] Reconnection failed:`, err.message);
}
});
console.log(`[CDP:${channel}] Casting ${url}${WIDTH}×${HEIGHT} @ q${QUALITY} (${IMAGE_FORMAT})`);
}
// ─── Input handler ───
// ─── 5. Stats logging ───
/**
 * Periodically log per-channel throughput (fps / KB/s / running totals)
 * plus a global rollup line. Interval counters reset after each report;
 * lifetime totals accumulate.
 */
function startStatsLogger() {
  setInterval(() => {
    const now = Date.now();
    const uptimeSec = (now - t0) / 1000;

    // Count currently connected receivers across all channels.
    let receivers = 0;
    for (const [, sockets] of clients) receivers += sockets.size;

    for (const [name, s] of channelStats) {
      const windowSec = (now - s.lastStatTime) / 1000;
      if (windowSec < 1) continue; // skip near-zero windows to avoid junk rates
      const fps = s.statFrames / windowSec;
      const kbps = s.statBytes / 1024 / windowSec;
      console.log(`[Stats:${name}] ${fps.toFixed(1)} fps | ${kbps.toFixed(0)} KB/s | total: ${s.frames} frames | ${(s.bytes / 1024 / 1024).toFixed(1)} MB`);
      s.statFrames = 0;
      s.statBytes = 0;
      s.lastStatTime = now;
    }

    if (channelStats.size > 1 || receivers > 0) {
      console.log(`[Stats:global] up=${uptimeSec.toFixed(0)}s | ${globalFrameCount} frames | ${(globalBytesSent / 1024 / 1024).toFixed(1)} MB | ${receivers} receivers`);
    }
  }, STATS_INTERVAL);
}
// ─── Input handler (ds-stream protocol) ───
/**
 * Decode one incoming ds-stream message from a panel and replay it as a
 * CDP input event (mouse / keyboard / touch / scroll / gamepad).
 *
 * buf:   raw message bytes — 16-byte header followed by payload
 * Input: CDP Input domain (dispatchMouseEvent / dispatchKeyEvent / dispatchTouchEvent)
 * Page:  CDP Page domain — only used by the legacy fallback path
 *
 * Messages without FLAG_INPUT (and with type < 0xF0) are routed to the
 * legacy v0.2 handler; all CDP dispatch calls swallow errors so a dropped
 * browser session cannot crash the capture loop.
 */
function handleInput(buf, Input, Page) {
  if (buf.length < HEADER_SIZE) return; // too short to carry a header
  const type = buf[0];
  const flags = buf[1];
  // Only process input frames (FLAG_INPUT=0x08)
  if (!(flags & FLAG_INPUT) && type < 0xF0) {
    // Legacy compatibility: try type byte directly
    handleLegacyInput(buf, Input, Page);
    return;
  }
  // Payload length lives at header bytes 12..16 (u32 LE).
  const payloadLen = buf.readUInt32LE(12);
  const payload = buf.slice(HEADER_SIZE, HEADER_SIZE + payloadLen);
  switch (type) {
    case INPUT_POINTER: { // Mouse move — payload: x(u16) y(u16)
      if (payload.length >= 4) {
        const x = payload.readUInt16LE(0), y = payload.readUInt16LE(2);
        Input.dispatchMouseEvent({ type: 'mouseMoved', x, y }).catch(() => { });
      }
      break;
    }
    case INPUT_PTR_DOWN: { // Mouse/touch down — payload: x(u16) y(u16)
      if (payload.length >= 4) {
        const x = payload.readUInt16LE(0), y = payload.readUInt16LE(2);
        Input.dispatchMouseEvent({ type: 'mousePressed', x, y, button: 'left', clickCount: 1 }).catch(() => { });
      }
      break;
    }
    case INPUT_PTR_UP: { // Mouse/touch up — payload: x(u16) y(u16)
      if (payload.length >= 4) {
        const x = payload.readUInt16LE(0), y = payload.readUInt16LE(2);
        Input.dispatchMouseEvent({ type: 'mouseReleased', x, y, button: 'left', clickCount: 1 }).catch(() => { });
      }
      break;
    }
    case INPUT_KEY_DOWN: case INPUT_KEY_UP: { // Keyboard — payload: keyCode(u16)
      if (payload.length >= 2) {
        const keyCode = payload.readUInt16LE(0);
        // NOTE(review): fromCharCode only maps printable codes correctly;
        // non-printable keys (arrows, modifiers) will not round-trip — confirm.
        const key = String.fromCharCode(keyCode);
        Input.dispatchKeyEvent({
          type: type === INPUT_KEY_DOWN ? 'keyDown' : 'keyUp',
          key, code: `Key${key.toUpperCase()}`,
          windowsVirtualKeyCode: keyCode,
        }).catch(() => { });
      }
      break;
    }
    case INPUT_TOUCH: { // Touch start/move — payload: x(u16) y(u16)
      if (payload.length >= 4) {
        const x = payload.readUInt16LE(0), y = payload.readUInt16LE(2);
        Input.dispatchTouchEvent({
          type: 'touchStart',
          touchPoints: [{ x, y, id: 0, radiusX: 10, radiusY: 10, force: 1 }]
        }).catch(() => { });
      }
      break;
    }
    case INPUT_TOUCH_END: { // Touch end — force 0 signals release
      if (payload.length >= 4) {
        const x = payload.readUInt16LE(0), y = payload.readUInt16LE(2);
        Input.dispatchTouchEvent({
          type: 'touchEnd',
          touchPoints: [{ x, y, id: 0, radiusX: 10, radiusY: 10, force: 0 }]
        }).catch(() => { });
      }
      break;
    }
    case INPUT_SCROLL: { // Scroll — payload: dx(i16) dy(i16) x(u16) y(u16)
      if (payload.length >= 8) {
        const dx = payload.readInt16LE(0), dy = payload.readInt16LE(2);
        const x = payload.readUInt16LE(4), y = payload.readUInt16LE(6);
        Input.dispatchMouseEvent({ type: 'mouseWheel', x, y, deltaX: dx, deltaY: dy }).catch(() => { });
      }
      break;
    }
    case 0x30: { // GamepadAxis — payload: axis(u8) value(i16); logged only, no CDP mapping yet
      if (payload.length >= 4) {
        const axisIdx = payload.readUInt8(0);
        const value = payload.readInt16LE(1); // -32768 to 32767
        // Map gamepad axis to scroll or custom event
        console.log(`[Gamepad] Axis ${axisIdx}: ${value}`);
      }
      break;
    }
    case 0x31: { // GamepadButton — payload: button(u8) pressed(u8)
      if (payload.length >= 2) {
        const buttonIdx = payload.readUInt8(0);
        const pressed = payload.readUInt8(1);
        // Map common gamepad buttons to keyboard events
        const keyMap = { 0: 'Enter', 1: 'Escape', 12: 'ArrowUp', 13: 'ArrowDown', 14: 'ArrowLeft', 15: 'ArrowRight' };
        const key = keyMap[buttonIdx];
        if (key) {
          Input.dispatchKeyEvent({
            type: pressed ? 'keyDown' : 'keyUp',
            key, code: key,
          }).catch(() => { });
        }
      }
      break;
    }
  }
}
// Legacy input handler (v0.2 format)
function handleLegacyInput(buf, Input, Page) {
if (buf.length < 1) return;
const t = buf[0];
@ -217,40 +587,53 @@ function handleInput(buf, Input, Page) {
type, touchPoints: [{ x, y, id: buf[6] || 0, radiusX: 10, radiusY: 10, force: phase === 2 ? 0 : 1 }]
}).catch(() => { });
}
if (t === 0x61 && buf.length >= 6) {
const x = buf.readUInt16LE(1), y = buf.readUInt16LE(3);
Input.dispatchMouseEvent({ type: 'mousePressed', x, y, button: 'left', clickCount: 1 }).catch(() => { });
setTimeout(() => Input.dispatchMouseEvent({ type: 'mouseReleased', x, y, button: 'left', clickCount: 1 }).catch(() => { }), 50);
}
if (t === 0x63 && buf.length >= 3) {
const len = buf.readUInt16LE(1);
const url = buf.slice(3, 3 + len).toString();
Page.navigate({ url }).catch(() => { });
console.log(`[Nav] → ${url}`);
}
}
// ─── Main ───
async function main() {
console.log(`\n DreamStack Screencast`);
console.log(` ─────────────────────`);
console.log(`\n DreamStack Screencast v0.5.0`);
console.log(` ──────────────────────────`);
console.log(` URL: ${TARGET_URL}`);
console.log(` Viewport: ${WIDTH}×${HEIGHT}`);
console.log(` Quality: ${QUALITY}% FPS: ${MAX_FPS}`);
console.log(` Headless: ${HEADLESS}\n`);
console.log(` Format: ${IMAGE_FORMAT.toUpperCase()}`);
console.log(` Tabs: ${TAB_COUNT}`);
console.log(` Audio: ${ENABLE_AUDIO}`);
console.log(` Record: ${RECORD_FILE || 'disabled'}`);
console.log(` Headless: ${HEADLESS}`);
console.log(` WS Port: ${WS_PORT} Monitor: ${MONITOR_PORT}`);
console.log(` Endpoints: /health, /screenshot`);
console.log(` Adaptive: ACK-driven quality\n`);
const chrome = await launchChrome();
startWS();
startMonitor();
await startScreencast();
startStatsLogger();
console.log(`\n ✓ Streaming! Panels → ws://0.0.0.0:${WS_PORT}`);
// Start screencast for each tab
if (TAB_COUNT === 1) {
await startScreencast('default', TARGET_URL);
} else {
for (let i = 0; i < TAB_COUNT; i++) {
const channel = `tab-${i}`;
await startScreencast(channel, TARGET_URL);
}
}
console.log(`\n ✓ Streaming! Panels → ws://0.0.0.0:${WS_PORT}/stream/{channel}`);
console.log(` ✓ Monitor → http://localhost:${MONITOR_PORT}\n`);
process.on('SIGINT', () => {
console.log('\n[Stop]');
if (recordStream) {
recordStream.end();
console.log(`[Record] Saved ${recordFrameCount} frames to ${RECORD_FILE}`);
}
chrome.kill();
process.exit(0);
});

View file

@ -1,6 +1,6 @@
{
"name": "ds-screencast",
"version": "1.0.0",
"version": "0.5.0",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"

View file

@ -1,11 +1,19 @@
# Changelog
All notable changes to this package will be documented in this file.
## [Unreleased]
## [0.1.0] - 2026-02-26
## [0.5.0] - 2026-03-09
### 🚀 Features
- Initial release — Browser WASM codec for ds-stream protocol
- **`auth_challenge_message`/`auth_response_message`** — auth handshake builders
- **`scramble`/`descramble`** — XOR payload obfuscation
- **`decode_recording_metadata`** — decode 24-byte .dsrec metadata header
- **`FRAME_AUTH`** constant (0x0F)
### 🧪 Tests
- 4 new tests: auth challenge, auth response, XOR roundtrip, recording metadata decode
## [0.4.0] - 2026-03-09 — SessionRecorder/Player, adaptive_quality_tier, bandwidth_kbps
## [0.3.0] - 2026-03-09 — TypedFrameHeader, recording decoder, compressed pixel builder
## [0.2.0] - 2026-03-09 — parse_stream, frame_type_name, is_input_frame, ack_message
## [0.1.0] - 2026-02-26 — Initial release

View file

@ -1,6 +1,6 @@
[package]
name = "ds-stream-wasm"
version = "0.1.0"
version = "0.5.0"
edition.workspace = true
license.workspace = true
description = "WebAssembly codec for DreamStack bitstream protocol"

View file

@ -30,6 +30,7 @@ pub const FRAME_NEURAL_AUDIO: u8 = 0x41;
pub const FRAME_NEURAL_ACTUATOR: u8 = 0x42;
pub const FRAME_NEURAL_LATENT: u8 = 0x43;
pub const FRAME_KEYFRAME: u8 = 0xF0;
pub const FRAME_ACK: u8 = 0xFD;
pub const FRAME_PING: u8 = 0xFE;
pub const FRAME_END: u8 = 0xFF;
@ -273,6 +274,422 @@ pub fn sensor_input_message(seq: u16, timestamp: u32, sensor_type: u8, x: i16, y
build_message(INPUT_SENSOR, FLAG_INPUT, seq, timestamp, 0, 0, &payload)
}
// ─── ACK Builder ───
/// Build an ACK message whose 4-byte payload is `ack_seq` (u16 LE)
/// followed by `rtt_ms` (u16 LE), wrapped in a FRAME_ACK header.
#[wasm_bindgen]
pub fn ack_message(seq: u16, timestamp: u32, ack_seq: u16, rtt_ms: u16) -> Vec<u8> {
    let [a0, a1] = ack_seq.to_le_bytes();
    let [r0, r1] = rtt_ms.to_le_bytes();
    // Zero width/height: ACKs carry no pixel geometry.
    build_message(FRAME_ACK, 0, seq, timestamp, 0, 0, &[a0, a1, r0, r1])
}
// ─── Stream Parsing ───
/// Parse a buffer containing one or more concatenated messages.
///
/// Returns a flat array of `[header_offset, payload_offset, payload_len, …]`
/// triples, one triple per complete message. Parsing stops at the first
/// truncated header or payload, so a partial trailing message is ignored
/// (the caller can retry once more bytes arrive).
#[wasm_bindgen]
pub fn parse_stream(buf: &[u8]) -> Vec<u32> {
    let mut triples = Vec::new();
    let mut cursor = 0usize;
    // Walk header-by-header; `get` returns None once a full header no longer fits.
    while let Some(header) = buf.get(cursor..cursor + HEADER_SIZE) {
        let payload_len =
            u32::from_le_bytes([header[12], header[13], header[14], header[15]]) as usize;
        let end = cursor + HEADER_SIZE + payload_len;
        if end > buf.len() {
            break; // payload truncated — stop here
        }
        triples.push(cursor as u32);
        triples.push((cursor + HEADER_SIZE) as u32);
        triples.push(payload_len as u32);
        cursor = end;
    }
    triples
}
// ─── Debug / Introspection Helpers ───
/// Get a human-readable name for a frame type byte.
///
/// When a frame type and an input type share the same byte value
/// (e.g. 0x31), the frame-type name wins here; use
/// [`frame_type_name_with_flag`] with FLAG_INPUT to disambiguate.
#[wasm_bindgen]
pub fn frame_type_name(type_byte: u8) -> String {
    String::from(match type_byte {
        0x01 => "Pixels",
        0x02 => "CompressedPixels",
        0x03 => "DeltaPixels",
        0x10 => "AudioPcm",
        0x11 => "AudioCompressed",
        0x20 => "Haptic",
        0x21 => "Actuator",
        0x22 => "LedMatrix",
        0x30 => "SignalSync",
        0x31 => "SignalDiff",
        0x32 => "SchemaAnnounce",
        0x33 => "SubscribeFilter",
        0x40 => "NeuralFrame",
        0x41 => "NeuralAudio",
        0x42 => "NeuralActuator",
        0x43 => "NeuralLatent",
        0x50 => "Scroll",
        0x60 => "Resize",
        0x70 => "VoiceInput",
        0x71 => "CameraInput",
        0x80 => "SensorInput",
        0x90 => "BciInput",
        0xF0 => "Keyframe",
        0xFD => "Ack",
        0xFE => "Ping",
        0xFF => "End",
        _ => "Unknown",
    })
}
/// Get the frame type name considering the FLAG_INPUT flag.
///
/// With FLAG_INPUT set, shared byte values resolve to input-event names;
/// otherwise this defers to [`frame_type_name`].
#[wasm_bindgen]
pub fn frame_type_name_with_flag(type_byte: u8, flags: u8) -> String {
    // Guard: no input flag → plain frame-type naming.
    if flags & FLAG_INPUT == 0 {
        return frame_type_name(type_byte);
    }
    String::from(match type_byte {
        0x01 => "Pointer",
        0x02 => "PointerDown",
        0x03 => "PointerUp",
        0x10 => "KeyDown",
        0x11 => "KeyUp",
        0x20 => "Touch",
        0x21 => "TouchEnd",
        0x30 => "GamepadAxis",
        0x31 => "GamepadButton",
        0x40 => "Midi",
        0x50 => "Scroll",
        0x60 => "Resize",
        0x70 => "VoiceInput",
        0x71 => "CameraInput",
        0x80 => "SensorInput",
        0x90 => "BciInput",
        _ => "Unknown",
    })
}
/// Check if a frame type byte could represent an input event.
///
/// Note: several values overlap with frame types (e.g. 0x01 is both
/// Pixels and Pointer) — use FLAG_INPUT to disambiguate.
#[wasm_bindgen]
pub fn is_input_frame(type_byte: u8) -> bool {
    // All known input-event type bytes (pointer, key, touch, gamepad,
    // midi, scroll, resize, voice, camera, sensor, BCI).
    const INPUT_TYPES: [u8; 16] = [
        0x01, 0x02, 0x03, 0x10, 0x11, 0x20, 0x21, 0x30,
        0x31, 0x40, 0x50, 0x60, 0x70, 0x71, 0x80, 0x90,
    ];
    INPUT_TYPES.contains(&type_byte)
}
/// Compute compression stats. Returns `[ratio, savings_percent]`.
///
/// `ratio` is compressed/original; `savings_percent` is `(1 - ratio) * 100`.
/// An empty original is reported as "no compression": `[1.0, 0.0]`.
#[wasm_bindgen]
pub fn compression_stats(original_len: u32, compressed_len: u32) -> Vec<f64> {
    match original_len {
        0 => vec![1.0, 0.0], // guard against divide-by-zero
        n => {
            let ratio = f64::from(compressed_len) / f64::from(n);
            vec![ratio, (1.0 - ratio) * 100.0]
        }
    }
}
// ─── v0.3: TypedFrameHeader ───
/// Compressed pixel format constants
pub const COMPRESSED_FMT_PNG: u8 = 0;
pub const COMPRESSED_FMT_WEBP: u8 = 1;
pub const COMPRESSED_FMT_JPEG: u8 = 2;
/// DSREC recording magic bytes
pub const DSREC_MAGIC: [u8; 3] = [0x44, 0x53, 0x01];
/// A structured frame header with JS-friendly getter methods.
#[wasm_bindgen]
pub struct TypedFrameHeader {
    frame_type: u8,     // raw type byte
    flags: u8,          // FLAG_* bitfield
    seq: u16,           // sequence number
    timestamp: u32,     // sender-relative timestamp
    width: u16,         // frame width in pixels (0 for non-pixel frames)
    height: u16,        // frame height in pixels (0 for non-pixel frames)
    payload_length: u32, // byte length of the payload that follows the header
}

#[wasm_bindgen]
impl TypedFrameHeader {
    /// Raw frame-type byte; see [`frame_type_name`] for a readable label.
    pub fn frame_type(&self) -> u8 {
        self.frame_type
    }
    /// Raw flags bitfield (FLAG_* constants).
    pub fn flags(&self) -> u8 {
        self.flags
    }
    /// Sequence number (wraps at u16).
    pub fn seq(&self) -> u16 {
        self.seq
    }
    /// Sender-relative timestamp.
    pub fn timestamp(&self) -> u32 {
        self.timestamp
    }
    /// Frame width in pixels.
    pub fn width(&self) -> u16 {
        self.width
    }
    /// Frame height in pixels.
    pub fn height(&self) -> u16 {
        self.height
    }
    /// Byte length of the payload following the 16-byte header.
    pub fn payload_length(&self) -> u32 {
        self.payload_length
    }
    /// True when FLAG_INPUT is set.
    pub fn is_input(&self) -> bool {
        self.flags & FLAG_INPUT != 0
    }
    /// True when FLAG_KEYFRAME is set.
    pub fn is_keyframe(&self) -> bool {
        self.flags & FLAG_KEYFRAME != 0
    }
    /// True when FLAG_COMPRESSED is set.
    pub fn is_compressed(&self) -> bool {
        self.flags & FLAG_COMPRESSED != 0
    }
    /// Frame-type name, ignoring the input flag.
    pub fn type_name(&self) -> String {
        frame_type_name(self.frame_type)
    }
    /// Frame-type name resolved against FLAG_INPUT.
    pub fn type_name_resolved(&self) -> String {
        frame_type_name_with_flag(self.frame_type, self.flags)
    }
}
/// Decode a buffer into a TypedFrameHeader struct (JS-friendly object).
///
/// Returns `None` when the buffer is shorter than a full header.
#[wasm_bindgen]
pub fn decode_header_typed(buf: &[u8]) -> Option<TypedFrameHeader> {
    // Checked sub-slice doubles as the minimum-length guard.
    let h = buf.get(..HEADER_SIZE)?;
    let u16_at = |i: usize| u16::from_le_bytes([h[i], h[i + 1]]);
    Some(TypedFrameHeader {
        frame_type: h[0],
        flags: h[1],
        seq: u16_at(2),
        timestamp: u32::from_le_bytes([h[4], h[5], h[6], h[7]]),
        width: u16_at(8),
        height: u16_at(10),
        payload_length: u32::from_le_bytes([h[12], h[13], h[14], h[15]]),
    })
}
// ─── v0.3: Recording Decoder ───
/// Decode one recording entry from a .dsrec file buffer at given offset.
/// Returns [timestamp_hi, timestamp_lo, msg_offset, msg_len] or empty if invalid.
///
/// Entry layout: [timestamp_us: u64 LE][msg_len: u32 LE][msg bytes].
#[wasm_bindgen]
pub fn decode_recording_entry(buf: &[u8], offset: usize) -> Vec<u32> {
    // checked_add: a pathological offset near usize::MAX would otherwise wrap
    // in release builds, pass the bounds test, and panic on indexing below.
    let msg_offset = match offset.checked_add(12) {
        Some(end) if end <= buf.len() => end,
        _ => return Vec::new(),
    };
    let ts = u64::from_le_bytes([
        buf[offset], buf[offset+1], buf[offset+2], buf[offset+3],
        buf[offset+4], buf[offset+5], buf[offset+6], buf[offset+7],
    ]);
    let msg_len = u32::from_le_bytes([
        buf[offset+8], buf[offset+9], buf[offset+10], buf[offset+11],
    ]);
    // Reject entries whose payload would run past the end of the buffer
    // (checked_add again: msg_len can approach u32::MAX on corrupt input,
    // which overflows a 32-bit usize on wasm32).
    match msg_offset.checked_add(msg_len as usize) {
        Some(end) if end <= buf.len() => {
            vec![(ts >> 32) as u32, ts as u32, msg_offset as u32, msg_len]
        }
        _ => Vec::new(),
    }
}
// ─── v0.3: Compressed Pixel Builder ───
/// Build a CompressedPixels message with format byte.
///
/// Payload layout: 1-byte format tag followed by the encoded image bytes.
#[wasm_bindgen]
pub fn compressed_pixel_message(
    seq: u16, timestamp: u32, width: u16, height: u16,
    format: u8, image_data: &[u8],
) -> Vec<u8> {
    let payload: Vec<u8> = std::iter::once(format)
        .chain(image_data.iter().copied())
        .collect();
    build_message(FRAME_COMPRESSED, FLAG_COMPRESSED, seq, timestamp, width, height, &payload)
}
// ─── v0.4: Session Recorder ───
/// In-memory recording session. Accumulates messages, exports as .dsrec bytes.
#[wasm_bindgen]
pub struct SessionRecorder {
    // Entries are kept in insertion order; export() serializes them as-is.
    entries: Vec<(u64, Vec<u8>)>, // (timestamp_ms, message)
}
#[wasm_bindgen]
impl SessionRecorder {
    #[wasm_bindgen(constructor)]
    pub fn new() -> Self {
        Self { entries: Vec::new() }
    }
    /// Record a message with millisecond timestamp.
    pub fn record(&mut self, timestamp_ms: u64, msg: &[u8]) {
        self.entries.push((timestamp_ms, msg.to_vec()));
    }
    /// Export as .dsrec format bytes.
    pub fn export(&self) -> Vec<u8> {
        // Size the buffer exactly up front: magic + (8-byte ts + 4-byte len
        // + message) per entry — avoids repeated grow-and-copy.
        let total = DSREC_MAGIC.len()
            + self.entries.iter().map(|(_, m)| 12 + m.len()).sum::<usize>();
        let mut buf = Vec::with_capacity(total);
        buf.extend_from_slice(&DSREC_MAGIC);
        for (ts, msg) in &self.entries {
            // Convert ms to us for .dsrec format; saturate rather than
            // overflow (and panic in debug) on absurd timestamps.
            let ts_us = ts.saturating_mul(1000);
            buf.extend_from_slice(&ts_us.to_le_bytes());
            buf.extend_from_slice(&(msg.len() as u32).to_le_bytes());
            buf.extend_from_slice(msg);
        }
        buf
    }
    pub fn entry_count(&self) -> u32 {
        self.entries.len() as u32
    }
    /// Span between first and last recorded timestamps, in ms.
    /// Returns 0 for empty sessions or out-of-order timestamps.
    pub fn duration_ms(&self) -> u64 {
        if self.entries.is_empty() { return 0; }
        let first = self.entries.first().unwrap().0;
        let last = self.entries.last().unwrap().0;
        // saturating_sub: a non-monotonic recording must not panic.
        last.saturating_sub(first)
    }
}
/// In-memory .dsrec player. Load buffer, iterate entries.
#[wasm_bindgen]
pub struct SessionPlayer {
    // Pre-parsed index into `buf`; entries never point past buf.len().
    entries: Vec<(u64, usize, usize)>, // (timestamp_us, msg_offset, msg_len)
    buf: Vec<u8>,   // owned copy of the full .dsrec buffer
    cursor: usize,  // index of the next entry returned by next_entry()
}
#[wasm_bindgen]
impl SessionPlayer {
    /// Load a .dsrec buffer. Returns None if magic is invalid.
    ///
    /// Parsing stops at the first truncated or corrupt entry; everything
    /// decoded up to that point is kept.
    pub fn load(buf: &[u8]) -> Option<SessionPlayer> {
        if buf.len() < 3 || buf[0..3] != DSREC_MAGIC {
            return None;
        }
        let mut entries = Vec::new();
        let mut offset = 3; // skip magic
        while offset + 12 <= buf.len() {
            let ts = u64::from_le_bytes([
                buf[offset], buf[offset+1], buf[offset+2], buf[offset+3],
                buf[offset+4], buf[offset+5], buf[offset+6], buf[offset+7],
            ]);
            let msg_len = u32::from_le_bytes([
                buf[offset+8], buf[offset+9], buf[offset+10], buf[offset+11],
            ]) as usize;
            let msg_offset = offset + 12;
            // checked_add: a corrupt msg_len near u32::MAX would wrap a
            // 32-bit usize on wasm32 and defeat the bounds check.
            let end = match msg_offset.checked_add(msg_len) {
                Some(end) if end <= buf.len() => end,
                _ => break,
            };
            entries.push((ts, msg_offset, msg_len));
            offset = end;
        }
        Some(SessionPlayer { entries, buf: buf.to_vec(), cursor: 0 })
    }
    /// Get next entry: [ts_hi, ts_lo, msg_offset, msg_len]. Empty if done.
    pub fn next_entry(&mut self) -> Vec<u32> {
        if self.cursor >= self.entries.len() { return Vec::new(); }
        let (ts, offset, len) = self.entries[self.cursor];
        self.cursor += 1;
        vec![(ts >> 32) as u32, ts as u32, offset as u32, len as u32]
    }
    /// Seek to first entry at or after timestamp (microseconds).
    pub fn seek_to_time(&mut self, timestamp_us: u64) {
        self.cursor = self.entries.iter()
            .position(|(ts, _, _)| *ts >= timestamp_us)
            .unwrap_or(self.entries.len());
    }
    pub fn total_entries(&self) -> u32 {
        self.entries.len() as u32
    }
    /// Span between first and last entry timestamps, in ms.
    /// Returns 0 for empty recordings or out-of-order timestamps.
    pub fn duration_ms(&self) -> u64 {
        if self.entries.is_empty() { return 0; }
        let first = self.entries.first().unwrap().0;
        let last = self.entries.last().unwrap().0;
        // saturating_sub guards against non-monotonic recordings; /1000: us to ms.
        last.saturating_sub(first) / 1000
    }
    /// Get raw message bytes at an offset (for JS to decode).
    /// Returns empty for out-of-range (or overflowing) requests.
    pub fn get_message(&self, offset: usize, len: usize) -> Vec<u8> {
        // checked_add + get: offset+len must neither wrap nor exceed the buffer.
        offset.checked_add(len)
            .and_then(|end| self.buf.get(offset..end))
            .map_or_else(Vec::new, <[u8]>::to_vec)
    }
}
// ─── v0.4: Quality Helpers ───
/// Determine quality tier from RTT. 0=Full (<50ms), 1=Reduced (50-150ms), 2=Minimal (>150ms).
#[wasm_bindgen]
pub fn adaptive_quality_tier(rtt_ms: u16) -> u8 {
    // Range match makes the tier boundaries explicit.
    match rtt_ms {
        0..=49 => 0,
        50..=149 => 1,
        _ => 2,
    }
}
/// Calculate bandwidth in kilobits per second.
///
/// Returns 0.0 when no time has elapsed (avoids division by zero).
#[wasm_bindgen]
pub fn bandwidth_kbps(bytes: u32, elapsed_ms: u32) -> f64 {
    if elapsed_ms == 0 { return 0.0; }
    let bits = f64::from(bytes) * 8.0;
    let seconds = f64::from(elapsed_ms) / 1000.0;
    bits / seconds / 1000.0
}
// ─── v0.5: Auth Constants ───
/// Frame type byte for the auth handshake (challenge/response messages).
pub const FRAME_AUTH: u8 = 0x0F;
/// Build an auth challenge message (server → client).
///
/// Payload: [phase: 1 byte = 0][nonce: exactly 8 bytes, zero-padded].
#[wasm_bindgen]
pub fn auth_challenge_message(seq: u16, nonce: &[u8]) -> Vec<u8> {
    let mut payload = Vec::with_capacity(9);
    payload.push(0u8); // phase 0 = challenge
    // Take at most 8 nonce bytes, then zero-pad to the fixed 9-byte payload.
    let take = nonce.len().min(8);
    payload.extend_from_slice(&nonce[..take]);
    payload.resize(9, 0);
    build_message(FRAME_AUTH, 0, seq, 0, 0, 0, &payload)
}
/// Build an auth response message (client → server).
///
/// Payload: [phase: 1 byte = 1][nonce: exactly 8 bytes, zero-padded][token].
#[wasm_bindgen]
pub fn auth_response_message(seq: u16, nonce: &[u8], token: &[u8]) -> Vec<u8> {
    let mut payload = Vec::with_capacity(9 + token.len());
    payload.push(1u8); // phase 1 = response
    // Take at most 8 nonce bytes, then zero-pad to a fixed 9-byte prefix.
    let take = nonce.len().min(8);
    payload.extend_from_slice(&nonce[..take]);
    payload.resize(9, 0);
    // Variable-length token follows the fixed prefix.
    payload.extend_from_slice(token);
    build_message(FRAME_AUTH, 0, seq, 0, 0, 0, &payload)
}
// ─── v0.5: XOR Scrambling ───
/// XOR-scramble payload bytes with a repeating key.
///
/// An empty key is a no-op (returns a copy of the payload).
#[wasm_bindgen]
pub fn scramble(payload: &[u8], key: &[u8]) -> Vec<u8> {
    if key.is_empty() { return payload.to_vec(); }
    // cycle() repeats the key indefinitely; zip truncates at payload length.
    payload.iter()
        .zip(key.iter().cycle())
        .map(|(&b, &k)| b ^ k)
        .collect()
}
/// Descramble (same as scramble — XOR is its own inverse).
// Kept as a separate export so the JS API reads symmetrically.
#[wasm_bindgen]
pub fn descramble(payload: &[u8], key: &[u8]) -> Vec<u8> {
    scramble(payload, key)
}
// ─── v0.5: Recording Metadata ───
/// Decode recording metadata (24 bytes after .dsrec magic).
/// Returns [version, created_at_hi, created_at_lo, duration_ms, frame_count, width, height].
///
/// Returns an empty Vec when the buffer is shorter than 24 bytes.
#[wasm_bindgen]
pub fn decode_recording_metadata(buf: &[u8]) -> Vec<u32> {
    if buf.len() < 24 { return Vec::new(); }
    // Little-endian field readers, widened to u32 for the JS-facing Vec.
    let u16_at = |i: usize| u32::from(u16::from_le_bytes([buf[i], buf[i + 1]]));
    let u32_at = |i: usize| u32::from_le_bytes([buf[i], buf[i + 1], buf[i + 2], buf[i + 3]]);
    let created_at = u64::from_le_bytes([buf[2], buf[3], buf[4], buf[5], buf[6], buf[7], buf[8], buf[9]]);
    vec![
        u16_at(0),                 // version
        (created_at >> 32) as u32, // created_at high word
        created_at as u32,         // created_at low word
        u32_at(10),                // duration_ms
        u32_at(14),                // frame_count
        u16_at(18),                // width
        u16_at(20),                // height
    ]
}
// ─── Tests ───
#[cfg(test)]
@ -383,4 +800,269 @@ mod tests {
assert_eq!(header[1], FLAG_INPUT as u32);
assert_eq!(msg[HEADER_SIZE], 0);
}
// ─── v0.2 Tests ───
// ACK payload carries (ack_seq, rtt_ms) as two little-endian u16s.
#[test]
fn test_ack_message() {
    let msg = ack_message(5, 1000, 42, 15);
    let header = decode_header(&msg);
    assert_eq!(header[0], FRAME_ACK as u32);
    assert_eq!(header[6], 4); // payload = 2 + 2 bytes
    // Check payload
    let ack_seq = u16::from_le_bytes([msg[HEADER_SIZE], msg[HEADER_SIZE + 1]]);
    let rtt = u16::from_le_bytes([msg[HEADER_SIZE + 2], msg[HEADER_SIZE + 3]]);
    assert_eq!(ack_seq, 42);
    assert_eq!(rtt, 15);
}
// parse_stream returns (header_offset, payload_offset, payload_len) triples.
#[test]
fn test_parse_stream_single() {
    let msg = signal_diff_message(1, 100, b"{\"x\":1}");
    let offsets = parse_stream(&msg);
    assert_eq!(offsets.len(), 3);
    assert_eq!(offsets[0], 0); // header_offset
    assert_eq!(offsets[1], HEADER_SIZE as u32); // payload_offset
    assert_eq!(offsets[2], 7); // payload_len
}
// Two back-to-back messages produce two triples; the second starts
// exactly where the first message ends.
#[test]
fn test_parse_stream_multi() {
    let msg1 = signal_diff_message(1, 100, b"{\"a\":1}");
    let msg2 = signal_sync_message(2, 200, b"{\"b\":2}");
    let mut combined = Vec::new();
    combined.extend_from_slice(&msg1);
    combined.extend_from_slice(&msg2);
    let offsets = parse_stream(&combined);
    assert_eq!(offsets.len(), 6); // 2 messages × 3 values each
    assert_eq!(offsets[0], 0);
    assert_eq!(offsets[3], msg1.len() as u32);
}
// An incomplete trailing message yields no triples at all.
#[test]
fn test_parse_stream_partial() {
    let msg = signal_diff_message(1, 100, b"{\"x\":1}");
    // Truncate the message — should return 0 messages
    let partial = &msg[..msg.len() - 2];
    let offsets = parse_stream(partial);
    assert_eq!(offsets.len(), 0);
}
// Known bytes resolve to their names; anything else is "Unknown".
#[test]
fn test_frame_type_name() {
    assert_eq!(frame_type_name(FRAME_PIXELS), "Pixels");
    assert_eq!(frame_type_name(FRAME_ACK), "Ack");
    assert_eq!(frame_type_name(FRAME_PING), "Ping");
    assert_eq!(frame_type_name(INPUT_SCROLL), "Scroll");
    assert_eq!(frame_type_name(0x99), "Unknown");
}
// Input detection is byte-based only, so shared bytes report true.
#[test]
fn test_is_input_frame() {
    assert!(is_input_frame(INPUT_POINTER));
    assert!(is_input_frame(INPUT_SCROLL));
    assert!(is_input_frame(INPUT_BCI));
    // FRAME_PIXELS=0x01=INPUT_POINTER — shared value, returns true
    assert!(is_input_frame(FRAME_PIXELS));
    // Unique frame types that are NOT input types
    assert!(!is_input_frame(FRAME_PING));
    assert!(!is_input_frame(FRAME_ACK));
    assert!(!is_input_frame(FRAME_KEYFRAME));
}
// 300/1000 → ratio 0.3, savings 70%.
#[test]
fn test_compression_stats() {
    let stats = compression_stats(1000, 300);
    assert!((stats[0] - 0.3).abs() < 0.001);
    assert!((stats[1] - 70.0).abs() < 0.1);
}
// ─── v0.3 Tests ───
// Every getter of TypedFrameHeader reflects the encoded header fields.
#[test]
fn test_typed_frame_header() {
    let msg = build_message(FRAME_SIGNAL_SYNC, FLAG_KEYFRAME, 7, 5000, 320, 240, b"{}");
    let typed = decode_header_typed(&msg).unwrap();
    assert_eq!(typed.frame_type(), FRAME_SIGNAL_SYNC);
    assert_eq!(typed.flags(), FLAG_KEYFRAME);
    assert_eq!(typed.seq(), 7);
    assert_eq!(typed.timestamp(), 5000);
    assert_eq!(typed.width(), 320);
    assert_eq!(typed.height(), 240);
    assert_eq!(typed.payload_length(), 2);
    assert!(!typed.is_input());
    assert!(typed.is_keyframe());
    assert_eq!(typed.type_name(), "SignalSync");
}
// FLAG_INPUT makes type_name_resolved() pick the input namespace.
#[test]
fn test_typed_frame_header_input() {
    // Build a pointer input message using input_message
    let mut payload = Vec::new();
    payload.extend_from_slice(&50u16.to_le_bytes()); // x
    payload.extend_from_slice(&60u16.to_le_bytes()); // y
    let msg = input_message(INPUT_POINTER, 1, 100, &payload);
    let typed = decode_header_typed(&msg).unwrap();
    assert!(typed.is_input());
    assert_eq!(typed.type_name_resolved(), "Pointer");
}
// Entry decodes into [ts_hi, ts_lo, msg_offset, msg_len].
#[test]
fn test_decode_recording_entry() {
    let mut entry = Vec::new();
    let ts: u64 = 123456;
    entry.extend_from_slice(&ts.to_le_bytes());
    let msg = signal_diff_message(1, 100, b"hi");
    entry.extend_from_slice(&(msg.len() as u32).to_le_bytes());
    entry.extend_from_slice(&msg);
    let result = decode_recording_entry(&entry, 0);
    assert_eq!(result.len(), 4);
    assert_eq!(result[0], 0); // ts high
    assert_eq!(result[1], 123456); // ts low
    assert_eq!(result[2], 12); // msg_offset
    assert_eq!(result[3], msg.len() as u32);
}
// A buffer shorter than the 12-byte entry prefix decodes to empty.
#[test]
fn test_decode_recording_entry_truncated() {
    let result = decode_recording_entry(&[0u8; 5], 0);
    assert!(result.is_empty());
}
// Format tag is the first payload byte, image bytes follow unchanged.
#[test]
fn test_compressed_pixel_message() {
    let fake_webp = vec![0x52, 0x49, 0x46, 0x46];
    let msg = compressed_pixel_message(1, 100, 640, 480, COMPRESSED_FMT_WEBP, &fake_webp);
    let header = decode_header(&msg);
    assert_eq!(header[0], FRAME_COMPRESSED as u32);
    assert_eq!(header[1], FLAG_COMPRESSED as u32);
    assert_eq!(header[4], 640);
    assert_eq!(header[5], 480);
    assert_eq!(msg[HEADER_SIZE], COMPRESSED_FMT_WEBP);
    assert_eq!(&msg[HEADER_SIZE + 1..HEADER_SIZE + 5], &fake_webp);
}
// ─── v0.4 Tests ───
// Export starts with the DSREC magic; duration spans first to last entry.
#[test]
fn test_session_recorder_export() {
    let mut rec = SessionRecorder::new();
    let msg1 = signal_diff_message(1, 100, b"{}");
    let msg2 = signal_sync_message(2, 200, b"{\"a\":1}");
    rec.record(0, &msg1);
    rec.record(100, &msg2);
    assert_eq!(rec.entry_count(), 2);
    assert_eq!(rec.duration_ms(), 100);
    let exported = rec.export();
    assert_eq!(&exported[0..3], &DSREC_MAGIC);
}
// Recorder → player roundtrip; next_entry yields then returns empty at end.
#[test]
fn test_session_player_load() {
    let mut rec = SessionRecorder::new();
    let msg1 = signal_diff_message(1, 100, b"hi");
    rec.record(0, &msg1);
    rec.record(500, &msg1);
    let exported = rec.export();
    let mut player = SessionPlayer::load(&exported).unwrap();
    assert_eq!(player.total_entries(), 2);
    let e1 = player.next_entry();
    assert_eq!(e1.len(), 4);
    assert_eq!(e1[3], msg1.len() as u32);
    let e2 = player.next_entry();
    assert_eq!(e2.len(), 4);
    let e3 = player.next_entry();
    assert!(e3.is_empty()); // past end
}
// seek_to_time positions the cursor at the first entry >= the target.
#[test]
fn test_session_player_seek() {
    let mut rec = SessionRecorder::new();
    let msg = signal_diff_message(1, 100, b"x");
    rec.record(0, &msg);
    rec.record(100, &msg);
    rec.record(200, &msg);
    let exported = rec.export();
    let mut player = SessionPlayer::load(&exported).unwrap();
    // Seek to 100ms = 100_000 us
    player.seek_to_time(100_000);
    let e = player.next_entry();
    assert_eq!(e.len(), 4);
    // Should be the second entry (ts=100ms=100_000us)
}
// Tier boundaries: <50 full, 50-149 reduced, >=150 minimal.
#[test]
fn test_adaptive_quality_tier() {
    assert_eq!(adaptive_quality_tier(10), 0); // Full
    assert_eq!(adaptive_quality_tier(49), 0); // Full
    assert_eq!(adaptive_quality_tier(50), 1); // Reduced
    assert_eq!(adaptive_quality_tier(149), 1); // Reduced
    assert_eq!(adaptive_quality_tier(150), 2); // Minimal
    assert_eq!(adaptive_quality_tier(500), 2); // Minimal
}
// 1 MB in 1 s is 8000 kilobits/s; zero elapsed time reports 0.
#[test]
fn test_bandwidth_kbps() {
    let kbps = bandwidth_kbps(1_000_000, 1000);
    assert!((kbps - 8000.0).abs() < 1.0, "Expected ~8000 kbps: {}", kbps);
    assert_eq!(bandwidth_kbps(0, 0), 0.0);
}
// ─── v0.5 Tests ───
// Challenge payload = phase byte 0 followed by the 8-byte nonce.
#[test]
fn test_auth_challenge_message() {
    let nonce = [1u8, 2, 3, 4, 5, 6, 7, 8];
    let msg = auth_challenge_message(1, &nonce);
    let header = decode_header(&msg);
    assert_eq!(header[0], FRAME_AUTH as u32);
    assert_eq!(msg[HEADER_SIZE], 0); // phase = challenge
    assert_eq!(&msg[HEADER_SIZE + 1..HEADER_SIZE + 9], &nonce);
}
// Response payload = phase byte 1, 8-byte nonce, then the token bytes.
#[test]
fn test_auth_response_message() {
    let nonce = [1u8; 8];
    let token = b"my-secret";
    let msg = auth_response_message(2, &nonce, token);
    assert_eq!(msg[HEADER_SIZE], 1); // phase = response
    assert_eq!(&msg[HEADER_SIZE + 9..HEADER_SIZE + 9 + token.len()], token);
}
// XOR is self-inverse: descramble(scramble(x)) == x.
#[test]
fn test_xor_scramble_roundtrip() {
    let data = b"hello world";
    let key = b"key123";
    let scrambled = scramble(data, key);
    assert_ne!(scrambled, data.to_vec());
    let restored = descramble(&scrambled, key);
    assert_eq!(restored, data.to_vec());
}
// Fields decode from their documented offsets in the 24-byte layout.
#[test]
fn test_decode_recording_metadata() {
    // Build a 24-byte metadata buffer
    let mut buf = [0u8; 24];
    buf[0..2].copy_from_slice(&1u16.to_le_bytes()); // version
    buf[2..10].copy_from_slice(&1710000000u64.to_le_bytes()); // created_at
    buf[10..14].copy_from_slice(&5000u32.to_le_bytes()); // duration_ms
    buf[14..18].copy_from_slice(&150u32.to_le_bytes()); // frame_count
    buf[18..20].copy_from_slice(&1920u16.to_le_bytes()); // width
    buf[20..22].copy_from_slice(&1080u16.to_le_bytes()); // height
    let result = decode_recording_metadata(&buf);
    assert_eq!(result.len(), 7);
    assert_eq!(result[0], 1); // version
    assert_eq!(result[3], 5000); // duration_ms
    assert_eq!(result[4], 150); // frame_count
    assert_eq!(result[5], 1920); // width
    assert_eq!(result[6], 1080); // height
}
}

View file

@ -1,11 +1,20 @@
# Changelog
All notable changes to this package will be documented in this file.
## [Unreleased]
## [0.1.0] - 2026-02-26
## [0.5.0] - 2026-03-09
### 🚀 Features
- Initial release — Binary streaming protocol, WebSocket relay, RLE/XOR codecs, peer mode, channel routing
- **`FrameType::Auth`** (0x0F) — new frame type for authentication handshake
- **`AuthPayload`** struct — phase, nonce, token with encode/decode
- **`scramble_payload`/`descramble_payload`** — XOR obfuscation with repeating key
- **`RecordingMetadata`** struct — 24-byte header (version, created_at, duration_ms, frame_count, width, height)
- **`auth_challenge_frame`/`auth_response_frame`** — auth handshake frame builders
### 🧪 Tests
- 4 new tests: auth payload roundtrip, XOR scramble, recording metadata, auth frame builders
## [0.4.0] - 2026-03-09 — AudioFormat, AudioHeader, AdaptiveQuality, BandwidthEstimator
## [0.3.0] - 2026-03-09 — CompressedPixelFormat, RecordingWriter/Reader, compressed_pixel_frame
## [0.2.0] - 2026-03-09 — StreamParser, Ack/AckEvent, compression_ratio
## [0.1.0] - 2026-02-26 — Initial release

View file

@ -1,6 +1,6 @@
[package]
name = "ds-stream"
version = "0.1.0"
version = "0.5.0"
edition.workspace = true
license.workspace = true
description = "Universal bitstream streaming — any input to any output"

View file

@ -4,6 +4,67 @@
use crate::protocol::*;
// ─── Stream Parser ───
/// Accumulates bytes from a transport and yields complete messages.
///
/// Handles partial reads: feed arbitrary chunks in, get complete
/// `(FrameHeader, Vec<u8>)` tuples out.
///
/// ```rust
/// let mut parser = StreamParser::new();
/// parser.feed(&data_from_websocket);
/// while let Some((header, payload)) = parser.next_message() {
/// // process message
/// }
/// ```
pub struct StreamParser {
    buf: Vec<u8>, // accumulated, not-yet-consumed transport bytes
}
impl StreamParser {
    pub fn new() -> Self {
        // 4 KiB initial capacity covers typical message bursts without regrowth.
        Self { buf: Vec::with_capacity(4096) }
    }
    /// Feed raw bytes into the parser.
    pub fn feed(&mut self, data: &[u8]) {
        self.buf.extend_from_slice(data);
    }
    /// Try to extract the next complete message.
    /// Returns `Some((header, payload))` if a complete message is available.
    pub fn next_message(&mut self) -> Option<(FrameHeader, Vec<u8>)> {
        if self.buf.len() < HEADER_SIZE {
            return None;
        }
        // NOTE(review): if decode returns None on corrupt header bytes, those
        // bytes are never drained and the parser stalls — presumably callers
        // recover via clear(); confirm the intended recovery path.
        let header = FrameHeader::decode(&self.buf)?;
        let total = HEADER_SIZE + header.length as usize;
        if self.buf.len() < total {
            // Header is complete but payload is still partial — wait for more.
            return None;
        }
        let payload = self.buf[HEADER_SIZE..total].to_vec();
        self.buf.drain(..total);
        Some((header, payload))
    }
    /// Number of bytes currently buffered.
    pub fn buffered(&self) -> usize {
        self.buf.len()
    }
    /// Clear the internal buffer.
    pub fn clear(&mut self) {
        self.buf.clear();
    }
}
impl Default for StreamParser {
    fn default() -> Self {
        Self::new()
    }
}
// ─── Frame Encoder ───
/// Encode a complete frame message: header + payload.
@ -308,6 +369,333 @@ pub fn bci_input(seq: u16, timestamp: u32, event: &BciInputEvent, samples: &[u8]
encode_input(InputType::BciInput, seq, timestamp, &payload)
}
// ─── ACK Frame Builder ───
/// Build an ACK message (receiver → source for RTT measurement).
pub fn ack_frame(seq: u16, timestamp: u32, event: &AckEvent) -> Vec<u8> {
    // ACKs carry no pixel data: width, height, and flags are all zero.
    encode_frame(FrameType::Ack, seq, timestamp, 0, 0, 0, &event.encode())
}
// ─── Compression Helpers ───
/// Compute compression ratio and savings percentage.
/// Returns `(ratio, savings_percent)` where ratio = compressed/original
/// and savings_percent = (1 - ratio) * 100.
///
/// An empty original is reported as "no compression": `(1.0, 0.0)`.
pub fn compression_ratio(original_len: usize, compressed_len: usize) -> (f32, f32) {
    match original_len {
        0 => (1.0, 0.0),
        n => {
            let ratio = compressed_len as f32 / n as f32;
            (ratio, (1.0 - ratio) * 100.0)
        }
    }
}
// ─── v0.3: Compressed Pixel Frame ───
/// Build a CompressedPixels frame with format byte prepended.
/// `format`: 0=PNG, 1=WebP, 2=JPEG (see `CompressedPixelFormat`).
pub fn compressed_pixel_frame(
    seq: u16, timestamp: u32, width: u16, height: u16,
    format: CompressedPixelFormat, image_data: &[u8],
) -> Vec<u8> {
    // Payload = 1-byte format tag, then the raw encoded image.
    let mut payload = vec![format as u8];
    payload.reserve(image_data.len());
    payload.extend_from_slice(image_data);
    encode_frame(FrameType::CompressedPixels, seq, timestamp, width, height, FLAG_COMPRESSED, &payload)
}
// ─── v0.3: Recording File I/O ───
/// Magic bytes for `.dsrec` recording files: "DS\x01"
// Same byte sequence as the wasm-side DSREC_MAGIC constant.
pub const RECORDING_MAGIC: [u8; 3] = [0x44, 0x53, 0x01]; // 'D', 'S', version=1
/// Writes ds-stream messages to a `.dsrec` file.
///
/// File format:
/// ```text
/// [magic: 3 bytes "DS\x01"]
/// [entry]*
///
/// Each entry:
/// [timestamp_us: u64 LE] — microsecond timestamp
/// [msg_len: u32 LE] — message length in bytes
/// [msg: msg_len bytes] — raw ds-stream message (header + payload)
/// ```
pub struct RecordingWriter<W: std::io::Write> {
    writer: W,          // underlying sink; caller supplies buffering if needed
    entry_count: u64,   // number of entries written so far
}
impl<W: std::io::Write> RecordingWriter<W> {
    /// Create a new recording writer. Writes the magic header immediately.
    pub fn new(mut writer: W) -> std::io::Result<Self> {
        writer.write_all(&RECORDING_MAGIC)?;
        Ok(Self { writer, entry_count: 0 })
    }
    /// Write a single message entry with timestamp.
    ///
    /// Entry layout: [timestamp_us: u64 LE][msg_len: u32 LE][msg bytes].
    pub fn write_entry(&mut self, timestamp_us: u64, message: &[u8]) -> std::io::Result<()> {
        self.writer.write_all(&timestamp_us.to_le_bytes())?;
        self.writer.write_all(&(message.len() as u32).to_le_bytes())?;
        self.writer.write_all(message)?;
        self.entry_count += 1;
        Ok(())
    }
    /// Flush the underlying writer.
    pub fn flush(&mut self) -> std::io::Result<()> {
        self.writer.flush()
    }
    /// Number of entries written.
    pub fn entry_count(&self) -> u64 {
        self.entry_count
    }
}
/// Reads ds-stream messages from a `.dsrec` file.
pub struct RecordingReader<R: std::io::Read> {
    reader: R,          // underlying source; caller supplies buffering if needed
    entries_read: u64,  // number of entries returned so far
}
impl<R: std::io::Read> RecordingReader<R> {
    /// Create a new recording reader. Validates the magic header.
    pub fn new(mut reader: R) -> std::io::Result<Self> {
        let mut magic = [0u8; 3];
        reader.read_exact(&mut magic)?;
        if magic != RECORDING_MAGIC {
            return Err(std::io::Error::new(
                std::io::ErrorKind::InvalidData,
                "Invalid .dsrec magic bytes",
            ));
        }
        Ok(Self { reader, entries_read: 0 })
    }
    /// Read the next entry. Returns `None` at end of file.
    /// Returns `Some((timestamp_us, message_bytes))`.
    pub fn next_entry(&mut self) -> std::io::Result<Option<(u64, Vec<u8>)>> {
        let mut ts_buf = [0u8; 8];
        // EOF exactly at an entry boundary is a clean end of recording;
        // EOF anywhere later (mid-entry) surfaces as an error.
        match self.reader.read_exact(&mut ts_buf) {
            Ok(_) => {}
            Err(e) if e.kind() == std::io::ErrorKind::UnexpectedEof => return Ok(None),
            Err(e) => return Err(e),
        }
        let timestamp_us = u64::from_le_bytes(ts_buf);
        let mut len_buf = [0u8; 4];
        self.reader.read_exact(&mut len_buf)?;
        let msg_len = u32::from_le_bytes(len_buf) as usize;
        // NOTE(review): msg_len comes straight from the file, so a corrupt
        // length can allocate up to 4 GiB here — consider a sanity cap.
        let mut message = vec![0u8; msg_len];
        self.reader.read_exact(&mut message)?;
        self.entries_read += 1;
        Ok(Some((timestamp_us, message)))
    }
    /// Number of entries read so far.
    pub fn entries_read(&self) -> u64 {
        self.entries_read
    }
    /// Read all remaining entries into a Vec.
    pub fn read_all(&mut self) -> std::io::Result<Vec<(u64, Vec<u8>)>> {
        let mut entries = Vec::new();
        while let Some(entry) = self.next_entry()? {
            entries.push(entry);
        }
        Ok(entries)
    }
}
// ─── v0.4: Adaptive Quality ───
/// Quality tier based on measured RTT.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum QualityTier {
    /// RTT < 50ms: full quality, all frames
    Full = 0,
    /// RTT 50-150ms: reduced quality, skip some frames
    Reduced = 1,
    /// RTT > 150ms: minimal — signal diffs only, no pixels
    Minimal = 2,
}
/// Adaptive quality controller that tracks RTT and recommends quality settings.
///
/// Keeps a sliding window of the most recent 30 RTT samples and derives the
/// current tier from their average after every `record_ack`.
pub struct AdaptiveQuality {
    // VecDeque gives O(1) eviction of the oldest sample (Vec::remove(0)
    // was O(n) per ACK).
    rtt_history: std::collections::VecDeque<u16>,
    max_history: usize,
    current_tier: QualityTier,
}
impl AdaptiveQuality {
    pub fn new() -> Self {
        Self {
            rtt_history: std::collections::VecDeque::new(),
            max_history: 30,
            current_tier: QualityTier::Full,
        }
    }
    /// Record an ACK RTT measurement and update the quality tier.
    pub fn record_ack(&mut self, rtt_ms: u16) {
        self.rtt_history.push_back(rtt_ms);
        // Evict the oldest sample once the window is full.
        if self.rtt_history.len() > self.max_history {
            self.rtt_history.pop_front();
        }
        self.current_tier = self.compute_tier();
    }
    /// Current quality tier.
    pub fn current_tier(&self) -> QualityTier {
        self.current_tier
    }
    /// Suggested JPEG/WebP quality (0-100).
    pub fn suggested_quality(&self) -> u8 {
        match self.current_tier {
            QualityTier::Full => 80,
            QualityTier::Reduced => 50,
            QualityTier::Minimal => 0,
        }
    }
    /// Suggested number of frames to skip between sends.
    pub fn suggested_skip_frames(&self) -> u32 {
        match self.current_tier {
            QualityTier::Full => 0,
            QualityTier::Reduced => 2,
            // Minimal tier sends no pixel frames at all.
            QualityTier::Minimal => u32::MAX,
        }
    }
    /// Average RTT over the history window (or 0 if empty).
    pub fn avg_rtt(&self) -> u16 {
        if self.rtt_history.is_empty() { return 0; }
        // Sum in u32 so ~30 samples of up to u16::MAX cannot overflow.
        let sum: u32 = self.rtt_history.iter().map(|&r| r as u32).sum();
        (sum / self.rtt_history.len() as u32) as u16
    }
    // Map the windowed average onto a tier (same thresholds as
    // `adaptive_quality_tier` on the wasm side).
    fn compute_tier(&self) -> QualityTier {
        let avg = self.avg_rtt();
        if avg < 50 { QualityTier::Full }
        else if avg < 150 { QualityTier::Reduced }
        else { QualityTier::Minimal }
    }
}
impl Default for AdaptiveQuality {
    fn default() -> Self {
        Self::new()
    }
}
/// Sliding window bandwidth estimator.
///
/// Samples outside the configured window (relative to the most recently
/// recorded timestamp) are pruned on every `record`.
pub struct BandwidthEstimator {
    /// (timestamp_ms, bytes) entries
    samples: Vec<(u64, usize)>,
    window_ms: u64,
}
impl BandwidthEstimator {
    pub fn new(window_seconds: u32) -> Self {
        Self {
            samples: Vec::new(),
            window_ms: u64::from(window_seconds) * 1000,
        }
    }
    /// Record bytes sent/received at a given timestamp.
    pub fn record(&mut self, timestamp_ms: u64, bytes: usize) {
        self.samples.push((timestamp_ms, bytes));
        self.prune(timestamp_ms);
    }
    /// Estimated throughput in bytes per second.
    ///
    /// Returns 0.0 until at least two samples span a non-zero interval.
    pub fn estimate_bps(&self) -> f64 {
        if self.samples.len() < 2 { return 0.0; }
        let first = self.samples.first().unwrap().0;
        let last = self.samples.last().unwrap().0;
        // saturating_sub: out-of-order timestamps yield "no estimate"
        // instead of a debug-build underflow panic.
        let elapsed = last.saturating_sub(first) as f64 / 1000.0;
        if elapsed <= 0.0 { return 0.0; }
        let total: usize = self.samples.iter().map(|s| s.1).sum();
        total as f64 / elapsed
    }
    /// Check if bandwidth is below a threshold.
    /// A 0.0 estimate means "no data yet", not congestion.
    pub fn is_congested(&self, threshold_bps: f64) -> bool {
        // Compute the estimate once instead of twice.
        let bps = self.estimate_bps();
        bps > 0.0 && bps < threshold_bps
    }
    // Drop samples older than the window relative to `now`.
    fn prune(&mut self, now: u64) {
        let cutoff = now.saturating_sub(self.window_ms);
        self.samples.retain(|s| s.0 >= cutoff);
    }
}
// ─── v0.5: Recording Metadata ───
/// Metadata header for .dsrec files. Written after magic bytes.
/// Total size: 24 bytes.
///
/// Layout (little-endian): version(0..2) | created_at(2..10)
/// | duration_ms(10..14) | frame_count(14..18) | width(18..20)
/// | height(20..22) | reserved(22..24).
#[derive(Debug, Clone, PartialEq)]
pub struct RecordingMetadata {
    pub version: u16,
    pub created_at: u64, // unix timestamp seconds
    pub duration_ms: u32,
    pub frame_count: u32,
    pub width: u16,
    pub height: u16,
}
impl RecordingMetadata {
    /// Encoded size in bytes (the final two bytes are reserved padding).
    pub const SIZE: usize = 24;
    /// Serialize into the fixed 24-byte little-endian layout.
    pub fn encode(&self) -> [u8; Self::SIZE] {
        let mut out = [0u8; Self::SIZE];
        out[0..2].copy_from_slice(&self.version.to_le_bytes());
        out[2..10].copy_from_slice(&self.created_at.to_le_bytes());
        out[10..14].copy_from_slice(&self.duration_ms.to_le_bytes());
        out[14..18].copy_from_slice(&self.frame_count.to_le_bytes());
        out[18..20].copy_from_slice(&self.width.to_le_bytes());
        out[20..22].copy_from_slice(&self.height.to_le_bytes());
        // bytes 22-23 stay zero (reserved)
        out
    }
    /// Deserialize from a buffer; `None` when shorter than `SIZE`.
    pub fn decode(buf: &[u8]) -> Option<Self> {
        // Checked sub-slice doubles as the length guard.
        let buf = buf.get(..Self::SIZE)?;
        let u16_at = |i: usize| u16::from_le_bytes([buf[i], buf[i + 1]]);
        let u32_at = |i: usize| u32::from_le_bytes([buf[i], buf[i + 1], buf[i + 2], buf[i + 3]]);
        Some(Self {
            version: u16_at(0),
            created_at: u64::from_le_bytes([buf[2], buf[3], buf[4], buf[5], buf[6], buf[7], buf[8], buf[9]]),
            duration_ms: u32_at(10),
            frame_count: u32_at(14),
            width: u16_at(18),
            height: u16_at(20),
        })
    }
}
// ─── v0.5: Auth Frame Builders ───
/// Build an auth challenge frame (server → client).
///
/// Phase 0 with the server nonce; the token field is unused for challenges.
pub fn auth_challenge_frame(seq: u16, nonce: &[u8; 8]) -> Vec<u8> {
    let payload = AuthPayload { phase: 0, nonce: *nonce, token: Vec::new() };
    // Auth frames carry no pixel data: width/height/flags are zero.
    encode_frame(FrameType::Auth, seq, 0, 0, 0, 0, &payload.encode())
}
/// Build an auth response frame (client → server).
///
/// Phase 1 echoing the nonce, plus the client's auth token.
pub fn auth_response_frame(seq: u16, nonce: &[u8; 8], token: &[u8]) -> Vec<u8> {
    let payload = AuthPayload { phase: 1, nonce: *nonce, token: token.to_vec() };
    // Auth frames carry no pixel data: width/height/flags are zero.
    encode_frame(FrameType::Auth, seq, 0, 0, 0, 0, &payload.encode())
}
// ─── Tests ───
#[cfg(test)]
@ -596,4 +984,262 @@ mod tests {
assert_eq!(decoded.header.frame_type, InputType::BciInput as u8);
assert_eq!(decoded.payload.len(), BciInputEvent::SIZE + 32);
}
// ─── v0.2 Tests ───
// One complete message in, one (header, payload) out, then None.
#[test]
fn stream_parser_single_message() {
    let mut parser = StreamParser::new();
    let msg = signal_diff_frame(1, 100, b"{\"x\":1}");
    parser.feed(&msg);
    let (header, payload) = parser.next_message().unwrap();
    assert_eq!(header.frame_type, FrameType::SignalDiff as u8);
    assert_eq!(payload, b"{\"x\":1}");
    assert!(parser.next_message().is_none());
}
// Three concatenated messages come back out in order.
#[test]
fn stream_parser_multi_message() {
    let mut parser = StreamParser::new();
    let msg1 = ping(1, 100);
    let msg2 = signal_sync_frame(2, 200, b"{\"a\":1}");
    let msg3 = stream_end(3, 300);
    let mut combined = Vec::new();
    combined.extend_from_slice(&msg1);
    combined.extend_from_slice(&msg2);
    combined.extend_from_slice(&msg3);
    parser.feed(&combined);
    let (h1, _) = parser.next_message().unwrap();
    assert_eq!(h1.frame_type, FrameType::Ping as u8);
    let (h2, p2) = parser.next_message().unwrap();
    assert_eq!(h2.frame_type, FrameType::SignalSync as u8);
    assert_eq!(p2, b"{\"a\":1}");
    let (h3, _) = parser.next_message().unwrap();
    assert_eq!(h3.frame_type, FrameType::End as u8);
    assert!(parser.next_message().is_none());
}
// A message split across two feeds only surfaces once complete.
#[test]
fn stream_parser_partial_feed() {
    let mut parser = StreamParser::new();
    let msg = signal_diff_frame(1, 100, b"{\"x\":42}");
    // Feed in two halves
    let mid = msg.len() / 2;
    parser.feed(&msg[..mid]);
    assert!(parser.next_message().is_none());
    parser.feed(&msg[mid..]);
    let (header, payload) = parser.next_message().unwrap();
    assert_eq!(header.seq, 1);
    assert_eq!(payload, b"{\"x\":42}");
}
// AckEvent survives the encode → frame → decode roundtrip.
#[test]
fn ack_frame_roundtrip() {
    let evt = AckEvent { ack_seq: 99, rtt_ms: 12 };
    let msg = ack_frame(5, 5000, &evt);
    let decoded = decode_message(&msg).unwrap();
    assert_eq!(decoded.header.frame_type, FrameType::Ack as u8);
    let ack = AckEvent::decode(decoded.payload).unwrap();
    assert_eq!(ack.ack_seq, 99);
    assert_eq!(ack.rtt_ms, 12);
}
// 300/1000 → ratio 0.3, savings 70%; empty original reports ratio 1.0.
#[test]
fn compression_ratio_calc() {
    let (ratio, savings) = compression_ratio(1000, 300);
    assert!((ratio - 0.3).abs() < 0.001);
    assert!((savings - 70.0).abs() < 0.1);
    let (ratio_zero, _) = compression_ratio(0, 0);
    assert!((ratio_zero - 1.0).abs() < 0.001);
}
// ─── v0.3 Tests ───
#[test]
fn compressed_pixel_frame_roundtrip() {
let fake_jpeg = vec![0xFF, 0xD8, 0xFF, 0xE0]; // JPEG header
let msg = compressed_pixel_frame(1, 100, 320, 240, CompressedPixelFormat::Jpeg, &fake_jpeg);
let decoded = decode_message(&msg).unwrap();
assert_eq!(decoded.header.frame_type, FrameType::CompressedPixels as u8);
assert_eq!(decoded.header.width, 320);
assert_eq!(decoded.header.height, 240);
assert_eq!(decoded.header.flags & FLAG_COMPRESSED, FLAG_COMPRESSED);
// First payload byte is format
assert_eq!(decoded.payload[0], CompressedPixelFormat::Jpeg as u8);
assert_eq!(&decoded.payload[1..], &fake_jpeg);
}
#[test]
fn recording_writer_reader_roundtrip() {
    let mut recorded = Vec::new();
    // Write two timestamped entries, then let the writer go out of scope
    // so the mutable borrow on `recorded` is released before reading.
    {
        let mut writer = RecordingWriter::new(&mut recorded).unwrap();
        let diff = signal_diff_frame(1, 100, b"{\"x\":1}");
        let keepalive = ping(2, 200);
        writer.write_entry(1000, &diff).unwrap();
        writer.write_entry(2000, &keepalive).unwrap();
        assert_eq!(writer.entry_count(), 2);
        writer.flush().unwrap();
    }
    // Read everything back and confirm timestamps + messages are intact.
    {
        let mut reader = RecordingReader::new(std::io::Cursor::new(&recorded)).unwrap();
        let entries = reader.read_all().unwrap();
        assert_eq!(entries.len(), 2);
        assert_eq!(entries[0].0, 1000); // first timestamp
        assert_eq!(entries[1].0, 2000); // second timestamp
        let first = decode_message(&entries[0].1).unwrap();
        assert_eq!(first.header.frame_type, FrameType::SignalDiff as u8);
        assert_eq!(reader.entries_read(), 2);
    }
}
#[test]
fn recording_reader_invalid_magic() {
    // A stream that doesn't start with the recording magic must be rejected.
    let junk = vec![0x00, 0x00, 0x00];
    assert!(RecordingReader::new(std::io::Cursor::new(&junk)).is_err());
}
#[test]
fn compressed_pixel_format_roundtrip() {
    // Byte ↔ variant mapping for every defined format, plus one invalid byte.
    let cases = [
        (0u8, Some(CompressedPixelFormat::Png)),
        (1u8, Some(CompressedPixelFormat::WebP)),
        (2u8, Some(CompressedPixelFormat::Jpeg)),
        (3u8, None),
    ];
    for &(byte, expected) in &cases {
        assert_eq!(CompressedPixelFormat::from_u8(byte), expected);
    }
    assert_eq!(CompressedPixelFormat::Jpeg.name(), "JPEG");
    assert_eq!(CompressedPixelFormat::WebP.mime_type(), "image/webp");
}
// ─── v0.4 Tests ───
#[test]
fn adaptive_quality_tiers() {
    let mut quality = AdaptiveQuality::new();
    // Fresh controller starts at full quality with no frame skipping.
    assert_eq!(quality.current_tier(), QualityTier::Full);
    assert_eq!(quality.suggested_quality(), 80);
    assert_eq!(quality.suggested_skip_frames(), 0);
    // Sustained ~80 ms RTT drops us into the Reduced tier.
    for _ in 0..10 {
        quality.record_ack(80);
    }
    assert_eq!(quality.current_tier(), QualityTier::Reduced);
    assert_eq!(quality.suggested_quality(), 50);
    assert_eq!(quality.suggested_skip_frames(), 2);
    // Sustained 200 ms RTT pushes all the way down to Minimal.
    for _ in 0..30 {
        quality.record_ack(200);
    }
    assert_eq!(quality.current_tier(), QualityTier::Minimal);
    assert_eq!(quality.suggested_quality(), 0);
    // A long run of fast acks recovers back to Full.
    for _ in 0..30 {
        quality.record_ack(10);
    }
    assert_eq!(quality.current_tier(), QualityTier::Full);
}
#[test]
fn bandwidth_estimator_basic() {
    let mut estimator = BandwidthEstimator::new(5);
    // Three 1000-byte samples spanning 2 seconds → 3000 bytes / 2 s = 1500 bps.
    for &(ts, bytes) in &[(0, 1000), (1000, 1000), (2000, 1000)] {
        estimator.record(ts, bytes);
    }
    let bps = estimator.estimate_bps();
    assert!((bps - 1500.0).abs() < 1.0, "BPS should be ~1500: {}", bps);
    // Congestion is judged against the caller-supplied threshold.
    assert!(!estimator.is_congested(1000.0));
    assert!(estimator.is_congested(2000.0));
}
#[test]
fn audio_header_roundtrip() {
    // Stereo 48 kHz Opus with a 960-sample frame — a typical streaming config.
    let original = AudioHeader {
        format: AudioFormat::Opus,
        channels: 2,
        sample_rate: 48000,
        frame_size: 960,
    };
    let decoded = AudioHeader::decode(&original.encode()).unwrap();
    assert_eq!(decoded.format, AudioFormat::Opus);
    assert_eq!(decoded.channels, 2);
    assert_eq!(decoded.sample_rate, 48000);
    assert_eq!(decoded.frame_size, 960);
}
#[test]
fn audio_format_enum() {
    // Each known format byte maps to its variant; anything else is None.
    let cases = [
        (0u8, Some(AudioFormat::Pcm)),
        (1u8, Some(AudioFormat::Opus)),
        (2u8, Some(AudioFormat::Aac)),
        (3u8, None),
    ];
    for &(byte, expected) in &cases {
        assert_eq!(AudioFormat::from_u8(byte), expected);
    }
    assert_eq!(AudioFormat::Opus.name(), "Opus");
}
// ─── v0.5 Tests ───
#[test]
fn auth_payload_roundtrip() {
    // Phase-1 (response) payload carrying a nonce and a bearer token.
    let original = AuthPayload {
        phase: 1,
        nonce: [1, 2, 3, 4, 5, 6, 7, 8],
        token: b"secret-token".to_vec(),
    };
    let decoded = AuthPayload::decode(&original.encode()).unwrap();
    assert_eq!(decoded, original);
}
#[test]
fn xor_scramble_roundtrip() {
    let plaintext = b"hello world";
    let key = b"mykey";
    // Scrambling must actually change the bytes...
    let obfuscated = scramble_payload(plaintext, key);
    assert_ne!(&obfuscated, plaintext);
    // ...and descrambling with the same key must restore them exactly.
    let restored = descramble_payload(&obfuscated, key);
    assert_eq!(&restored, plaintext);
}
#[test]
fn recording_metadata_roundtrip() {
    // A full-HD, 5-second clip at 30 fps (150 frames).
    let original = RecordingMetadata {
        version: 1,
        created_at: 1710000000,
        duration_ms: 5000,
        frame_count: 150,
        width: 1920,
        height: 1080,
    };
    let decoded = RecordingMetadata::decode(&original.encode()).unwrap();
    assert_eq!(decoded, original);
}
#[test]
fn auth_frame_builders() {
    let nonce = [1u8; 8];
    // Phase 0: server-issued challenge carrying the nonce.
    let challenge = auth_challenge_frame(1, &nonce);
    assert_eq!(challenge[0], FrameType::Auth as u8);
    let challenge_payload = AuthPayload::decode(&challenge[HEADER_SIZE..]).unwrap();
    assert_eq!(challenge_payload.phase, 0);
    assert_eq!(challenge_payload.nonce, nonce);
    // Phase 1: client response echoing the nonce plus its token.
    let response = auth_response_frame(2, &nonce, b"token123");
    let response_payload = AuthPayload::decode(&response[HEADER_SIZE..]).unwrap();
    assert_eq!(response_payload.phase, 1);
    assert_eq!(response_payload.token, b"token123");
}
}

View file

@ -64,6 +64,10 @@ pub enum FrameType {
/// Keyframe — receiver should reset state
Keyframe = 0xF0,
/// Authentication handshake
Auth = 0x0F,
/// Acknowledgement — receiver → source with seq + RTT
Ack = 0xFD,
/// Heartbeat / keep-alive
Ping = 0xFE,
/// Stream end
@ -90,11 +94,40 @@ impl FrameType {
0x42 => Some(Self::NeuralActuator),
0x43 => Some(Self::NeuralLatent),
0xF0 => Some(Self::Keyframe),
0x0F => Some(Self::Auth),
0xFD => Some(Self::Ack),
0xFE => Some(Self::Ping),
0xFF => Some(Self::End),
_ => None,
}
}
/// Human-readable name for this frame type.
pub fn name(&self) -> &'static str {
match self {
Self::Pixels => "Pixels",
Self::CompressedPixels => "CompressedPixels",
Self::DeltaPixels => "DeltaPixels",
Self::AudioPcm => "AudioPcm",
Self::AudioCompressed => "AudioCompressed",
Self::Haptic => "Haptic",
Self::Actuator => "Actuator",
Self::LedMatrix => "LedMatrix",
Self::SignalSync => "SignalSync",
Self::SignalDiff => "SignalDiff",
Self::SchemaAnnounce => "SchemaAnnounce",
Self::SubscribeFilter => "SubscribeFilter",
Self::NeuralFrame => "NeuralFrame",
Self::NeuralAudio => "NeuralAudio",
Self::NeuralActuator => "NeuralActuator",
Self::NeuralLatent => "NeuralLatent",
Self::Keyframe => "Keyframe",
Self::Auth => "Auth",
Self::Ack => "Ack",
Self::Ping => "Ping",
Self::End => "End",
}
}
}
// ─── Input Types ───
@ -613,6 +646,225 @@ impl BciInputEvent {
}
/// Acknowledgement event — receiver → source for RTT measurement.
#[derive(Debug, Clone, Copy)]
pub struct AckEvent {
    /// Sequence number being acknowledged
    pub ack_seq: u16,
    /// Round-trip time in milliseconds (measured by receiver)
    pub rtt_ms: u16,
}
impl AckEvent {
    /// Encoded size: two little-endian u16 fields.
    pub const SIZE: usize = 4;
    /// Serialize as `[ack_seq: u16 LE][rtt_ms: u16 LE]`.
    pub fn encode(&self) -> [u8; Self::SIZE] {
        let [s0, s1] = self.ack_seq.to_le_bytes();
        let [r0, r1] = self.rtt_ms.to_le_bytes();
        [s0, s1, r0, r1]
    }
    /// Parse from a buffer; `None` when fewer than `SIZE` bytes are available.
    pub fn decode(buf: &[u8]) -> Option<Self> {
        let bytes = buf.get(..Self::SIZE)?;
        Some(Self {
            ack_seq: u16::from_le_bytes([bytes[0], bytes[1]]),
            rtt_ms: u16::from_le_bytes([bytes[2], bytes[3]]),
        })
    }
}
/// Get a human-readable name for any frame type byte.
///
/// Control/stream frame types take precedence over input types when a byte
/// value is shared (e.g. 0x01 is both `Pixels` and `Pointer`).
pub fn frame_type_name(type_byte: u8) -> &'static str {
    if let Some(frame_type) = FrameType::from_u8(type_byte) {
        return frame_type.name();
    }
    if InputType::from_u8(type_byte).is_none() {
        return "Unknown";
    }
    // Byte is a recognized input type — name it directly.
    match type_byte {
        0x01 => "Pointer",
        0x02 => "PointerDown",
        0x03 => "PointerUp",
        0x10 => "KeyDown",
        0x11 => "KeyUp",
        0x20 => "Touch",
        0x21 => "TouchEnd",
        0x30 => "GamepadAxis",
        0x31 => "GamepadButton",
        0x40 => "Midi",
        0x50 => "Scroll",
        0x60 => "Resize",
        0x70 => "VoiceInput",
        0x71 => "CameraInput",
        0x80 => "SensorInput",
        0x90 => "BciInput",
        _ => "Unknown",
    }
}
/// Check if a frame type byte represents an input event.
pub fn is_input_type(type_byte: u8) -> bool {
    matches!(InputType::from_u8(type_byte), Some(_))
}
// ─── Compressed Pixel Format ───
/// Image encoding format for CompressedPixels (0x02) frames.
/// The format byte is the first byte of the CompressedPixels payload.
#[repr(u8)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum CompressedPixelFormat {
    Png = 0,
    WebP = 1,
    Jpeg = 2,
}
impl CompressedPixelFormat {
    /// Map a payload format byte back to a variant; `None` for unknown bytes.
    pub fn from_u8(val: u8) -> Option<Self> {
        [Self::Png, Self::WebP, Self::Jpeg]
            .iter()
            .copied()
            .find(|format| *format as u8 == val)
    }
    /// Short display name (e.g. for logs and stats output).
    pub fn name(&self) -> &'static str {
        match self {
            CompressedPixelFormat::Png => "PNG",
            CompressedPixelFormat::WebP => "WebP",
            CompressedPixelFormat::Jpeg => "JPEG",
        }
    }
    /// MIME type for HTTP responses or `<img>` data URLs.
    pub fn mime_type(&self) -> &'static str {
        match self {
            CompressedPixelFormat::Png => "image/png",
            CompressedPixelFormat::WebP => "image/webp",
            CompressedPixelFormat::Jpeg => "image/jpeg",
        }
    }
}
// ─── v0.4: Audio Format ───
/// Audio encoding format for AudioCompressed (0x08) frames.
#[repr(u8)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AudioFormat {
    Pcm = 0,
    Opus = 1,
    Aac = 2,
}
impl AudioFormat {
    /// Map a payload format byte to a variant; `None` for unknown bytes.
    pub fn from_u8(val: u8) -> Option<Self> {
        match val {
            0 => Some(Self::Pcm),
            1 => Some(Self::Opus),
            2 => Some(Self::Aac),
            _ => None,
        }
    }
    /// Short display name for logs/UI.
    pub fn name(&self) -> &'static str {
        match self {
            Self::Pcm => "PCM",
            Self::Opus => "Opus",
            Self::Aac => "AAC",
        }
    }
}
/// Structured audio header (first 8 bytes of AudioCompressed payload).
/// ```text
/// [format: u8] [channels: u8] [sample_rate: u32 LE] [frame_size: u16 LE]
/// ```
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct AudioHeader {
    pub format: AudioFormat,
    pub channels: u8,
    pub sample_rate: u32,
    pub frame_size: u16,
}
impl AudioHeader {
    /// Encoded size of the header in bytes.
    pub const SIZE: usize = 8;
    /// Serialize into the fixed 8-byte wire layout (little-endian fields).
    pub fn encode(&self) -> [u8; Self::SIZE] {
        let mut buf = [0u8; Self::SIZE];
        buf[0] = self.format as u8;
        buf[1] = self.channels;
        // copy_from_slice over byte-by-byte assignment: clearer, memcpy-backed,
        // and keeps the field/offset mapping in one place per field.
        buf[2..6].copy_from_slice(&self.sample_rate.to_le_bytes());
        buf[6..8].copy_from_slice(&self.frame_size.to_le_bytes());
        buf
    }
    /// Parse the header; `None` if the buffer is too short or the format
    /// byte is unknown.
    pub fn decode(buf: &[u8]) -> Option<Self> {
        if buf.len() < Self::SIZE { return None; }
        Some(Self {
            format: AudioFormat::from_u8(buf[0])?,
            channels: buf[1],
            sample_rate: u32::from_le_bytes([buf[2], buf[3], buf[4], buf[5]]),
            frame_size: u16::from_le_bytes([buf[6], buf[7]]),
        })
    }
}
// ─── v0.5: Auth Payload ───
/// Authentication handshake payload.
/// Phase 0 = challenge (server → client, carries nonce).
/// Phase 1 = response (client → server, carries token + nonce).
#[derive(Debug, Clone, PartialEq)]
pub struct AuthPayload {
    pub phase: u8,
    pub nonce: [u8; 8],
    pub token: Vec<u8>,
}
impl AuthPayload {
    /// Minimum encoded size (phase + nonce).
    pub const MIN_SIZE: usize = 9;
    /// Serialize as `[phase: u8][nonce: 8 bytes][token: remaining bytes]`.
    pub fn encode(&self) -> Vec<u8> {
        let mut out = Vec::with_capacity(Self::MIN_SIZE + self.token.len());
        out.push(self.phase);
        out.extend_from_slice(&self.nonce);
        out.extend_from_slice(&self.token);
        out
    }
    /// Parse from a buffer; `None` when shorter than `MIN_SIZE`.
    /// Everything after the nonce is treated as the (possibly empty) token.
    pub fn decode(buf: &[u8]) -> Option<Self> {
        if buf.len() < Self::MIN_SIZE {
            return None;
        }
        let (head, token) = buf.split_at(Self::MIN_SIZE);
        let mut nonce = [0u8; 8];
        nonce.copy_from_slice(&head[1..]);
        Some(Self {
            phase: head[0],
            nonce,
            token: token.to_vec(),
        })
    }
}
// ─── v0.5: XOR Scrambling ───
/// XOR-scramble a payload with a repeating key. Not encryption — just obfuscation.
pub fn scramble_payload(payload: &[u8], key: &[u8]) -> Vec<u8> {
    // An empty key would make the repeating-key index meaningless; pass through.
    if key.is_empty() {
        return payload.to_vec();
    }
    payload
        .iter()
        .zip(key.iter().cycle())
        .map(|(&byte, &key_byte)| byte ^ key_byte)
        .collect()
}
/// Descramble a payload scrambled with `scramble_payload`.
/// XOR is its own inverse, so this is the same repeating-key XOR applied again.
pub fn descramble_payload(payload: &[u8], key: &[u8]) -> Vec<u8> {
    if key.is_empty() {
        return payload.to_vec();
    }
    payload.iter().enumerate().map(|(i, &b)| b ^ key[i % key.len()]).collect()
}
// ─── Tests ───
#[cfg(test)]
@ -670,7 +922,7 @@ mod tests {
#[test]
fn frame_type_roundtrip() {
for val in [0x01, 0x02, 0x03, 0x10, 0x11, 0x20, 0x30, 0x31, 0x40, 0x41, 0xF0, 0xFE, 0xFF] {
for val in [0x01, 0x02, 0x03, 0x10, 0x11, 0x20, 0x30, 0x31, 0x40, 0x41, 0xF0, 0xFD, 0xFE, 0xFF] {
let ft = FrameType::from_u8(val);
assert!(ft.is_some(), "FrameType::from_u8({:#x}) should be Some", val);
assert_eq!(ft.unwrap() as u8, val);
@ -678,6 +930,38 @@ mod tests {
assert!(FrameType::from_u8(0x99).is_none());
}
#[test]
fn ack_event_roundtrip() {
    // encode → decode must preserve both fields exactly.
    let original = AckEvent { ack_seq: 42, rtt_ms: 15 };
    let decoded = AckEvent::decode(&original.encode()).unwrap();
    assert_eq!(decoded.ack_seq, 42);
    assert_eq!(decoded.rtt_ms, 15);
}
#[test]
fn ack_event_too_short() {
    // A buffer one byte short of AckEvent::SIZE must be rejected.
    let short = [0u8; 3];
    assert!(AckEvent::decode(&short).is_none());
}
#[test]
fn frame_type_name_lookup() {
    // Known control bytes resolve to their names; unknown bytes are "Unknown".
    let expectations = [
        (0x01u8, "Pixels"),
        (0xFDu8, "Ack"),
        (0xFEu8, "Ping"),
        (0xFFu8, "End"),
        (0x99u8, "Unknown"),
    ];
    for &(byte, expected) in &expectations {
        assert_eq!(frame_type_name(byte), expected);
    }
}
#[test]
fn is_input_type_check() {
    // Input-range bytes report true; 0x01 is both Pointer and Pixels (shared value).
    assert!(is_input_type(0x01));
    assert!(is_input_type(0x50)); // Scroll
    assert!(is_input_type(0x90)); // BciInput
    // Control frames are not input events.
    assert!(!is_input_type(0xFE)); // Ping
    assert!(!is_input_type(0xF0)); // Keyframe
}
#[test]
fn input_type_roundtrip() {
for val in [0x01, 0x02, 0x03, 0x10, 0x11, 0x20, 0x21, 0x30, 0x31, 0x40, 0x50, 0x60, 0x70, 0x80, 0x90] {

View file

@ -480,7 +480,7 @@ pub(crate) fn find_matching_channels(state: &RelayState, pattern: &str) -> Vec<S
pub async fn run_relay(config: RelayConfig) -> Result<(), Box<dyn std::error::Error>> {
let listener = TcpListener::bind(&config.addr).await?;
eprintln!("╔══════════════════════════════════════════════════╗");
eprintln!("║ DreamStack Bitstream Relay v1.0.0 ║");
eprintln!("║ DreamStack Bitstream Relay v0.3.0 ║");
eprintln!("║ ║");
eprintln!("║ Source: ws://{}/source/{{name}}", config.addr);
eprintln!("║ Receiver: ws://{}/stream/{{name}}", config.addr);