v0.4 — CLI Modularity: - Split monolithic main.rs (2,038 lines) into 8 command modules + slim dispatch (107 lines) - Add 12 JS codegen tests (signals, derived, views, events, loops, match, enums, components, interpolation, springs, tree-shaking) v0.5 — Diagnostic Quality + Analyzer Confidence: - Add From<ParseError> for Diagnostic (E0001) in ds-diagnostic - Add errors_as_diagnostics() to TypeChecker (E0100–E0110) - Wire Elm-style diagnostics through dreamstack check and build commands - Switch incremental compiler to parse_program_resilient() for multi-error collection - Add 12 analyzer tests (chains, fan-out, diamond deps, empty programs, conditionals, handlers, views) - Add 2 diagnostic conversion tests Test suite: 97 → 123 tests (26 new, 0 failures)
213 lines
7.7 KiB
Rust
213 lines
7.7 KiB
Rust
//! Build command — compile .ds files to HTML+JS or Panel IR.
|
|
|
|
use std::fs;
|
|
use std::path::{Path, PathBuf};
|
|
use std::collections::HashSet;
|
|
|
|
/// Compile error carrying an optional copy of the source text for
/// diagnostic rendering.
#[derive(Debug)]
pub struct CompileError {
    /// Human-readable message; may already contain a rendered diagnostic.
    pub message: String,
    /// Source text the error refers to, when available (set for parse
    /// errors whose message embeds an Elm-style diagnostic).
    pub source: Option<String>,
}
|
|
|
|
impl std::fmt::Display for CompileError {
|
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
write!(f, "{}", self.message)
|
|
}
|
|
}
|
|
|
|
|
|
pub fn compile(source: &str, base_dir: &Path, minify: bool) -> Result<String, CompileError> {
|
|
// 1. Lex
|
|
let mut lexer = ds_parser::Lexer::new(source);
|
|
let tokens = lexer.tokenize();
|
|
|
|
// Check for lexer errors
|
|
for tok in &tokens {
|
|
if let ds_parser::TokenKind::Error(msg) = &tok.kind {
|
|
return Err(CompileError {
|
|
message: format!("Lexer error at line {}: {}", tok.line, msg),
|
|
source: None,
|
|
});
|
|
}
|
|
}
|
|
|
|
// 2. Parse
|
|
let mut parser = ds_parser::Parser::with_source(tokens, source);
|
|
let mut program = parser.parse_program().map_err(|e| {
|
|
let diag = ds_diagnostic::Diagnostic::from(e);
|
|
CompileError {
|
|
message: ds_diagnostic::render(&diag, source),
|
|
source: Some(source.to_string()),
|
|
}
|
|
})?;
|
|
|
|
// 3. Resolve imports — inline exported declarations from imported files
|
|
resolve_imports(&mut program, base_dir).map_err(|e| CompileError { message: e, source: None })?;
|
|
|
|
// 4. Analyze
|
|
let graph = ds_analyzer::SignalGraph::from_program(&program);
|
|
let views = ds_analyzer::SignalGraph::analyze_views(&program);
|
|
|
|
// 5. Codegen
|
|
let html = ds_codegen::JsEmitter::emit_html(&program, &graph, &views, minify);
|
|
|
|
Ok(html)
|
|
}
|
|
|
|
/// Compile a DreamStack source file to Panel IR JSON for ESP32 LVGL panels.
|
|
pub fn compile_panel_ir(source: &str, base_dir: &Path) -> Result<String, CompileError> {
|
|
// 1. Lex
|
|
let mut lexer = ds_parser::Lexer::new(source);
|
|
let tokens = lexer.tokenize();
|
|
|
|
for tok in &tokens {
|
|
if let ds_parser::TokenKind::Error(msg) = &tok.kind {
|
|
return Err(CompileError {
|
|
message: format!("Lexer error at line {}: {}", tok.line, msg),
|
|
source: None,
|
|
});
|
|
}
|
|
}
|
|
|
|
// 2. Parse
|
|
let mut parser = ds_parser::Parser::with_source(tokens, source);
|
|
let mut program = parser.parse_program().map_err(|e| {
|
|
let diag = ds_diagnostic::Diagnostic::from(e);
|
|
CompileError {
|
|
message: ds_diagnostic::render(&diag, source),
|
|
source: Some(source.to_string()),
|
|
}
|
|
})?;
|
|
|
|
// 3. Resolve imports
|
|
resolve_imports(&mut program, base_dir).map_err(|e| CompileError { message: e, source: None })?;
|
|
|
|
// 4. Analyze
|
|
let graph = ds_analyzer::SignalGraph::from_program(&program);
|
|
|
|
// 5. Codegen → Panel IR
|
|
let ir = ds_codegen::IrEmitter::emit_ir(&program, &graph);
|
|
|
|
Ok(ir)
|
|
}
|
|
|
|
/// Resolve `import { X, Y } from "./file"` by parsing the imported file
|
|
/// and inlining the matching `export`ed declarations.
|
|
pub fn resolve_imports(program: &mut ds_parser::Program, base_dir: &Path) -> Result<(), String> {
|
|
let mut imported_decls = Vec::new();
|
|
let mut seen_files: HashSet<PathBuf> = HashSet::new();
|
|
|
|
for decl in &program.declarations {
|
|
if let ds_parser::Declaration::Import(import) = decl {
|
|
// Resolve the file path relative to base_dir
|
|
let mut import_path = base_dir.join(&import.source);
|
|
if !import_path.extension().map_or(false, |e| e == "ds") {
|
|
import_path.set_extension("ds");
|
|
}
|
|
|
|
let import_path = import_path.canonicalize().unwrap_or(import_path.clone());
|
|
|
|
if seen_files.contains(&import_path) {
|
|
continue; // Skip duplicate imports
|
|
}
|
|
seen_files.insert(import_path.clone());
|
|
|
|
// Read and parse the imported file
|
|
let imported_source = fs::read_to_string(&import_path)
|
|
.map_err(|e| format!("Cannot import '{}': {}", import.source, e))?;
|
|
|
|
let mut lexer = ds_parser::Lexer::new(&imported_source);
|
|
let tokens = lexer.tokenize();
|
|
for tok in &tokens {
|
|
if let ds_parser::TokenKind::Error(msg) = &tok.kind {
|
|
return Err(format!("Lexer error in '{}' at line {}: {}", import.source, tok.line, msg));
|
|
}
|
|
}
|
|
let mut parser = ds_parser::Parser::new(tokens);
|
|
let mut imported_program = parser.parse_program()
|
|
.map_err(|e| format!("Parse error in '{}': {}", import.source, e))?;
|
|
|
|
// Recursively resolve imports in the imported file
|
|
let imported_dir = import_path.parent().unwrap_or(base_dir);
|
|
resolve_imports(&mut imported_program, imported_dir)?;
|
|
|
|
// Extract matching exports
|
|
let names: HashSet<&str> = import.names.iter().map(|s| s.as_str()).collect();
|
|
for d in &imported_program.declarations {
|
|
match d {
|
|
ds_parser::Declaration::Export(name, inner) if names.contains(name.as_str()) => {
|
|
imported_decls.push(*inner.clone());
|
|
}
|
|
// Also include non-exported decls that exports depend on
|
|
// (for now, include all let decls from the imported file)
|
|
ds_parser::Declaration::Let(_) => {
|
|
imported_decls.push(d.clone());
|
|
}
|
|
_ => {}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
// Remove Import declarations and prepend imported decls
|
|
program.declarations.retain(|d| !matches!(d, ds_parser::Declaration::Import(_)));
|
|
let mut merged = imported_decls;
|
|
merged.append(&mut program.declarations);
|
|
program.declarations = merged;
|
|
|
|
Ok(())
|
|
}
|
|
|
|
pub fn cmd_build(file: &Path, output: &Path, minify: bool, target: &str) {
|
|
println!("🔨 DreamStack build (target: {}){}", target, if minify { " (minified)" } else { "" });
|
|
println!(" source: {}", file.display());
|
|
|
|
let source = match fs::read_to_string(file) {
|
|
Ok(s) => s,
|
|
Err(e) => {
|
|
eprintln!("❌ Could not read {}: {}", file.display(), e);
|
|
std::process::exit(1);
|
|
}
|
|
};
|
|
|
|
let base_dir = file.parent().unwrap_or(Path::new("."));
|
|
|
|
match target {
|
|
"panel" => {
|
|
// Panel IR target — emit JSON for ESP32 LVGL runtime
|
|
match compile_panel_ir(&source, base_dir) {
|
|
Ok(ir) => {
|
|
fs::create_dir_all(output).unwrap();
|
|
let out_path = output.join("app.ir.json");
|
|
fs::write(&out_path, &ir).unwrap();
|
|
println!(" output: {}", out_path.display());
|
|
println!("✅ Panel IR built ({} bytes)", ir.len());
|
|
}
|
|
Err(e) => {
|
|
eprintln!("❌ {}", e.message);
|
|
std::process::exit(1);
|
|
}
|
|
}
|
|
}
|
|
_ => {
|
|
// Default HTML target
|
|
match compile(&source, base_dir, minify) {
|
|
Ok(html) => {
|
|
fs::create_dir_all(output).unwrap();
|
|
let out_path = output.join("index.html");
|
|
fs::write(&out_path, &html).unwrap();
|
|
println!(" output: {}", out_path.display());
|
|
println!("✅ Build complete! ({} bytes)", html.len());
|
|
println!();
|
|
println!(" Open in browser:");
|
|
println!(" file://{}", fs::canonicalize(&out_path).unwrap().display());
|
|
}
|
|
Err(e) => {
|
|
eprintln!("❌ {}", e.message);
|
|
std::process::exit(1);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|