feat: v2 module system — import/export with multi-file compilation
Syntax:
import { Counter, shared_count } from "./shared"
export let shared_count = 0
export component Counter = ...
Implementation:
- Lexer: Import, Export keywords
- AST: ImportDecl(names, source), Export(name, inner_decl)
- Parser: parse_import_decl, parse_export_decl
- CLI: resolve_imports() — recursive file resolution, dedup, inline
Resolves relative paths, adds .ds extension, handles transitive imports.
110 tests, 0 failures.
This commit is contained in:
parent
26d6c4f17a
commit
6368b798cf
6 changed files with 202 additions and 8 deletions
|
|
@ -69,7 +69,7 @@ fn main() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn compile(source: &str) -> Result<String, String> {
|
fn compile(source: &str, base_dir: &Path) -> Result<String, String> {
|
||||||
// 1. Lex
|
// 1. Lex
|
||||||
let mut lexer = ds_parser::Lexer::new(source);
|
let mut lexer = ds_parser::Lexer::new(source);
|
||||||
let tokens = lexer.tokenize();
|
let tokens = lexer.tokenize();
|
||||||
|
|
@ -83,18 +83,90 @@ fn compile(source: &str) -> Result<String, String> {
|
||||||
|
|
||||||
// 2. Parse
|
// 2. Parse
|
||||||
let mut parser = ds_parser::Parser::new(tokens);
|
let mut parser = ds_parser::Parser::new(tokens);
|
||||||
let program = parser.parse_program().map_err(|e| e.to_string())?;
|
let mut program = parser.parse_program().map_err(|e| e.to_string())?;
|
||||||
|
|
||||||
// 3. Analyze
|
// 3. Resolve imports — inline exported declarations from imported files
|
||||||
|
resolve_imports(&mut program, base_dir)?;
|
||||||
|
|
||||||
|
// 4. Analyze
|
||||||
let graph = ds_analyzer::SignalGraph::from_program(&program);
|
let graph = ds_analyzer::SignalGraph::from_program(&program);
|
||||||
let views = ds_analyzer::SignalGraph::analyze_views(&program);
|
let views = ds_analyzer::SignalGraph::analyze_views(&program);
|
||||||
|
|
||||||
// 4. Codegen
|
// 5. Codegen
|
||||||
let html = ds_codegen::JsEmitter::emit_html(&program, &graph, &views);
|
let html = ds_codegen::JsEmitter::emit_html(&program, &graph, &views);
|
||||||
|
|
||||||
Ok(html)
|
Ok(html)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Resolve `import { X, Y } from "./file"` by parsing the imported file
|
||||||
|
/// and inlining the matching `export`ed declarations.
|
||||||
|
fn resolve_imports(program: &mut ds_parser::Program, base_dir: &Path) -> Result<(), String> {
|
||||||
|
use std::collections::HashSet;
|
||||||
|
|
||||||
|
let mut imported_decls = Vec::new();
|
||||||
|
let mut seen_files: HashSet<PathBuf> = HashSet::new();
|
||||||
|
|
||||||
|
for decl in &program.declarations {
|
||||||
|
if let ds_parser::Declaration::Import(import) = decl {
|
||||||
|
// Resolve the file path relative to base_dir
|
||||||
|
let mut import_path = base_dir.join(&import.source);
|
||||||
|
if !import_path.extension().map_or(false, |e| e == "ds") {
|
||||||
|
import_path.set_extension("ds");
|
||||||
|
}
|
||||||
|
|
||||||
|
let import_path = import_path.canonicalize().unwrap_or(import_path.clone());
|
||||||
|
|
||||||
|
if seen_files.contains(&import_path) {
|
||||||
|
continue; // Skip duplicate imports
|
||||||
|
}
|
||||||
|
seen_files.insert(import_path.clone());
|
||||||
|
|
||||||
|
// Read and parse the imported file
|
||||||
|
let imported_source = fs::read_to_string(&import_path)
|
||||||
|
.map_err(|e| format!("Cannot import '{}': {}", import.source, e))?;
|
||||||
|
|
||||||
|
let mut lexer = ds_parser::Lexer::new(&imported_source);
|
||||||
|
let tokens = lexer.tokenize();
|
||||||
|
for tok in &tokens {
|
||||||
|
if let ds_parser::TokenKind::Error(msg) = &tok.kind {
|
||||||
|
return Err(format!("Lexer error in '{}' at line {}: {}", import.source, tok.line, msg));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let mut parser = ds_parser::Parser::new(tokens);
|
||||||
|
let mut imported_program = parser.parse_program()
|
||||||
|
.map_err(|e| format!("Parse error in '{}': {}", import.source, e))?;
|
||||||
|
|
||||||
|
// Recursively resolve imports in the imported file
|
||||||
|
let imported_dir = import_path.parent().unwrap_or(base_dir);
|
||||||
|
resolve_imports(&mut imported_program, imported_dir)?;
|
||||||
|
|
||||||
|
// Extract matching exports
|
||||||
|
let names: HashSet<&str> = import.names.iter().map(|s| s.as_str()).collect();
|
||||||
|
for d in &imported_program.declarations {
|
||||||
|
match d {
|
||||||
|
ds_parser::Declaration::Export(name, inner) if names.contains(name.as_str()) => {
|
||||||
|
imported_decls.push(*inner.clone());
|
||||||
|
}
|
||||||
|
// Also include non-exported decls that exports depend on
|
||||||
|
// (for now, include all let decls from the imported file)
|
||||||
|
ds_parser::Declaration::Let(_) => {
|
||||||
|
imported_decls.push(d.clone());
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove Import declarations and prepend imported decls
|
||||||
|
program.declarations.retain(|d| !matches!(d, ds_parser::Declaration::Import(_)));
|
||||||
|
let mut merged = imported_decls;
|
||||||
|
merged.append(&mut program.declarations);
|
||||||
|
program.declarations = merged;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
fn cmd_build(file: &Path, output: &Path) {
|
fn cmd_build(file: &Path, output: &Path) {
|
||||||
println!("🔨 DreamStack build");
|
println!("🔨 DreamStack build");
|
||||||
println!(" source: {}", file.display());
|
println!(" source: {}", file.display());
|
||||||
|
|
@ -107,7 +179,8 @@ fn cmd_build(file: &Path, output: &Path) {
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
match compile(&source) {
|
let base_dir = file.parent().unwrap_or(Path::new("."));
|
||||||
|
match compile(&source, base_dir) {
|
||||||
Ok(html) => {
|
Ok(html) => {
|
||||||
fs::create_dir_all(output).unwrap();
|
fs::create_dir_all(output).unwrap();
|
||||||
let out_path = output.join("index.html");
|
let out_path = output.join("index.html");
|
||||||
|
|
@ -194,7 +267,8 @@ fn cmd_dev(file: &Path, port: u16) {
|
||||||
};
|
};
|
||||||
|
|
||||||
let start = Instant::now();
|
let start = Instant::now();
|
||||||
match compile(&source) {
|
let base_dir = file.parent().unwrap_or(Path::new("."));
|
||||||
|
match compile(&source, base_dir) {
|
||||||
Ok(html) => {
|
Ok(html) => {
|
||||||
let ms = start.elapsed().as_millis();
|
let ms = start.elapsed().as_millis();
|
||||||
let html_with_hmr = inject_hmr(&html);
|
let html_with_hmr = inject_hmr(&html);
|
||||||
|
|
@ -262,7 +336,7 @@ h2 {{ color: #f87171; margin-bottom: 16px; }}
|
||||||
// Recompile
|
// Recompile
|
||||||
if let Ok(src) = fs::read_to_string(&watch_file) {
|
if let Ok(src) = fs::read_to_string(&watch_file) {
|
||||||
let start = Instant::now();
|
let start = Instant::now();
|
||||||
match compile(&src) {
|
match compile(&src, watch_file.parent().unwrap_or(Path::new("."))) {
|
||||||
Ok(html) => {
|
Ok(html) => {
|
||||||
let ms = start.elapsed().as_millis();
|
let ms = start.elapsed().as_millis();
|
||||||
let new_version = v_watcher.fetch_add(1, Ordering::SeqCst) + 1;
|
let new_version = v_watcher.fetch_add(1, Ordering::SeqCst) + 1;
|
||||||
|
|
@ -472,7 +546,7 @@ fn cmd_stream(file: &Path, relay: &str, mode: &str, port: u16) {
|
||||||
)
|
)
|
||||||
};
|
};
|
||||||
|
|
||||||
match compile(&stream_source) {
|
match compile(&stream_source, file.parent().unwrap_or(Path::new("."))) {
|
||||||
Ok(html) => {
|
Ok(html) => {
|
||||||
let html_with_hmr = inject_hmr(&html);
|
let html_with_hmr = inject_hmr(&html);
|
||||||
println!("✅ Compiled with streaming enabled");
|
println!("✅ Compiled with streaming enabled");
|
||||||
|
|
|
||||||
|
|
@ -30,6 +30,18 @@ pub enum Declaration {
|
||||||
Every(EveryDecl),
|
Every(EveryDecl),
|
||||||
/// Top-level expression statement: `log("hello")`, `push(items, x)`
|
/// Top-level expression statement: `log("hello")`, `push(items, x)`
|
||||||
ExprStatement(Expr),
|
ExprStatement(Expr),
|
||||||
|
/// `import { Card, Button } from "./components"`
|
||||||
|
Import(ImportDecl),
|
||||||
|
/// `export let count = 0`, `export component Card(...) = ...`
|
||||||
|
Export(String, Box<Declaration>),
|
||||||
|
}
|
||||||
|
|
||||||
|
/// `import { Card, Button } from "./components"`
#[derive(Debug, Clone)]
pub struct ImportDecl {
    /// Identifiers listed between the braces, in source order.
    pub names: Vec<String>,
    /// The module path string after `from`, as written (relative path;
    /// the `.ds` extension may be omitted and is added at resolution time).
    pub source: String,
    /// Source location of the import statement.
    pub span: Span,
}
|
}
|
||||||
|
|
||||||
/// `let count = 0` or `let doubled = count * 2`
|
/// `let count = 0` or `let doubled = count * 2`
|
||||||
|
|
|
||||||
|
|
@ -55,6 +55,8 @@ pub enum TokenKind {
|
||||||
Delta,
|
Delta,
|
||||||
Signals,
|
Signals,
|
||||||
Every,
|
Every,
|
||||||
|
Import,
|
||||||
|
Export,
|
||||||
|
|
||||||
// Operators
|
// Operators
|
||||||
Plus,
|
Plus,
|
||||||
|
|
@ -330,6 +332,8 @@ impl Lexer {
|
||||||
"route" => TokenKind::Route,
|
"route" => TokenKind::Route,
|
||||||
"navigate" => TokenKind::Navigate,
|
"navigate" => TokenKind::Navigate,
|
||||||
"every" => TokenKind::Every,
|
"every" => TokenKind::Every,
|
||||||
|
"import" => TokenKind::Import,
|
||||||
|
"export" => TokenKind::Export,
|
||||||
_ => TokenKind::Ident(ident.clone()),
|
_ => TokenKind::Ident(ident.clone()),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -99,6 +99,8 @@ impl Parser {
|
||||||
TokenKind::Constrain => self.parse_constrain_decl(),
|
TokenKind::Constrain => self.parse_constrain_decl(),
|
||||||
TokenKind::Stream => self.parse_stream_decl(),
|
TokenKind::Stream => self.parse_stream_decl(),
|
||||||
TokenKind::Every => self.parse_every_decl(),
|
TokenKind::Every => self.parse_every_decl(),
|
||||||
|
TokenKind::Import => self.parse_import_decl(),
|
||||||
|
TokenKind::Export => self.parse_export_decl(),
|
||||||
// Expression statement: `log("hello")`, `push(items, x)`
|
// Expression statement: `log("hello")`, `push(items, x)`
|
||||||
TokenKind::Ident(_) => {
|
TokenKind::Ident(_) => {
|
||||||
let expr = self.parse_expr()?;
|
let expr = self.parse_expr()?;
|
||||||
|
|
@ -125,6 +127,83 @@ impl Parser {
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// import { Card, Button } from "./components"
|
||||||
|
fn parse_import_decl(&mut self) -> Result<Declaration, ParseError> {
|
||||||
|
let line = self.current_token().line;
|
||||||
|
self.advance(); // consume 'import'
|
||||||
|
|
||||||
|
// Parse the import names: { name1, name2 }
|
||||||
|
self.expect(&TokenKind::LBrace)?;
|
||||||
|
let mut names = Vec::new();
|
||||||
|
loop {
|
||||||
|
self.skip_newlines();
|
||||||
|
if self.check(&TokenKind::RBrace) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
match self.peek().clone() {
|
||||||
|
TokenKind::Ident(name) => {
|
||||||
|
names.push(name);
|
||||||
|
self.advance();
|
||||||
|
}
|
||||||
|
// Allow keywords to be imported as names (e.g., component names)
|
||||||
|
_ => {
|
||||||
|
let tok = self.peek().clone();
|
||||||
|
return Err(self.error(format!("expected identifier in import, got {:?}", tok)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
self.skip_newlines();
|
||||||
|
if self.check(&TokenKind::Comma) {
|
||||||
|
self.advance(); // consume ','
|
||||||
|
}
|
||||||
|
}
|
||||||
|
self.expect(&TokenKind::RBrace)?;
|
||||||
|
|
||||||
|
// Parse "from"
|
||||||
|
self.expect(&TokenKind::From)?;
|
||||||
|
|
||||||
|
// Parse the source path
|
||||||
|
let source = match self.peek().clone() {
|
||||||
|
TokenKind::StringFragment(s) => {
|
||||||
|
self.advance();
|
||||||
|
// Consume the StringEnd if present
|
||||||
|
if self.check(&TokenKind::StringEnd) {
|
||||||
|
self.advance();
|
||||||
|
}
|
||||||
|
s
|
||||||
|
}
|
||||||
|
TokenKind::StringEnd => {
|
||||||
|
self.advance();
|
||||||
|
String::new()
|
||||||
|
}
|
||||||
|
_ => return Err(self.error("expected string after 'from'".to_string())),
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(Declaration::Import(ImportDecl {
|
||||||
|
names,
|
||||||
|
source,
|
||||||
|
span: Span { start: 0, end: 0, line },
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
// export let count = 0 / export component Card(...) = ...
//
// Parses the declaration that follows `export` normally, then wraps it in
// `Declaration::Export(name, inner)` so the import resolver can match the
// exported binding by name. Only named declarations (let / view /
// component / effect) can be exported; anything else is a parse error.
fn parse_export_decl(&mut self) -> Result<Declaration, ParseError> {
    self.advance(); // consume 'export'

    // Parse the inner declaration
    let inner = self.parse_declaration()?;

    // Extract the name being exported
    let name = match &inner {
        Declaration::Let(d) => d.name.clone(),
        Declaration::View(d) => d.name.clone(),
        Declaration::Component(d) => d.name.clone(),
        Declaration::Effect(d) => d.name.clone(),
        _ => return Err(self.error("can only export let, view, component, or effect".to_string())),
    };

    Ok(Declaration::Export(name, Box::new(inner)))
}
|
||||||
|
|
||||||
fn parse_let_decl(&mut self) -> Result<Declaration, ParseError> {
|
fn parse_let_decl(&mut self) -> Result<Declaration, ParseError> {
|
||||||
let line = self.current_token().line;
|
let line = self.current_token().line;
|
||||||
self.advance(); // consume 'let'
|
self.advance(); // consume 'let'
|
||||||
|
|
|
||||||
15
examples/modules/app.ds
Normal file
15
examples/modules/app.ds
Normal file
|
|
@ -0,0 +1,15 @@
|
||||||
|
-- Multi-file module demo
|
||||||
|
-- Imports shared state and components from another .ds file
|
||||||
|
|
||||||
|
import { shared_count, Counter } from "./shared"
|
||||||
|
|
||||||
|
let local_count = 0
|
||||||
|
|
||||||
|
view app =
|
||||||
|
column [
|
||||||
|
text "Module Demo"
|
||||||
|
text "Local: {local_count}"
|
||||||
|
button "Local +1" { click: local_count += 1 }
|
||||||
|
text "Shared (from module): {shared_count}"
|
||||||
|
Counter
|
||||||
|
]
|
||||||
10
examples/modules/shared.ds
Normal file
10
examples/modules/shared.ds
Normal file
|
|
@ -0,0 +1,10 @@
|
||||||
|
-- Shared state module — exported values used by the main app
|
||||||
|
|
||||||
|
export let shared_count = 0
|
||||||
|
|
||||||
|
export component Counter =
|
||||||
|
column [
|
||||||
|
text "Shared counter: {shared_count}"
|
||||||
|
button "+" { click: shared_count += 1 }
|
||||||
|
button "-" { click: shared_count -= 1 }
|
||||||
|
]
|
||||||
Loading…
Add table
Reference in a new issue