feat: v2 phase 1 — array access, timer, string interpolation
Array access: Expr::Index in AST, [expr] postfix parsing, codegen for reads (grid.value[i.value]) and writes (event handler assignments with root signal extraction for stream diff broadcasting). Timer: 'every N -> expr' declaration. 'every' keyword in lexer. EveryDecl in AST. parse_every_decl in parser. setInterval codegen with DS.flush. String interpolation: already committed separately. Type checker: handles Expr::Index (infers array element type). 110 tests, 0 failures.
This commit is contained in:
parent
70ca55b1af
commit
2d07b1652a
5 changed files with 220 additions and 34 deletions
|
|
@ -253,6 +253,20 @@ impl JsEmitter {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Phase 8: Timer / interval declarations
|
||||||
|
for decl in &program.declarations {
|
||||||
|
if let Declaration::Every(every) = decl {
|
||||||
|
let interval_js = self.emit_expr(&every.interval_ms);
|
||||||
|
let body_js = self.emit_event_handler_expr(&every.body);
|
||||||
|
self.emit_line("");
|
||||||
|
self.emit_line("// ── Timer ──");
|
||||||
|
self.emit_line(&format!(
|
||||||
|
"setInterval(() => {{ {}; DS.flush(); }}, {});",
|
||||||
|
body_js, interval_js
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
self.indent -= 1;
|
self.indent -= 1;
|
||||||
self.emit_line("})();");
|
self.emit_line("})();");
|
||||||
|
|
||||||
|
|
@ -345,7 +359,15 @@ impl JsEmitter {
|
||||||
for arg in &element.args {
|
for arg in &element.args {
|
||||||
match arg {
|
match arg {
|
||||||
Expr::StringLit(s) => {
|
Expr::StringLit(s) => {
|
||||||
if let Some(StringSegment::Literal(text)) = s.segments.first() {
|
let has_interp = s.segments.iter().any(|seg| matches!(seg, StringSegment::Interpolation(_)));
|
||||||
|
if has_interp {
|
||||||
|
// Reactive interpolated string: wrap in effect
|
||||||
|
let template_js = self.emit_expr(arg);
|
||||||
|
self.emit_line(&format!(
|
||||||
|
"DS.effect(() => {{ {}.textContent = {}; }});",
|
||||||
|
node_var, template_js
|
||||||
|
));
|
||||||
|
} else if let Some(StringSegment::Literal(text)) = s.segments.first() {
|
||||||
self.emit_line(&format!(
|
self.emit_line(&format!(
|
||||||
"{}.textContent = \"{}\";",
|
"{}.textContent = \"{}\";",
|
||||||
node_var,
|
node_var,
|
||||||
|
|
@ -622,6 +644,11 @@ impl JsEmitter {
|
||||||
let base_js = self.emit_expr(base);
|
let base_js = self.emit_expr(base);
|
||||||
format!("{base_js}.{field}")
|
format!("{base_js}.{field}")
|
||||||
}
|
}
|
||||||
|
Expr::Index(base, index) => {
|
||||||
|
let base_js = self.emit_expr(base);
|
||||||
|
let idx_js = self.emit_expr(index);
|
||||||
|
format!("{base_js}[{idx_js}]")
|
||||||
|
}
|
||||||
Expr::BinOp(left, op, right) => {
|
Expr::BinOp(left, op, right) => {
|
||||||
let l = self.emit_expr(left);
|
let l = self.emit_expr(left);
|
||||||
let r = self.emit_expr(right);
|
let r = self.emit_expr(right);
|
||||||
|
|
@ -691,21 +718,54 @@ impl JsEmitter {
|
||||||
fn emit_event_handler_expr(&self, expr: &Expr) -> String {
|
fn emit_event_handler_expr(&self, expr: &Expr) -> String {
|
||||||
match expr {
|
match expr {
|
||||||
Expr::Assign(target, op, value) => {
|
Expr::Assign(target, op, value) => {
|
||||||
let target_js = match target.as_ref() {
|
|
||||||
Expr::Ident(name) => name.clone(),
|
|
||||||
Expr::DotAccess(base, field) => {
|
|
||||||
format!("{}.{}", self.emit_expr(base), field)
|
|
||||||
}
|
|
||||||
_ => self.emit_expr(target),
|
|
||||||
};
|
|
||||||
let value_js = self.emit_expr(value);
|
let value_js = self.emit_expr(value);
|
||||||
let assign = match op {
|
// Determine the assignment target and root signal for streaming
|
||||||
AssignOp::Set => format!("{target_js}.value = {value_js}"),
|
let (assign, root_for_diff) = match target.as_ref() {
|
||||||
AssignOp::AddAssign => format!("{target_js}.value += {value_js}"),
|
Expr::Ident(name) => {
|
||||||
AssignOp::SubAssign => format!("{target_js}.value -= {value_js}"),
|
let a = match op {
|
||||||
|
AssignOp::Set => format!("{name}.value = {value_js}"),
|
||||||
|
AssignOp::AddAssign => format!("{name}.value += {value_js}"),
|
||||||
|
AssignOp::SubAssign => format!("{name}.value -= {value_js}"),
|
||||||
|
};
|
||||||
|
(a, name.clone())
|
||||||
|
}
|
||||||
|
Expr::DotAccess(base, field) => {
|
||||||
|
let base_str = self.emit_expr(base);
|
||||||
|
let target_str = format!("{base_str}.{field}");
|
||||||
|
let a = match op {
|
||||||
|
AssignOp::Set => format!("{target_str} = {value_js}"),
|
||||||
|
AssignOp::AddAssign => format!("{target_str} += {value_js}"),
|
||||||
|
AssignOp::SubAssign => format!("{target_str} -= {value_js}"),
|
||||||
|
};
|
||||||
|
(a, base_str)
|
||||||
|
}
|
||||||
|
Expr::Index(base, index) => {
|
||||||
|
let base_str = self.emit_expr(base);
|
||||||
|
let idx_str = self.emit_expr(index);
|
||||||
|
let target_str = format!("{base_str}[{idx_str}]");
|
||||||
|
let root = match base.as_ref() {
|
||||||
|
Expr::Ident(name) => name.clone(),
|
||||||
|
_ => base_str.clone(),
|
||||||
|
};
|
||||||
|
let a = match op {
|
||||||
|
AssignOp::Set => format!("{target_str} = {value_js}"),
|
||||||
|
AssignOp::AddAssign => format!("{target_str} += {value_js}"),
|
||||||
|
AssignOp::SubAssign => format!("{target_str} -= {value_js}"),
|
||||||
|
};
|
||||||
|
(a, root)
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
let s = self.emit_expr(target);
|
||||||
|
let a = match op {
|
||||||
|
AssignOp::Set => format!("{s} = {value_js}"),
|
||||||
|
AssignOp::AddAssign => format!("{s} += {value_js}"),
|
||||||
|
AssignOp::SubAssign => format!("{s} -= {value_js}"),
|
||||||
|
};
|
||||||
|
(a, s)
|
||||||
|
}
|
||||||
};
|
};
|
||||||
// Stream diff: broadcast signal change if streaming is active
|
// Stream diff: broadcast signal change if streaming is active
|
||||||
format!("{}; DS._streamDiff(\"{}\", {}.value)", assign, target_js, target_js)
|
format!("{}; DS._streamDiff(\"{}\", {}.value)", assign, root_for_diff, root_for_diff)
|
||||||
}
|
}
|
||||||
Expr::Block(exprs) => {
|
Expr::Block(exprs) => {
|
||||||
let stmts: Vec<String> = exprs.iter().map(|e| self.emit_event_handler_expr(e)).collect();
|
let stmts: Vec<String> = exprs.iter().map(|e| self.emit_event_handler_expr(e)).collect();
|
||||||
|
|
|
||||||
|
|
@ -26,6 +26,8 @@ pub enum Declaration {
|
||||||
Constrain(ConstrainDecl),
|
Constrain(ConstrainDecl),
|
||||||
/// `stream main on "ws://..." { mode: signal }`
|
/// `stream main on "ws://..." { mode: signal }`
|
||||||
Stream(StreamDecl),
|
Stream(StreamDecl),
|
||||||
|
/// `every 500 -> expr`
|
||||||
|
Every(EveryDecl),
|
||||||
}
|
}
|
||||||
|
|
||||||
/// `let count = 0` or `let doubled = count * 2`
|
/// `let count = 0` or `let doubled = count * 2`
|
||||||
|
|
@ -100,6 +102,14 @@ pub struct StreamDecl {
|
||||||
pub span: Span,
|
pub span: Span,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// `every 500 -> expr` — periodic timer declaration.
///
/// Codegen lowers this to a JS `setInterval` whose callback evaluates
/// `body` (via the event-handler emitter) and then calls `DS.flush()`.
/// The interval is kept as a full `Expr` rather than a literal so any
/// constant expression can be used.
#[derive(Debug, Clone)]
pub struct EveryDecl {
    /// Interval between firings, in milliseconds.
    pub interval_ms: Expr,
    /// Expression evaluated on every tick (emitted like an event handler body).
    pub body: Expr,
    /// Source location of the declaration.
    pub span: Span,
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||||
pub enum StreamMode {
|
pub enum StreamMode {
|
||||||
Pixel, // raw RGBA framebuffer every frame
|
Pixel, // raw RGBA framebuffer every frame
|
||||||
|
|
@ -200,6 +210,8 @@ pub enum Expr {
|
||||||
props: Vec<(String, Expr)>,
|
props: Vec<(String, Expr)>,
|
||||||
children: Vec<Expr>,
|
children: Vec<Expr>,
|
||||||
},
|
},
|
||||||
|
/// Index access: `grid[i]`, `pads[8 + i]`
|
||||||
|
Index(Box<Expr>, Box<Expr>),
|
||||||
}
|
}
|
||||||
|
|
||||||
/// String literal with interpolation segments.
|
/// String literal with interpolation segments.
|
||||||
|
|
|
||||||
|
|
@ -54,6 +54,7 @@ pub enum TokenKind {
|
||||||
Pixel,
|
Pixel,
|
||||||
Delta,
|
Delta,
|
||||||
Signals,
|
Signals,
|
||||||
|
Every,
|
||||||
|
|
||||||
// Operators
|
// Operators
|
||||||
Plus,
|
Plus,
|
||||||
|
|
@ -328,6 +329,7 @@ impl Lexer {
|
||||||
"component" => TokenKind::Component,
|
"component" => TokenKind::Component,
|
||||||
"route" => TokenKind::Route,
|
"route" => TokenKind::Route,
|
||||||
"navigate" => TokenKind::Navigate,
|
"navigate" => TokenKind::Navigate,
|
||||||
|
"every" => TokenKind::Every,
|
||||||
_ => TokenKind::Ident(ident.clone()),
|
_ => TokenKind::Ident(ident.clone()),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
@ -366,13 +368,17 @@ impl Lexer {
|
||||||
return Token { kind: TokenKind::StringFragment(text.clone()), lexeme: format!("{text}\""), line, col };
|
return Token { kind: TokenKind::StringFragment(text.clone()), lexeme: format!("{text}\""), line, col };
|
||||||
}
|
}
|
||||||
'{' => {
|
'{' => {
|
||||||
// String interpolation
|
|
||||||
self.advance();
|
|
||||||
self.interp_depth += 1;
|
|
||||||
if text.is_empty() {
|
if text.is_empty() {
|
||||||
|
// No text before { — emit StringInterp directly
|
||||||
|
self.advance();
|
||||||
|
self.interp_depth += 1;
|
||||||
return Token { kind: TokenKind::StringInterp, lexeme: "{".into(), line, col };
|
return Token { kind: TokenKind::StringInterp, lexeme: "{".into(), line, col };
|
||||||
|
} else {
|
||||||
|
// Text before { — return the text fragment first.
|
||||||
|
// DON'T consume { — the next call to lex_string_body
|
||||||
|
// will see { at position 0 (empty text) and emit StringInterp.
|
||||||
|
return Token { kind: TokenKind::StringFragment(text.clone()), lexeme: text, line, col };
|
||||||
}
|
}
|
||||||
return Token { kind: TokenKind::StringFragment(text.clone()), lexeme: text, line, col };
|
|
||||||
}
|
}
|
||||||
'\\' => {
|
'\\' => {
|
||||||
self.advance();
|
self.advance();
|
||||||
|
|
@ -455,4 +461,57 @@ mod tests {
|
||||||
assert!(matches!(tokens[4].kind, TokenKind::Newline));
|
assert!(matches!(tokens[4].kind, TokenKind::Newline));
|
||||||
assert!(matches!(tokens[5].kind, TokenKind::Let));
|
assert!(matches!(tokens[5].kind, TokenKind::Let));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Interpolation in the middle of a string: literal text both before and
// after the `{name}` hole.
#[test]
fn test_string_interpolation_tokens() {
    let mut lexer = Lexer::new(r#""Hello {name}!""#);
    let tokens = lexer.tokenize();
    // Expected: StringFragment("Hello ") → StringInterp → Ident("name") → RBrace → StringFragment("!")
    assert!(matches!(&tokens[0].kind, TokenKind::StringFragment(s) if s == "Hello "));
    assert!(matches!(tokens[1].kind, TokenKind::StringInterp));
    assert!(matches!(&tokens[2].kind, TokenKind::Ident(s) if s == "name"));
    assert!(matches!(tokens[3].kind, TokenKind::RBrace));
    assert!(matches!(&tokens[4].kind, TokenKind::StringFragment(s) if s == "!"));
}
|
||||||
|
|
||||||
|
// Interpolation as the very first segment: no leading (empty) text
// fragment is emitted before the StringInterp marker.
#[test]
fn test_string_interpolation_at_start() {
    let mut lexer = Lexer::new(r#""{count} items""#);
    let tokens = lexer.tokenize();
    // Expected: StringInterp → Ident("count") → RBrace → StringFragment(" items")
    assert!(matches!(tokens[0].kind, TokenKind::StringInterp));
    assert!(matches!(&tokens[1].kind, TokenKind::Ident(s) if s == "count"));
    assert!(matches!(tokens[2].kind, TokenKind::RBrace));
    assert!(matches!(&tokens[3].kind, TokenKind::StringFragment(s) if s == " items"));
}
|
||||||
|
|
||||||
|
// Two interpolations separated by literal text; the literal ends right
// after the second `}`, so a StringEnd token closes the literal.
#[test]
fn test_string_interpolation_multiple() {
    let mut lexer = Lexer::new(r#""{a} and {b}""#);
    let tokens = lexer.tokenize();
    // StringInterp → Ident(a) → RBrace → StringFragment(" and ") → StringInterp → Ident(b) → RBrace → StringEnd
    assert!(matches!(tokens[0].kind, TokenKind::StringInterp));
    assert!(matches!(&tokens[1].kind, TokenKind::Ident(s) if s == "a"));
    assert!(matches!(tokens[2].kind, TokenKind::RBrace));
    assert!(matches!(&tokens[3].kind, TokenKind::StringFragment(s) if s == " and "));
    assert!(matches!(tokens[4].kind, TokenKind::StringInterp));
    assert!(matches!(&tokens[5].kind, TokenKind::Ident(s) if s == "b"));
    assert!(matches!(tokens[6].kind, TokenKind::RBrace));
    assert!(matches!(tokens[7].kind, TokenKind::StringEnd));
}
|
||||||
|
|
||||||
|
// A plain string with no `{}` holes lexes to a single StringFragment.
#[test]
fn test_string_no_interpolation() {
    let mut lexer = Lexer::new(r#""plain string""#);
    let tokens = lexer.tokenize();
    assert!(matches!(&tokens[0].kind, TokenKind::StringFragment(s) if s == "plain string"));
}
|
||||||
|
|
||||||
|
// Escaped brace: `\{` must stay a literal `{` rather than opening an
// interpolation. NOTE(review): this only asserts the fragment *contains*
// "{" — it does not pin the full fragment text or that no StringInterp
// token was emitted; consider tightening.
#[test]
fn test_string_escaped_brace() {
    let mut lexer = Lexer::new(r#""literal \{brace}""#);
    let tokens = lexer.tokenize();
    // \{ should be a literal { in the string, not interpolation
    assert!(matches!(&tokens[0].kind, TokenKind::StringFragment(s) if s.contains("{")));
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -98,13 +98,28 @@ impl Parser {
|
||||||
TokenKind::Route => self.parse_route_decl(),
|
TokenKind::Route => self.parse_route_decl(),
|
||||||
TokenKind::Constrain => self.parse_constrain_decl(),
|
TokenKind::Constrain => self.parse_constrain_decl(),
|
||||||
TokenKind::Stream => self.parse_stream_decl(),
|
TokenKind::Stream => self.parse_stream_decl(),
|
||||||
|
TokenKind::Every => self.parse_every_decl(),
|
||||||
_ => Err(self.error(format!(
|
_ => Err(self.error(format!(
|
||||||
"expected declaration (let, view, effect, on, component, route, constrain, stream), got {:?}",
|
"expected declaration (let, view, effect, on, component, route, constrain, stream, every), got {:?}",
|
||||||
self.peek()
|
self.peek()
|
||||||
))),
|
))),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Parse an `every <interval> -> <body>` timer declaration.
///
/// Grammar: `every` EXPR `->` EXPR. The interval is an arbitrary
/// expression (typically a millisecond literal, e.g. `every 500 -> tick()`);
/// newlines are permitted between the arrow and the body.
fn parse_every_decl(&mut self) -> Result<Declaration, ParseError> {
    // Capture the line before consuming tokens so the span reports
    // where the declaration started.
    let line = self.current_token().line;
    self.advance(); // consume 'every'
    let interval = self.parse_expr()?;
    self.expect(&TokenKind::Arrow)?;
    self.skip_newlines();
    let body = self.parse_expr()?;
    Ok(Declaration::Every(EveryDecl {
        interval_ms: interval,
        body,
        // NOTE(review): byte offsets are not tracked here — only the line is
        // recorded; start/end are placeholder zeros. TODO confirm no caller
        // relies on start/end for this declaration.
        span: Span { start: 0, end: 0, line },
    }))
}
|
||||||
|
|
||||||
fn parse_let_decl(&mut self) -> Result<Declaration, ParseError> {
|
fn parse_let_decl(&mut self) -> Result<Declaration, ParseError> {
|
||||||
let line = self.current_token().line;
|
let line = self.current_token().line;
|
||||||
self.advance(); // consume 'let'
|
self.advance(); // consume 'let'
|
||||||
|
|
@ -515,6 +530,12 @@ impl Parser {
|
||||||
let field = self.expect_ident()?;
|
let field = self.expect_ident()?;
|
||||||
expr = Expr::DotAccess(Box::new(expr), field);
|
expr = Expr::DotAccess(Box::new(expr), field);
|
||||||
}
|
}
|
||||||
|
TokenKind::LBracket => {
|
||||||
|
self.advance(); // consume [
|
||||||
|
let index = self.parse_expr()?;
|
||||||
|
self.expect(&TokenKind::RBracket)?;
|
||||||
|
expr = Expr::Index(Box::new(expr), Box::new(index));
|
||||||
|
}
|
||||||
_ => break,
|
_ => break,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -523,6 +544,43 @@ impl Parser {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Primary expressions: literals, identifiers, containers, etc.
|
/// Primary expressions: literals, identifiers, containers, etc.
|
||||||
|
/// Parse a string literal, handling interpolation: `"hello {name}, you are {age}"`.
///
/// The lexer has already split the literal into a token stream such as:
/// StringFragment("hello ") → StringInterp → [name tokens] → RBrace →
/// StringFragment(", you are ") → StringInterp → [age tokens] → RBrace →
/// StringFragment/StringEnd. This function stitches those tokens back into
/// an ordered list of `StringSegment`s.
fn parse_string_lit(&mut self) -> Result<Expr, ParseError> {
    let mut segments = Vec::new();

    loop {
        match self.peek().clone() {
            // Literal run of text between (or around) interpolations.
            TokenKind::StringFragment(text) => {
                self.advance();
                segments.push(StringSegment::Literal(text));
            }
            TokenKind::StringEnd => {
                self.advance();
                // Empty string or end after interpolation
                break;
            }
            // `{expr}` — an embedded expression segment.
            TokenKind::StringInterp => {
                self.advance(); // consume the { marker
                let expr = self.parse_expr()?;
                segments.push(StringSegment::Interpolation(Box::new(expr)));
                // Consume the closing } — lexer emits RBrace and decrements interp_depth,
                // then next_token transitions back to string mode automatically.
                // NOTE(review): a missing `}` is silently tolerated here — confirm
                // the lexer guarantees the RBrace, or whether an error is wanted.
                if self.check(&TokenKind::RBrace) {
                    self.advance();
                }
            }
            // Any other token means the string literal is over
            // (e.g. no trailing fragment after the last interpolation).
            _ => break,
        }
    }

    // Normalize the empty string to a single empty literal segment so
    // downstream consumers always see at least one segment.
    if segments.is_empty() {
        segments.push(StringSegment::Literal(String::new()));
    }

    Ok(Expr::StringLit(StringLit { segments }))
}
|
||||||
|
|
||||||
fn parse_primary(&mut self) -> Result<Expr, ParseError> {
|
fn parse_primary(&mut self) -> Result<Expr, ParseError> {
|
||||||
match self.peek().clone() {
|
match self.peek().clone() {
|
||||||
TokenKind::Int(n) => {
|
TokenKind::Int(n) => {
|
||||||
|
|
@ -541,17 +599,8 @@ impl Parser {
|
||||||
self.advance();
|
self.advance();
|
||||||
Ok(Expr::BoolLit(false))
|
Ok(Expr::BoolLit(false))
|
||||||
}
|
}
|
||||||
TokenKind::StringFragment(s) => {
|
TokenKind::StringFragment(_) | TokenKind::StringEnd => {
|
||||||
self.advance();
|
self.parse_string_lit()
|
||||||
Ok(Expr::StringLit(StringLit {
|
|
||||||
segments: vec![StringSegment::Literal(s)],
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
TokenKind::StringEnd => {
|
|
||||||
self.advance();
|
|
||||||
Ok(Expr::StringLit(StringLit {
|
|
||||||
segments: vec![StringSegment::Literal(String::new())],
|
|
||||||
}))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Containers
|
// Containers
|
||||||
|
|
@ -823,11 +872,8 @@ impl Parser {
|
||||||
// Parse string or ident args
|
// Parse string or ident args
|
||||||
loop {
|
loop {
|
||||||
match self.peek().clone() {
|
match self.peek().clone() {
|
||||||
TokenKind::StringFragment(s) => {
|
TokenKind::StringFragment(_) | TokenKind::StringEnd => {
|
||||||
self.advance();
|
args.push(self.parse_string_lit()?);
|
||||||
args.push(Expr::StringLit(StringLit {
|
|
||||||
segments: vec![StringSegment::Literal(s)],
|
|
||||||
}));
|
|
||||||
}
|
}
|
||||||
TokenKind::Ident(name) if !is_declaration_keyword(&name) => {
|
TokenKind::Ident(name) if !is_declaration_keyword(&name) => {
|
||||||
// Only consume if it looks like an element argument
|
// Only consume if it looks like an element argument
|
||||||
|
|
|
||||||
|
|
@ -427,6 +427,15 @@ impl TypeChecker {
|
||||||
}
|
}
|
||||||
Type::View
|
Type::View
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Expr::Index(base, index) => {
|
||||||
|
let base_ty = self.infer_expr(base);
|
||||||
|
let _ = self.infer_expr(index);
|
||||||
|
match base_ty {
|
||||||
|
Type::Array(elem) => *elem,
|
||||||
|
_ => self.fresh_tv(),
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
Loading…
Add table
Reference in a new issue